diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..106d3eb67 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,67 @@ +# Ignore development and build files +node_modules +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.npm +.yarn-integrity + +# Ignore git and version control +.git +.gitignore +.github + +# Ignore documentation and examples +docs +examples +*.md +!README.md + +# Ignore test files +test +tests +coverage +.nyc_output +.mocha* + +# Ignore development configuration +.vscode +.idea +*.swp +*.swo +*~ + +# Ignore build artifacts +lib +dist +build +target + +# Ignore temporary files +tmp +temp +.tmp +.temp + +# Ignore logs +logs +*.log + +# Ignore OS generated files +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +# Ignore environment files +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +# Ignore package manager locks for Docker (we'll copy them explicitly if needed) +package-lock.json diff --git a/.eslintignore b/.eslintignore index 3c95b8e09..ee638464f 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,3 +1 @@ -node_modules -lib -ref \ No newline at end of file +*.cjs/ diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 000000000..9a2d369de --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,166 @@ +# GitHub Copilot Instructions for sfdx-hardis + +## Project Overview + +**sfdx-hardis** is a comprehensive Salesforce DevOps toolbox that provides CI/CD pipeline capabilities, metadata backup/monitoring, and project documentation generation. It's an open-source project by Cloudity that includes AI-powered features for Salesforce development. 
+
+## Package Manager
+
+**⚠️ IMPORTANT: This project uses Yarn as the package manager**
+
+- Use `yarn` instead of `npm` for all package management operations
+- Use `yarn install` to install dependencies
+- Use `yarn add <package-name>` to add new dependencies
+- Use `yarn remove <package-name>` to remove dependencies
+- Lock file: `yarn.lock` (do not modify manually)
+
+## Technology Stack
+
+- **Language**: TypeScript
+- **CLI Framework**: Oclif (Salesforce CLI framework)
+- **Build System**: Wireit (for task orchestration)
+- **Testing**: Mocha with Nyc for coverage
+- **Linting**: ESLint with Salesforce configurations
+- **AI Integration**: LangChain with multiple providers (Anthropic, Google GenAI, Ollama)
+- **Salesforce**: Salesforce Core libraries and SF Plugins Core
+
+## Project Structure
+
+```
+src/
+├── commands/          # CLI commands organized by category
+│   └── hardis/        # Main command namespace
+│       ├── auth/      # Authentication commands
+│       ├── doc/       # Documentation generation commands
+│       ├── org/       # Org management commands
+│       └── ...
+├── common/            # Shared utilities and helpers
+│   ├── aiProvider/    # AI integration and prompt templates
+│   └── utils/         # Common utility functions
+test/                  # Test files
+lib/                   # Compiled JavaScript output
+messages/              # Internationalization messages
+defaults/              # Default configurations and templates
+docs/                  # Project documentation
+```
+
+## Development Guidelines
+
+### Command Development
+
+- All CLI commands extend `SfCommand` from `@salesforce/sf-plugins-core`
+- Commands follow the pattern: `sf hardis:<category>:<command>`
+- Use proper TypeScript typing with `AnyJson` return types
+- Use the `uxLog` utility for consistent logging output with chalk colors (do not use emojis at the beginning of log lines)
+
+### Coding Standards
+
+- Follow TypeScript strict mode requirements
+- Use ESLint and Prettier configurations provided
+- Import statements should use `.js` extensions for compiled compatibility
+- Use `/* jscpd:ignore-start */` and `/* jscpd:ignore-end */` to ignore code duplication checks where appropriate
+
+### AI Features
+
+- Prompt templates are defined in `src/common/aiProvider/promptTemplates/`
+- Each template exports a `PromptTemplateDefinition` with variables and multilingual text
+- Templates can be overridden by placing `.txt` files in `config/prompt-templates/`
+- Support multiple AI providers via LangChain
+
+### Build and Test
+
+- Build: `yarn build` (uses Wireit orchestration)
+- Test: `yarn test`
+- Lint: `yarn lint`
+- Clean: `yarn clean`
+- Development: Use `./bin/dev.js` for testing commands locally
+
+### File Patterns
+
+- Commands: `src/commands/hardis/**/*.ts`
+- Tests: `test/**/*.test.ts` or `**/*.nut.ts` for integration tests
+- Messages: `messages/**/*.md` for internationalization
+- Utilities: `src/common/utils/**/*.ts`
+
+### Dependencies
+
+- Salesforce-specific dependencies in `@salesforce/*` namespace
+- AI features use `@langchain/*` packages
+- Use `fs-extra` for file operations
+- Use `chalk` for colored console output
+- Use `columnify` for table
formatting
+
+### Git Workflow
+
+- Uses Husky for git hooks
+- Conventional commits are encouraged
+- Automated workflows for testing, building, and releasing
+- Mega-linter integration for code quality
+
+## AI Integration Notes
+
+- Supports multiple AI providers (Anthropic, Google GenAI, Ollama)
+- Prompt templates are versioned and localizable
+- AI features are used for documentation generation and error solving
+- Custom prompts can be overridden via configuration files
+
+## Documentation
+
+- Main docs at <https://sfdx-hardis.cloudity.com/>
+- Command documentation auto-generated via `yarn build:doc`
+- Uses MkDocs for documentation site generation
+- Supports AI-generated documentation features
+- Each command must have a `description` property with command behavior and technical explanations
+
+Example:
+
+```typescript
+  public static description = `
+## Command Behavior
+
+**Checks the current usage of various Salesforce org limits and sends notifications if thresholds are exceeded.**
+
+This command is a critical component of proactive Salesforce org management, helping administrators and developers monitor resource consumption and prevent hitting critical limits that could impact performance or functionality. It provides early warnings when limits are approaching their capacity.
+
+Key functionalities:
+
+- **Limit Retrieval:** Fetches a comprehensive list of all Salesforce org limits using the Salesforce CLI.
+- **Usage Calculation:** Calculates the percentage of each limit that is currently being used.
+- **Threshold-Based Alerting:** Assigns a severity (success, warning, or error) to each limit based on configurable thresholds:
+  - **Warning:** If usage exceeds 50% (configurable via \`LIMIT_THRESHOLD_WARNING\` environment variable).
+  - **Error:** If usage exceeds 75% (configurable via \`LIMIT_THRESHOLD_ERROR\` environment variable).
+- **CSV Report Generation:** Generates a CSV file containing all org limits, their current usage, maximum allowed, and calculated percentage used, along with the assigned severity. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of limits that have exceeded the warning or error thresholds. + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-org-limits/) and can output Grafana, Slack and MsTeams Notifications. + +
+<details markdown="1">
+<summary>Technical explanations</summary>
+
+The command's technical implementation involves:
+
+- **Salesforce CLI Integration:** It executes the \`sf org limits list\` command to retrieve the current org limits. It parses the JSON output of this command.
+- **Data Processing:** It iterates through the retrieved limits, calculates the \`used\` and \`percentUsed\` values, and assigns a \`severity\` (success, warning, error) based on the configured thresholds.
+- **Environment Variable Configuration:** Reads \`LIMIT_THRESHOLD_WARNING\` and \`LIMIT_THRESHOLD_ERROR\` environment variables to set the warning and error thresholds for limit usage.
+- **Report Generation:** It uses \`generateCsvFile\` to create the CSV report of org limits.
+- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including attachments of the generated CSV report and detailed metrics for each limit, which can be consumed by monitoring dashboards like Grafana.
+- **Exit Code Management:** Sets the process exit code to 1 if any limit is in an 'error' state, indicating a critical issue.
+</details>
+`; + +``` + +## Special Considerations + +- Large codebase with 300+ commands +- Enterprise-grade tool used in production environments +- Multi-platform support (Windows, macOS, Linux) +- Docker container support available +- VS Code extension available for UI interaction + +When working on this project, always consider the enterprise nature of the tool and maintain high code quality standards. + +## Copilot behavior + +- Do not ask if I want to continue to iterate: ALWAYS continue to iterate until the task is complete. +- Build commands using git bash for windows formatting. \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..0ef035dfe --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,22 @@ +version: 2 +updates: + - package-ecosystem: 'npm' + directory: '/' + schedule: + interval: 'weekly' + day: 'saturday' + versioning-strategy: 'increase' + labels: + - 'dependencies' + open-pull-requests-limit: 5 + pull-request-branch-name: + separator: '-' + commit-message: + # cause a release for non-dev-deps + prefix: fix(deps) + # no release for dev-deps + prefix-development: chore(dev-deps) + ignore: + - dependency-name: '@salesforce/dev-scripts' + - dependency-name: '*' + update-types: ['version-update:semver-major'] diff --git a/.github/linters/.cspell.json b/.github/linters/.cspell.json index a6304cea1..a42f4963d 100644 --- a/.github/linters/.cspell.json +++ b/.github/linters/.cspell.json @@ -3,6 +3,7 @@ "**/node_modules/**", "**/vscode-extension/**", "**/.git/**", + "**/.gitignore", "**/grafana/**", ".vscode", "package-lock.json", @@ -780,6 +781,8 @@ "unfrozeuser", "unicity", "uniquement", + "unlockedpackage", + "ulpkg", "unmanaged", "unpackaged", "unparse", @@ -826,4 +829,1111 @@ "\u00eatre", "\u00ecntegration" ] -} + "ignorePaths": [ + "**/node_modules/**", + "**/vscode-extension/**", + "**/.git/**", + "**/grafana/**", + ".vscode", + "package-lock.json", + "report", + ".cspell.json" + ], + 
"language": "en", + "version": "0.1", + "words": [ + "AAAS", + "AAAU", + "ACCESSTOKEN", + "ACCESSTOKEN) -- (process.env.SYSTEM_ACCESSTOKEN", + "AFFERO", + "AGENTFORCE", + "AGPL", + "ALPHAID", + "APICIL", + "APPROVALPROCESS", + "ASSIGNMENTRULES", + "ATATT", + "ATATTXXXXXXXXXXXXXXXXXXXX", + "ATBBXXXXXXXXXXXXXXXXXXXX", + "AULL", + "AUTORESPONSE", + "AUTORESPONSERULES", + "Accueil", + "Actv", + "Administrateur", + "Affero", + "Afterwork", + "Agentforce", + "Ajoutez", + "Alainbates", + "Amory", + "Ang\u00e9lique", + "Anush", + "Astran", + "Astro", + "Avlbl", + "BETAID", + "BRANCHNAME", + "BUILDID", + "Backuped", + "Badwords", + "Basri", + "Batchable", + "Buildx", + "BULKAPIV", + "Bulkify", + "CANARYID", + "CARRENET", + "CCAU", + "COLLECTIONURI", + "COMPTES", + "Callout", + "Catg", + "Chantier", + "Cloudi", + "Cloudity", + "Codecov", + "Codefresh", + "Colladon", + "Commerciaux", + "Communit", + "CONSUMERKEY", + "Contentlower", + "Correspondances", + "Couldn", + "Cyclomatic", + "DDHG", + "Dflt", + "DYSA", + "Dandb", + "Dataspace", + "Defaut", + "Demarrage", + "Devhub", + "Dont", + "Dreamforce", + "Dreamin", + "EAAU", + "ECAU", + "ECONNABORTED", + "ECONNRESET", + "EDITORCONFIG", + "ESCALATIONRULES", + "Editability", + "Eqoehef", + "Erro", + "Explainability", + "Externe", + "FGGF", + "FHDHGDH", + "FILEIO", + "FSEDS", + "Facturation", + "Famille", + "Finet", + "Firstname", + "Flexi", + "Fonction", + "Fswg", + "GHSA", + "GHTRGDHD", + "GITLEAKS", + "GRYPE", + "Gagne", + "Gmail", + "Guenego", + "Gitea", + "HADOLINT", + "HEALTHCHECK", + "Hardis", + "Hdhghg", + "Hentschke", + "Hfgfgjfh", + "IGHT", + "INTEG", + "IVMA", + "Iiwib", + "Ijoi", + "JSONLINT", + "JVXXXXXXXXXXXXXXXXXXXX", + "Jokinen", + "KERNN", + "KVDB", + "Keyfile", + "Keyv", + "Kvdb", + "L'erreur", + "LANGCHAIN", + "LISTVIEWNAME", + "LVMYAA", + "LWC", + "Licences", + "Lwcs", + "MARKDOWNLINT", + "MASTERLABEL", + "MYFOLDER", + "MYREPORTNAME", + "MYSTATUS", + "MYTASK", + "Mariia", + "Maxime", + "Mehdi", + "Metadatas", + 
"Modstamp", + "NOPMD", + "Ncss", + "Ndays", + "Novaxel", + "OCIs", + "ODNs", + "OOOOOPS", + "ORGALIAS", + "Ohana", + "Ollama", + "Ollama's", + "Omnichannel", + "Orgs", + "Ozil", + "Ozil's", + "PQAF", + "PROFILENAME", + "PROSELINT", + "PUBLICENDPOINT", + "PULLREQUEST", + "PULLREQUESTID", + "Pardot", + "Picklist", + "Picklists", + "Picoreau", + "Planification", + "Portail", + "Post", + "Poudel", + "Prouvot", + "Pset", + "Psets", + "Psla", + "Publishe", + "Pullrequests", + "Pyvovarchuk", + "QESRDTHFKGKH", + "Queueable", + "Quickfix", + "REPOSITORYNAME", + "RWAAA", + "Recalc", + "Recrutement", + "Reseting", + "Rizzi", + "Rypple", + "SAST", + "SEMGREP", + "SHELLCHECK", + "SOMENAMESPACE", + "SOURCEBRANCHNAME", + "STYLELINT", + "SYFT", + "Saidani", + "Schedulable", + "Scontrol", + "Scratches", + "Sebastien", + "Sfdc", + "Sfdx", + "Shirikova", + "Solva", + "Stepan", + "Stepanov", + "Stmts", + "Subflow", + "Sublicensing", + "Sucesso", + "Suspendre", + "Sustn", + "Suwi\u0144ski", + "Syst\u00e8me", + "TCRM", + "TEAMPROJECT", + "THEREISNOT", + "Taha", + "Teste", + "Trailmix", + "Transco", + "Transcos", + "Tuto", + "Typederendezvous", + "Ujut", + "Unallowed", + "Unstash", + "Uoms", + "Upsert", + "VCAS", + "VERSIONNUMBER", + "Veuillez", + "Viewfile", + "Visualforce", + "Visualiser", + "Vuillamy", + "WIPO", + "WQAR", + "Wiql", + "Wojciech", + "XLSX", + "Xmls", + "YOURDEVHUBUSERNAME", + "YOURORGNAME", + "YOURSOURCEORG", + "YOURSOURCEORGUSERNAME", + "Yahooo", + "Yeur", + "Yosra", + "ZWSAU", + "accordionallowed", + "accordionapex", + "accordionauto", + "accordionavailable", + "accordionbranch", + "accordionclean", + "accordioncommands", + "accordioncustom", + "accordiondata", + "accordiondefault", + "accordiondeployment", + "accordiondev", + "accordiondevelopment", + "accordiondoc", + "accordionextends", + "accordioninit", + "accordioninstall", + "accordioninstalled", + "accordioninstance", + "accordionlinter", + "accordionlist", + "accordionmanual", + "accordionmerge", + 
"accordionmonitoring", + "accordionms", + "accordionnew", + "accordionnotifications", + "accordionpackage", + "accordionpool", + "accordionproduction", + "accordionproject", + "accordionrefresh", + "accordionretrofit", + "accordionruntests", + "accordionscratch", + "accordionsfdmu", + "accordionshared", + "accordionskip", + "accordionsources", + "accordiontarget", + "accordiontest", + "accordionuse", + "activateduser", + "activateinvalid", + "addeduserpackagelicense", + "administratif", + "agentforce", + "aiapplicationconfig", + "all", + "allowfullscreen", + "allowpurgefailure", + "allproperties", + "analyste", + "antislashes", + "apexdocs", + "apexlog", + "apexp", + "apexpages", + "apextest", + "apiversion", + "apos", + "appsetup", + "aquasecurity", + "assigneduserstomobileconfig", + "astran", + "astrea", + "attributs", + "audittrail", + "auraenabled", + "authprovider", + "authproviders", + "autocleantypes", + "automations", + "autoplay", + "avant", + "avec", + "azdev", + "badwords", + "bestpractices", + "blockquotes", + "boza", + "buildargs", + "buildx", + "bulletpoints", + "cacache", + "calcul", + "callincallout", + "callins", + "callout", + "callouts", + "canmodify", + "caseentitlement", + "certaines", + "changedcommunitynickname", + "changedemail", + "changedfederationid", + "changedinteractionuseroffon", + "changedinteractionuseronoff", + "changedknowledgeuseroffon", + "changedmanager", + "changedmarketinguseroffon", + "changedmarketinguseronoff", + "changedofflineuseroffon", + "changedpassword", + "changedprofileforuser", + "changedprofileforusercusttostd", + "changedprofileforuserstdtocust", + "changedprofileforuserstdtostd", + "changedroleforuser", + "changedroleforuserfromnone", + "changedroleforusertonone", + "changedsenderemail", + "changedsfcontentuseroffon", + "changedsupportuseroffon", + "changedusername", + "changements", + "changemgmt", + "chang\u00e9", + "checkcoverage", + "checkcoverage) -- endArgs.indexOf(\"--checkcoverage\"", + "checkonly", + 
"choco", + "chunksize", + "cicd", + "classname", + "clientid", + "cloudiscore", + "codecov", + "codecoverage", + "codestyle", + "columnify", + "commandreference", + "commandsstop", + "commencer", + "commitlint", + "commitmode", + "commitsto", + "compris", + "concat", + "configfile", + "confirmfreeze", + "confirmunfreeze", + "connectedapp", + "conta", + "contentassets", + "contient", + "copado", + "coreutils", + "correctement", + "correspondance", + "cosmiconfig", + "cours", + "coverageformatters", + "cparra", + "createdcustomersuccessuser", + "createduser", + "crta", + "csvfile", + "csvfiles", + "currentgit", + "customindex", + "cvrg", + "cyclonedx", + "dans", + "datacategorygroup", + "datacategorygroups", + "datadotcom", + "dataflows", + "deactivateduser", + "deepseek", + "defaultdevhubusername", + "defaultmergerequest", + "defaultusername", + "definitionfile", + "delazeri", + "deletable", + "deleteafter", + "deloyment", + "demand\u00e9", + "deploydir", + "derroman", + "destructivechanges", + "destructivepackagexml", + "developername", + "developpement", + "devhubusername", + "dfgdlf", + "dhgfh", + "difftool", + "diff\u00e9rences", + "dimitrimonge", + "dlrs", + "dockerfilelintrc", + "domainbuilder", + "domcontentloaded", + "dompurify", + "duplicatefiles", + "d\u00e9commissionn\u00e9es", + "d\u00e9crire", + "d\u00e9ploiement", + "d\u00e9ploiements", + "d\u00e9ployer", + "d\u2019une", + "eatre", + "ects", + "effectuer", + "ehthumbs", + "elementsignored", + "elgohr", + "emailservices", + "emailvalid", + "emptyitems", + "endswitch", + "enduml", + "errorflow", + "erroronwarnings", + "errorprone", + "eslintcache", + "espace", + "eventtype", + "everytime", + "exceljs", + "excludefilter", + "excludeprofiles", + "excludeusers", + "exemple", + "expcloud", + "explainability", + "exttt", + "failiferror", + "farmhash", + "fflib", + "fichiers", + "fieldusage", + "filteredmetadatas", + "filterlanguage", + "filtersections", + "findduplicates", + "flexipage", + "flexipages", + 
"flowpositions", + "flowtest", + "flowtests", + "flowwww", + "fonctions", + "fontawesome", + "forceconnectedapps", + "forceignore", + "forcenew", + "forceoverwrite", + "fournir", + "fpath", + "freefont", + "freetype", + "friendlyname", + "fromcommit", + "fromorg", + "frontdoor", + "frozeuser", + "genai", + "genrsa", + "geodata", + "gims", + "gimsu", + "gitbeaker", + "gitbranch", + "gitdelta", + "gitea", + "gitlab", + "glcbt", + "glightbox", + "globby", + "globpattern", + "glpat", + "grafanacloud", + "granteduserpackagelicense", + "gtag", + "handleprocess", + "hardcode", + "hardis", + "hardisauthlogin", + "hardisconfigget", + "hardisdatatreeexport", + "hardisdocextractpermsetgroups", + "hardisdocplugingenerate", + "hardisgroupcom", + "hardismdapideploy", + "hardismisctoml", + "hardisorgconfigureconfig", + "hardisorgconfiguredata", + "hardisorgconfigurefiles", + "hardisorgconfiguremonitoring", + "hardisorgconnect", + "hardisorgcreate", + "hardisorgdataconfig", + "hardisorgdatadelete", + "hardisorgdataexport", + "hardisorgdataimport", + "hardisorgdiagnoselegacyapi", + "hardisorgfilesexport", + "hardisorgfixlistviewmine", + "hardisorgpurgeapexlog", + "hardisorgpurgeflow", + "hardisorgretrievepackageconfig", + "hardisorgretrievesourcesanalytics", + "hardisorgretrievesourcesdx", + "hardisorgretrievesourcesmetadata", + "hardisorgretrievesourcesretrofit", + "hardisorgselect", + "hardisorgtestapex", + "hardisorguseractivateinvalid", + "hardisorguseremailvalid", + "hardisorguserfreeze", + "hardisorgusermakeemailvalid", + "hardisorguserunfreeze", + "hardispackagecreate", + "hardispackageinstall", + "hardispackagemergexml", + "hardispackageversioncreate", + "hardispackageversionlist", + "hardispackageversionpromote", + "hardisprojectauditapiversion", + "hardisprojectauditcallincallout", + "hardisprojectauditduplicatefiles", + "hardisprojectauditremotesites", + "hardisprojectcleanemptyitems", + "hardisprojectcleanhiddenitems", + "hardisprojectcleanlistviews", + 
"hardisprojectcleanmanageditems", + "hardisprojectcleanminimizeprofiles", + "hardisprojectcleanorgmissingitems", + "hardisprojectcleanreferences", + "hardisprojectcleanretrievefolders", + "hardisprojectcleanstandarditems", + "hardisprojectcleansystemdebug", + "hardisprojectcleanxml", + "hardisprojectconfigureauth", + "hardisprojectconfiguredeployment", + "hardisprojectconvertprofilestopermsets", + "hardisprojectcreate", + "hardisprojectdeploysourcesdx", + "hardisprojectdeploysourcesmetadata", + "hardisprojectfixv", + "hardisprojectfixv53flexipages", + "hardisprojectgenerategitdelta", + "hardisprojectlint", + "hardisprojectworktasknew", + "hardisscratchcreate", + "hardisscratchdelete", + "hardisscratchpoolconfigure", + "hardisscratchpoolcreate", + "hardisscratchpoollocalauth", + "hardisscratchpoolrefresh", + "hardisscratchpoolreset", + "hardisscratchpoolview", + "hardisscratchpull", + "hardisscratchpush", + "hardissourcedeploy", + "hardissourcepush", + "hardissourceretrieve", + "hardisworknew", + "hardisworkpublish", + "hardisworkrefresh", + "hardisworkresetselection", + "hardisworksave", + "hardisworktaskcomplete", + "hardisworktasknew", + "hardisworktaskpublish", + "hardisworktaskrefresh", + "hardisworktasksave", + "hardisworkws", + "harfbuzz", + "hhmm", + "hiddenitems", + "high", + "historization", + "hostnames", + "hotfixes", + "hthe", + "htmlvalue", + "httprequest", + "huggingface", + "iframe", + "ignoreerrors", + "ignorerights", + "ignorewarnings", + "iids", + "includemanaged", + "includepackages", + "includeprofiles", + "inclure", + "initial", + "initialisation", + "inputfile", + "inputfolder", + "installable", + "installationkey", + "installationkeybypass", + "installkey", + "install\u00e9e", + "instanceupgrade", + "instanceurl", + "instantiate", + "inte", + "integ", + "interf", + "interf\u00e8re", + "int\u00e9gr\u00e9", + "invocablemethod", + "isactive", + "isbuildercontent", + "isfrozen", + "istest", + "javascripts", + "jeandupont", + "jetons", + "jlsfgd", 
+ "jscpd", + "jsforce", + "jsonparser", + "jsonschema", + "jstree", + "jwtkeyfile", + "keepmetadatatypes", + "keyout", + "keyv", + "keyvalue", + "kvdb", + "l'erreur", + "langage", + "langchain", + "lastndays", + "lcone", + "lcov", + "legacyapi", + "legetz", + "libasound", + "libatk", + "libatspi", + "libcups", + "libdbus", + "libdrm", + "libnspr", + "libnss", + "libx", + "libxcomposite", + "libxdamage", + "libxext", + "libxfixes", + "libxkbcommon", + "libxrandr", + "licenseidentifiers", + "licensetypes", + "lightningloginenroll", + "ligne", + "linkinator", + "liste", + "listview", + "listviewmine", + "listviews", + "li\u00e9s", + "localauth", + "localconfig", + "localecsv", + "localfields", + "localisation", + "localisations", + "localtest", + "loginasgrantedtopartnerbt", + "loglevel", + "longpaths", + "lors", + "lycheeignore", + "l\u2019acc\u00e8s", + "maintenant", + "makeemailvalid", + "mamasse", + "manageditems", + "manifestname", + "marketingappextension", + "marketingappextensions", + "materialx", + "matheus", + "maxfields", + "maxuserdisplay", + "mdapi", + "mdapipkg", + "medium", + "megalinter", + "mergerequest", + "mergetool", + "mergexml", + "metadatastatus", + "metadatatype", + "metas", + "minimizeprofiles", + "minimumapiversion", + "missingattributes", + "mgmt", + "mkdir", + "mkdocs", + "mmdc", + "mocharc", + "monitoringhardisgroup", + "mpyvo", + "mrkdwn", + "multiselect", + "mutingpermissionset", + "mutingpermissionsets", + "myclient", + "mycompany", + "myconfig", + "mypackage", + "mypassword", + "myproject", + "myusername", + "namespaceprefix", + "networkidle", + "newapiversion", + "nico", + "niveau", + "noclean", + "noconfig", + "nodelete", + "nodelta", + "nodir", + "nogit", + "noinsight", + "noprompt", + "nopull", + "nosmart", + "notestrunrunspecifiedtestsrunlocaltestsrunalltestsinorg", + "nothrow", + "notif", + "notificationtypes", + "notifs", + "notiftype", + "npmignore", + "npmrc", + "numberfailed", + "nvuillam", + "nycrc", + "n\u0153uds", + 
"oauthcustomscope", + "oauthcustomscopes", + "obligatoire", + "occurences", + "oclif", + "olas", + "ollama", + "onetimepin", + "openrc", + "op\u00e9ration", + "orgfreeze", + "orginstanceurl", + "orgmissingitems", + "orgs", + "outputdir", + "outputfile", + "outputfolder", + "outputstring", + "overridable", + "oxsecurity", + "package", + "packageconfig", + "packagenames", + "packagetype", + "packagexml", + "packagexmlfull", + "packagexmls", + "packagexmltargetorg", + "pagereference", + "papaparse", + "parseable", + "partage", + "partie", + "pascalcase", + "passin", + "passout", + "pckg", + "perfs", + "permissionset", + "permissionsetgroup", + "permissionsetgroups", + "permissionsets", + "permissivediff", + "permset", + "permsetgroups", + "personnalis", + "personnalis\u00e9", + "peut", + "picklist", + "pjson", + "plantuml", + "ployer", + "pmdconfig", + "pocessed", + "polltimeout", + "poolstorage", + "postdestructivechanges", + "postpack", + "postrun", + "posttest", + "predestructivechanges", + "preid", + "prepatch", + "preprod", + "prerun", + "preselection", + "pris", + "productionbranch", + "productrequest", + "profilestopermsets", + "profiletabs", + "projectname", + "projet", + "propri\u00e9t\u00e9", + "pr\u00e9c\u00e9dente", + "psga", + "psgc", + "psgm", + "psla", + "purgeondelete", + "pushmode", + "pymdown", + "pymdownx", + "qstn", + "quotepath", + "qwen", + "rbenv", + "recette", + "redislabs", + "refactorization", + "regexes", + "relatedentitytype", + "relatednotifications", + "releaseupdates", + "remotesites", + "removedonly", + "removepackagexml", + "remplacer", + "renderable", + "rendez", + "reportfile", + "reportname", + "resetpassword", + "resetsave", + "resetselection", + "restresource", + "resultformat", + "resultsdir", + "retrievefolders", + "retrievetargetdir", + "retrofitbranch", + "retrofited", + "retrofittargetbranch", + "returnactiveusers", + "revokeduserpackagelicense", + "revparse", + "rootdir", + "ruleset", + "runtests", + "r\u00e9cente", + 
"r\u00e9cup\u00e9rer", + "r\u00e9f\u00e9rence", + "r\u00e9pondre", + "r\u00e9ponse", + "r\u00e9serv\u00e9", + "r\u00e9soudre", + "salesforcecli", + "salesforcedx", + "samlssoconfig", + "samlssoconfigs", + "sarif", + "sauter", + "scolladon", + "scontrols", + "scratchorg", + "sdfx", + "secretlintignore", + "secur", + "securitytype", + "seealldata", + "semver", + "setalias", + "setdefaultdevhubusername", + "setdefaultusername", + "setext", + "sfdevrc", + "sfdmu", + "sfdoc", + "sfdx", + "sfdxhardis", + "sfdxurl", + "sforce", + "sfpowerkit", + "sharingcalc", + "signkey", + "singlepackage", + "skipauth", + "skiptransfo", + "slackapp", + "slackapps", + "slctd", + "smmry", + "snote", + "soapdeploy", + "soapenv", + "soaprequest", + "sobject", + "sobjectid", + "sobjects", + "sobjecttype", + "somecommand", + "somefolder", + "someparameter", + "somevalue", + "sont", + "soql", + "sourcepath", + "sourceusername", + "squidfunk", + "standardcontroller", + "standarditems", + "startchunknumber", + "startuml", + "staticresources", + "stefanzweifel", + "subflows", + "subfolders", + "suivant", + "suivante", + "superfences", + "suspiscious", + "sustn", + "sysparm", + "systemdebug", + "s\u00e9lection", + "tahabasri", + "targetbranch", + "targetdevhubusername", + "targetusername", + "tempfolder", + "templatestyle", + "templatetype", + "testcase", + "testkit", + "testlevel", + "testsuite", + "testsuites", + "texei", + "tgci", + "thvd", + "tocommit", + "tocstop", + "tomlfile", + "tomlfileencoding", + "toplevel", + "toujours", + "tovalidate", + "tracedebuginfowarnerrorfataltracedebuginfowarnerrorfatal", + "tracksource", + "trait\u00e9", + "transco", + "transfo", + "transfoconfig", + "travaillant", + "triggerable", + "triggerhandler", + "trivy", + "trivyignore", + "twemoji", + "t\u00eate", + "uitype", + "unallowed", + "unfiled", + "unfrozeuser", + "unicity", + "uniquement", + "unitcoverage", + "unmanaged", + "unpackaged", + "unparse", + "unrstanding", + "unstage", + "unstaging", + 
"unusedlicenses", + "unusedmetadata", + "unusedmetadatas", + "unusedusers", + "updatedall", + "updateddatedexchrate", + "updatedfile", + "uriclear", + "urlonly", + "usagestop", + "usedonly", + "useraccesspolicies", + "useraccesspolicy", + "useremailchangesent", + "userid", + "userlicense", + "usetoolingapi", + "utilis", + "utilisant", + "utilisateur", + "utilisateurs", + "utilis\u00e9", + "uuidv", + "validateddeployrequestid", + "venv", + "veuillez", + "viewdefinitions", + "visualiser", + "vous", + "vuln", + "wapp", + "wcomp", + "wdash", + "wdpr", + "weblink", + "weblinks", + "webservice", + "webstoretemplate", + "whitespaces", + "wiql", + "wireit", + "wlens", + "workdotcom", + "workitems", + "xmark", + "xmldec", + "xmlnode", + "xoxb", + "xpaths", + "xslx", + "xunit", + "yamioliva", + "yamllint", + "yourinstanceurl", + "zipfile", + "\u00c9xito", + "\u00e9l\u00e9ments", + "\u00eates", + "\u00eatre", + "\u00ecntegration" + ] +} \ No newline at end of file diff --git a/.github/linters/.gitleaks.toml b/.github/linters/.gitleaks.toml new file mode 100644 index 000000000..525d9d85c --- /dev/null +++ b/.github/linters/.gitleaks.toml @@ -0,0 +1,20 @@ + +title = "gitleaks config" + +[extend] +# useDefault will extend the base configuration with the default gitleaks config: +# https://github.com/zricethezav/gitleaks/blob/master/config/gitleaks.toml +useDefault = true + +[allowlist] + description = "Allowlisted files" + paths = [ + '''.automation/test''', + '''megalinter-reports''', + '''.github/linters''', + '''node_modules''', + '''docs''', + '''.mypy_cache''', + '''(.*?)gitleaks\.toml$''', + '''(?i)(.*?)(png|jpeg|jpg|gif|doc|docx|pdf|bin|xls|xlsx|pyc|zip)$''', + '''(go.mod|go.sum)$'''] \ No newline at end of file diff --git a/.github/workflows/build-deploy-docs.yml b/.github/workflows/build-deploy-docs.yml index 19540ca4e..a9bf5c778 100644 --- a/.github/workflows/build-deploy-docs.yml +++ b/.github/workflows/build-deploy-docs.yml @@ -12,19 +12,19 @@ jobs: steps: - uses: 
actions/checkout@v4 # Build doc with sfdx-hardis - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 20 - run: yarn - run: yarn prepack - run: npm i @salesforce/cli -g - - run: echo y|sfdx plugins:install sfdx-hardis - - run: sfdx hardis:doc:plugin:generate + - run: echo y|sf plugins install sfdx-hardis@beta + - run: sf hardis:doc:plugin:generate # Deploy docs with mkdocs-material - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v5 with: python-version: 3.x - - run: pip install mkdocs-material mdx_truly_sane_lists json-schema-for-humans mkdocs-glightbox==0.3.2 pymdown-extensions==9.1 + - run: pip install mkdocs-material mdx_truly_sane_lists json-schema-for-humans mkdocs-glightbox - run: mkdocs gh-deploy --force git-sync: diff --git a/.github/workflows/create-github-release.yml.not-used b/.github/workflows/create-github-release.yml.not-used new file mode 100644 index 000000000..000d54eb5 --- /dev/null +++ b/.github/workflows/create-github-release.yml.not-used @@ -0,0 +1,34 @@ +name: create-github-release + +on: + push: + branches: + - main + - prerelease/** + tags-ignore: + - "*" + workflow_dispatch: + inputs: + prerelease: + type: string + description: "Name to use for the prerelease: beta, dev, etc. NOTE: If this is already set in the package.json, it does not need to be passed in here." + +jobs: + release: + uses: salesforcecli/github-workflows/.github/workflows/create-github-release.yml@main + secrets: + SVC_CLI_BOT_GITHUB_TOKEN: ${{ secrets.SVC_CLI_BOT_GITHUB_TOKEN }} + with: + prerelease: ${{ inputs.prerelease }} + # If this is a push event, we want to skip the release if there are no semantic commits + # However, if this is a manual release (workflow_dispatch), then we want to disable skip-on-empty + # This helps recover from forgetting to add semantic commits ('fix:', 'feat:', etc.) 
+ skip-on-empty: ${{ github.event_name == 'push' }} + # docs: + # # Most repos won't use this + # # Depends on the 'release' job to avoid git collisions, not for any functionality reason + # needs: release + # secrets: + # SVC_CLI_BOT_GITHUB_TOKEN: ${{ secrets.SVC_CLI_BOT_GITHUB_TOKEN }} + # if: ${{ github.ref_name == 'main' }} + # uses: salesforcecli/github-workflows/.github/workflows/publishTypedoc.yml@main diff --git a/.github/workflows/deploy-ALPHA.yml b/.github/workflows/deploy-ALPHA.yml index d7db5962e..5c2197570 100644 --- a/.github/workflows/deploy-ALPHA.yml +++ b/.github/workflows/deploy-ALPHA.yml @@ -26,17 +26,18 @@ jobs: steps: - uses: actions/checkout@v4 # Setup .npmrc file to publish to npm - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 20 registry-url: "https://registry.npmjs.org" always-auth: true # Defaults to the user or organization that owns the workflow file scope: "hardisgroupcom" - - run: yarn - - run: yarn config set version-git-tag false && tsc -b + - run: yarn install --frozen-lockfile && yarn run compile + - run: yarn config set version-git-tag false - run: ALPHAID=$(date '+%Y%m%d%H%M') && yarn version --prepatch --preid="alpha$ALPHAID" - - run: yarn config set network-timeout 300000 && yarn publish --tag alpha + - run: yarn config set network-timeout 300000 + - run: yarn publish --tag alpha env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} @@ -53,26 +54,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ 
github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Alpha) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile @@ -94,9 +95,7 @@ jobs: image-ref: "docker.io/hardisgroupcom/sfdx-hardis:alpha" format: "table" exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln severity: "CRITICAL,HIGH" push_alpha_to_registry_sfdx_recommended: @@ -112,26 +111,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Alpha recommended) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . 
file: Dockerfile @@ -153,7 +152,119 @@ jobs: image-ref: "docker.io/hardisgroupcom/sfdx-hardis:alpha-sfdx-recommended" format: "table" exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln + severity: "CRITICAL,HIGH" + + push_alpha_ubuntu_to_registry: + name: Push alpha Ubuntu Docker image to Docker Hub + needs: deploy + runs-on: ubuntu-latest + permissions: + packages: write + environment: + name: alpha + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Alpha) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=alpha + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:alpha + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:alpha + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: "docker.io/hardisgroupcom/sfdx-hardis-ubuntu:alpha" + format: "table" + exit-code: "1" + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + push_alpha_ubuntu_to_registry_sfdx_recommended: + name: Push alpha Ubuntu Docker image to Docker Hub (with @salesforce/cli version recommended by hardis) + needs: deploy + runs-on: ubuntu-latest + permissions: + packages: write + environment: + name: alpha + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Alpha recommended) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=alpha + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:alpha-sfdx-recommended + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:alpha-sfdx-recommended + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: "docker.io/hardisgroupcom/sfdx-hardis-ubuntu:alpha-sfdx-recommended" + format: "table" + exit-code: "1" + vuln-type: "os,library" severity: "CRITICAL,HIGH" diff --git a/.github/workflows/deploy-CANARY.yml b/.github/workflows/deploy-CANARY.yml index e112f82b2..75d265782 100644 --- a/.github/workflows/deploy-CANARY.yml +++ b/.github/workflows/deploy-CANARY.yml @@ -26,14 +26,14 @@ jobs: steps: - uses: actions/checkout@v4 # Setup .npmrc file to publish to npm - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 20 registry-url: "https://registry.npmjs.org" always-auth: true # Defaults to the user or organization that owns the workflow file scope: "hardisgroupcom" - - run: yarn + - run: yarn install --frozen-lockfile - run: yarn config set version-git-tag false && tsc -b - run: CANARYID=$(date '+%Y%m%d%H%M') && yarn version --prepatch --preid="canary$CANARYID" - run: yarn config set network-timeout 300000 && yarn publish --tag canary @@ -53,26 +53,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ 
github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Canary) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile @@ -94,7 +94,62 @@ jobs: image-ref: "docker.io/hardisgroupcom/sfdx-hardis:canary" format: "table" exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln + severity: "CRITICAL,HIGH" + + push_canary_ubuntu_to_registry: + name: Push canary Ubuntu Docker image to Docker Hub + needs: deploy + runs-on: ubuntu-latest + permissions: + packages: write + environment: + name: canary + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Canary) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=canary + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:canary + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:canary + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: "docker.io/hardisgroupcom/sfdx-hardis-ubuntu:canary" + format: "table" + exit-code: "1" + vuln-type: "os,library" severity: "CRITICAL,HIGH" diff --git a/.github/workflows/deploy-PROD.yml b/.github/workflows/deploy-PROD.yml index 57cc1d181..546cc24dd 100644 --- a/.github/workflows/deploy-PROD.yml +++ b/.github/workflows/deploy-PROD.yml @@ -26,14 +26,14 @@ jobs: steps: - uses: actions/checkout@v4 # Setup .npmrc file to publish to npm - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 20 registry-url: "https://registry.npmjs.org" always-auth: true # Defaults to the user or organization that owns the workflow file scope: "hardisgroupcom" - - run: yarn + - run: yarn install --frozen-lockfile - run: yarn config set version-git-tag false && tsc -b - run: BETAID=$(date '+%Y%m%d%H%M') && yarn version --prepatch --preid="beta$BETAID" - run: yarn config set network-timeout 300000 && yarn publish --tag beta @@ -46,6 +46,7 @@ jobs: runs-on: ubuntu-latest permissions: packages: write + security-events: write # Added permission for security-events environment: name: beta steps: @@ -53,26 +54,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container 
Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Beta) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile @@ -91,14 +92,59 @@ jobs: - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master with: - image-ref: "docker.io/hardisgroupcom/sfdx-hardis:beta" + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:beta format: "table" + output: sfdx-hardis-trivy-image-beta-results.txt exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln severity: "CRITICAL,HIGH" + - name: Generate Trivy security report + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:beta + format: "sarif" + output: sfdx-hardis-trivy-image-beta-results.sarif + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate SBOM (CycloneDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:beta + format: "cyclonedx" + output: "sfdx-hardis-image-sbom-beta.cyclonedx.json" + scan-type: "image" + + - name: Generate SBOM (SPDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:beta + format: "spdx-json" + output: "sfdx-hardis-image-sbom-beta.spdx.json" + scan-type: "image" + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: sfdx-hardis-trivy-image-beta-results.sarif + category: "trivy" + + - name: Upload SBOM and Security artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: security-and-sbom-documents + path: | + sfdx-hardis-image-sbom-beta.cyclonedx.json + sfdx-hardis-image-sbom-beta.spdx.json + sfdx-hardis-trivy-image-beta-results.txt 
+ sfdx-hardis-trivy-image-beta-results.sarif + push_beta_to_registry_sfdx_recommended: name: Push Beta Docker image to Docker Hub (with @salesforce/cli version recommended by hardis) needs: deploy @@ -112,26 +158,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Beta recommended) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile @@ -153,7 +199,167 @@ jobs: image-ref: "docker.io/hardisgroupcom/sfdx-hardis:beta-sfdx-recommended" format: "table" exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln + severity: "CRITICAL,HIGH" + + push_beta_ubuntu_to_registry: + name: Push Beta Ubuntu Docker image to Docker Hub + needs: deploy + runs-on: ubuntu-latest + permissions: + packages: write + security-events: write # Added permission for security-events + environment: + name: beta + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ 
secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Beta) + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=beta + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:beta + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:beta + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:beta + format: "table" + output: sfdx-hardis-ubuntu-trivy-image-beta-results.txt + exit-code: "1" + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate Trivy security report + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:beta + format: "sarif" + output: sfdx-hardis-ubuntu-trivy-image-beta-results.sarif + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate SBOM (CycloneDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:beta + format: "cyclonedx" + output: "sfdx-hardis-ubuntu-image-sbom-beta.cyclonedx.json" + scan-type: "image" + + - name: Generate SBOM (SPDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:beta + format: "spdx-json" + output: "sfdx-hardis-ubuntu-image-sbom-beta.spdx.json" + scan-type: "image" + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: sfdx-hardis-ubuntu-trivy-image-beta-results.sarif + category: "trivy-ubuntu" + + - name: Upload SBOM and Security artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: security-and-sbom-documents-ubuntu + path: | + 
sfdx-hardis-ubuntu-image-sbom-beta.cyclonedx.json + sfdx-hardis-ubuntu-image-sbom-beta.spdx.json + sfdx-hardis-ubuntu-trivy-image-beta-results.txt + sfdx-hardis-ubuntu-trivy-image-beta-results.sarif + + push_beta_ubuntu_to_registry_sfdx_recommended: + name: Push Beta Ubuntu Docker image to Docker Hub (with @salesforce/cli version recommended by hardis) + needs: deploy + runs-on: ubuntu-latest + permissions: + packages: write + environment: + name: beta + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Beta recommended) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=beta + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:beta-sfdx-recommended + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:beta-sfdx-recommended + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: "docker.io/hardisgroupcom/sfdx-hardis-ubuntu:beta-sfdx-recommended" + format: "table" + exit-code: "1" + vuln-type: "os,library" severity: "CRITICAL,HIGH" diff --git a/.github/workflows/deploy-RELEASE.yml b/.github/workflows/deploy-RELEASE.yml index 938d338b1..7ebc118a2 100644 --- a/.github/workflows/deploy-RELEASE.yml +++ b/.github/workflows/deploy-RELEASE.yml @@ -25,14 +25,14 @@ jobs: steps: - uses: actions/checkout@v4 # Setup .npmrc file to publish to npm - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 20 registry-url: "https://registry.npmjs.org" # Defaults to the user or organization that owns the workflow file scope: "nvuillam" - - run: yarn - - run: yarn config set network-timeout 300000 && yarn publish || echo "Unable to publish package version. Or published in background because of NPM bug ?" 
+ - run: yarn install --frozen-lockfile + - run: yarn config set network-timeout 300000 && yarn publish env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} @@ -42,6 +42,7 @@ jobs: runs-on: ubuntu-latest permissions: packages: write + security-events: write # Added permission for security-events environment: name: release steps: @@ -49,26 +50,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Latest) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . 
file: Dockerfile @@ -89,14 +90,59 @@ jobs: - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@master with: - image-ref: "docker.io/hardisgroupcom/sfdx-hardis:latest" + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:${{ github.event.release.tag_name }} format: "table" + output: sfdx-hardis-trivy-image-${{ github.event.release.tag_name }}-results.txt exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln severity: "CRITICAL,HIGH" + - name: Generate Trivy security report + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:${{ github.event.release.tag_name }} + format: "sarif" + output: sfdx-hardis-trivy-image-${{ github.event.release.tag_name }}-results.sarif + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate SBOM (CycloneDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:${{ github.event.release.tag_name }} + format: "cyclonedx" + output: "sfdx-hardis-image-sbom-${{ github.event.release.tag_name }}.cyclonedx.json" + scan-type: "image" + + - name: Generate SBOM (SPDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis:${{ github.event.release.tag_name }} + format: "spdx-json" + output: "sfdx-hardis-image-sbom-${{ github.event.release.tag_name }}.spdx.json" + scan-type: "image" + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: sfdx-hardis-trivy-image-${{ github.event.release.tag_name }}-results.sarif + category: "trivy" + + - name: Upload SBOM and Security artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: security-and-sbom-documents + path: | + sfdx-hardis-image-sbom-${{ github.event.release.tag_name }}.cyclonedx.json + sfdx-hardis-image-sbom-${{ github.event.release.tag_name 
}}.spdx.json + sfdx-hardis-trivy-image-${{ github.event.release.tag_name }}-results.txt + sfdx-hardis-trivy-image-${{ github.event.release.tag_name }}-results.sarif + push_to_registry_sfdx_recommended: name: Push Docker image to Docker Hub (with @salesforce/cli version recommended by hardis) needs: deploy @@ -110,26 +156,26 @@ jobs: uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build & Push Docker Image (Latest) - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . 
file: Dockerfile @@ -151,7 +197,169 @@ jobs: image-ref: "docker.io/hardisgroupcom/sfdx-hardis:latest-sfdx-recommended" format: "table" exit-code: "1" - ignore-unfixed: true vuln-type: "os,library" - security-checks: vuln + severity: "CRITICAL,HIGH" + + push_ubuntu_to_registry: + name: Push Ubuntu Docker image to Docker Hub + needs: deploy + runs-on: ubuntu-latest + permissions: + packages: write + security-events: write # Added permission for security-events + environment: + name: release + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Latest) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=latest + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:${{ github.event.release.tag_name }} + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:latest + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:${{ github.event.release.tag_name }} + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:latest + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:${{ github.event.release.tag_name }} + format: "table" + output: sfdx-hardis-ubuntu-trivy-image-${{ github.event.release.tag_name }}-results.txt + exit-code: "1" + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate Trivy security report + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:${{ github.event.release.tag_name }} + format: "sarif" + output: sfdx-hardis-ubuntu-trivy-image-${{ github.event.release.tag_name }}-results.sarif + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate SBOM (CycloneDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:${{ github.event.release.tag_name }} + format: "cyclonedx" + output: "sfdx-hardis-ubuntu-image-sbom-${{ github.event.release.tag_name }}.cyclonedx.json" + scan-type: "image" + + - name: Generate SBOM (SPDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:${{ github.event.release.tag_name }} + format: "spdx-json" + output: "sfdx-hardis-ubuntu-image-sbom-${{ github.event.release.tag_name }}.spdx.json" + scan-type: "image" + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + 
with: + sarif_file: sfdx-hardis-ubuntu-trivy-image-${{ github.event.release.tag_name }}-results.sarif + category: "trivy-ubuntu" + + - name: Upload SBOM and Security artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: security-and-sbom-documents-ubuntu + path: | + sfdx-hardis-ubuntu-image-sbom-${{ github.event.release.tag_name }}.cyclonedx.json + sfdx-hardis-ubuntu-image-sbom-${{ github.event.release.tag_name }}.spdx.json + sfdx-hardis-ubuntu-trivy-image-${{ github.event.release.tag_name }}-results.txt + sfdx-hardis-ubuntu-trivy-image-${{ github.event.release.tag_name }}-results.sarif + + push_ubuntu_to_registry_sfdx_recommended: + name: Push Ubuntu Docker image to Docker Hub (with @salesforce/cli version recommended by hardis) + needs: deploy + runs-on: ubuntu-latest + environment: + name: release + permissions: + packages: write + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & Push Ubuntu Docker Image (Latest recommended) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile-ubuntu + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=latest + SFDX_CLI_VERSION=latest + load: false + push: true + secrets: | + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} + tags: | + docker.io/hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended + ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: "docker.io/hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended" + format: "table" + exit-code: "1" + vuln-type: "os,library" severity: "CRITICAL,HIGH" diff --git a/.github/workflows/devScripts.yml b/.github/workflows/devScripts.yml new file mode 100644 index 000000000..b7630fdcd --- /dev/null +++ b/.github/workflows/devScripts.yml @@ -0,0 +1,11 @@ +name: devScripts +on: + workflow_dispatch: + schedule: + - cron: '50 6 * * 0' + +jobs: + update: + uses: salesforcecli/github-workflows/.github/workflows/devScriptsUpdate.yml@main + secrets: + SVC_CLI_BOT_GITHUB_TOKEN: ${{ secrets.SVC_CLI_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/docker-security-scan.yml b/.github/workflows/docker-security-scan.yml new file mode 100644 index 000000000..16c56f107 --- /dev/null +++ b/.github/workflows/docker-security-scan.yml @@ -0,0 +1,90 @@ +name: Docker Security Scan +on: + push: + pull_request: + workflow_dispatch: + +jobs: + docker-security-scan: + name: Build and Scan Docker Image + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image + uses: docker/build-push-action@v6 + with: + context: . 
+ file: Dockerfile + platforms: linux/amd64 + build-args: | + SFDX_HARDIS_VERSION=latest + SFDX_CLI_VERSION=latest + load: true + push: false + tags: | + sfdx-hardis:security-scan + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: "sfdx-hardis:security-scan" + format: "table" + output: "sfdx-hardis-trivy-image-results.txt" + exit-code: "1" + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate Trivy security report + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: "sfdx-hardis:security-scan" + format: "sarif" + output: "sfdx-hardis-trivy-image-results.sarif" + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + + - name: Generate SBOM (CycloneDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: "sfdx-hardis:security-scan" + format: "cyclonedx" + output: "sfdx-hardis-image-sbom.cyclonedx.json" + scan-type: "image" + + - name: Generate SBOM (SPDX format) + uses: aquasecurity/trivy-action@master + if: always() + with: + image-ref: "sfdx-hardis:security-scan" + format: "spdx-json" + output: "sfdx-hardis-image-sbom.spdx.json" + scan-type: "image" + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: "sfdx-hardis-trivy-image-results.sarif" + category: "trivy" + + - name: Upload SBOM artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: security-and-sbom-documents + path: | + sfdx-hardis-image-sbom.cyclonedx.json + sfdx-hardis-image-sbom.spdx.json + sfdx-hardis-trivy-image-results.txt + sfdx-hardis-trivy-image-results.sarif diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml index 1031e75a1..bce75b092 100644 --- a/.github/workflows/mega-linter.yml +++ b/.github/workflows/mega-linter.yml @@ -32,20 +32,35 @@ jobs: id: ml # You can override Mega-Linter flavor used to have faster performances # More 
info at https://megalinter.io/latest/flavors/ - uses: oxsecurity/megalinter/flavors/javascript@latest + uses: oxsecurity/megalinter/flavors/javascript@beta env: # All available variables are described in documentation # https://megalinter.io/latest/config-file/ VALIDATE_ALL_CODEBASE: true # Set ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }} to validate only diff with master branch + # Disable LLM Advisor for bot PRs (dependabot, renovate, etc.) + LLM_ADVISOR_ENABLED: >- + ${{ + github.event_name != 'pull_request' || + ( + github.event.pull_request.user.login != 'dependabot[bot]' && + github.event.pull_request.user.login != 'renovate[bot]' && + github.event.pull_request.user.login != 'github-actions[bot]' && + !startsWith(github.event.pull_request.user.login, 'dependabot') && + !startsWith(github.event.pull_request.user.login, 'renovate') + ) + }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY + # Upload Mega-Linter artifacts - name: Archive production artifacts if: success() || failure() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Mega-Linter reports + include-hidden-files: "true" path: | megalinter-reports mega-linter.log diff --git a/.github/workflows/onRelease.yml.not-used b/.github/workflows/onRelease.yml.not-used new file mode 100644 index 000000000..7f1237e1d --- /dev/null +++ b/.github/workflows/onRelease.yml.not-used @@ -0,0 +1,31 @@ +name: publish +on: + release: + # both release and prereleases + types: [published] + # support manual release in case something goes wrong and needs to be repeated or tested + workflow_dispatch: + inputs: + tag: + description: github tag that needs to publish + type: string + required: true +jobs: + getDistTag: + outputs: + tag: ${{ steps.distTag.outputs.tag }} + runs-on: ubuntu-latest + steps: + - uses: 
actions/checkout@v3 + with: + ref: ${{ github.event.release.tag_name || inputs.tag }} + - uses: salesforcecli/github-workflows/.github/actions/getPreReleaseTag@main + id: distTag + npm: + uses: salesforcecli/github-workflows/.github/workflows/npmPublish.yml@main + needs: [getDistTag] + with: + tag: ${{ needs.getDistTag.outputs.tag || 'latest' }} + githubTag: ${{ github.event.release.tag_name || inputs.tag }} + + secrets: inherit diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a2e8d5c35..3bd27a7a5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,38 +1,27 @@ ---- -# -# Documentation: -# https://help.github.com/en/articles/workflow-syntax-for-github-actions -# - -####################################### -# Start the job on all push to master # -####################################### -name: "Test" +name: tests on: - push: # Comment this line to trigger action only on pull-requests (not recommended if you don't pay for GH Actions) - pull_request: - branches: [master, main] + push: + branches-ignore: [main] + workflow_dispatch: -############### -# Set the Job # -############### jobs: - test: - runs-on: ubuntu-latest - if: (github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository) && !contains(github.event.head_commit.message, 'skip deploy') - steps: - - uses: actions/checkout@v4 - # Setup .npmrc file to publish to npm - - uses: actions/setup-node@v3 - with: - node-version: 20 - registry-url: "https://registry.npmjs.org" - # Defaults to the user or organization that owns the workflow file - scope: "nvuillam" - - run: yarn - - run: yarn prepack - - run: yarn test -# - name: Submitting code coverage to codecov -# run: | -# ./node_modules/.bin/nyc report --reporter text-lcov > coverage.lcov -# curl -s https://codecov.io/bash | bash + linux-unit-tests: + uses: salesforcecli/github-workflows/.github/workflows/unitTestsLinux.yml@main + with: + skipTsDepCheck: true + + 
windows-unit-tests: + uses: salesforcecli/github-workflows/.github/workflows/unitTestsWindows.yml@main + + nuts: + needs: + - linux-unit-tests + - windows-unit-tests + uses: salesforcecli/github-workflows/.github/workflows/nut.yml@main + secrets: inherit + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + fail-fast: false + with: + os: ${{ matrix.os }} diff --git a/.gitignore b/.gitignore index aaeb4ab33..5f9d4708c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,29 +1,55 @@ -*-debug.log -*-error.log -/. -/.sfdx -/dist -/lib -/package-lock.json -/tmp -node_modules -report -sfdx-hardis-*.tgz +# -- CLEAN +tmp/ +# use yarn by default, so ignore npm +package-lock.json + +# MacOS system files +.DS_Store .nyc_output/ -site/ +# never checking npm config +.npmrc -config/user/ +# debug logs +npm-error.log +yarn-error.log -hardis-report/ -.nyc_output/ +# compile source +lib -site/ +# test artifacts +*xunit.xml +*checkstyle.xml +*unitcoverage +.nyc_output +coverage +test_session* -config/user/ +# ignore sfdx-trust files +*.tgz +*.sig +package.json.bak. -megalinter-reports/ +npm-shrinkwrap.json +oclif.manifest.json +oclif.lock + +# -- CLEAN ALL +*.tsbuildinfo +.eslintcache +.wireit +node_modules + +# -- +# put files here you don't want cleaned with sf-clean + +# os specific files +.DS_Store .idea + +hardis-report/ +megalinter-reports/ +config/user/ diff --git a/.husky/commit-msg b/.husky/commit-msg new file mode 100644 index 000000000..c21981371 --- /dev/null +++ b/.husky/commit-msg @@ -0,0 +1,4 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +yarn commitlint --edit || true diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 000000000..21c01980f --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,5 @@ +#!/bin/sh +. 
"$(dirname "$0")/_/husky.sh" + +yarn lint # && yarn pretty-quick --staged # Removed because it destroys markdowns table data +node build.cjs diff --git a/.husky/pre-push b/.husky/pre-push new file mode 100644 index 000000000..56b63c314 --- /dev/null +++ b/.husky/pre-push @@ -0,0 +1,4 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +yarn build && yarn test diff --git a/.jscpd.json b/.jscpd.json index 2573b7443..919360598 100644 --- a/.jscpd.json +++ b/.jscpd.json @@ -1,6 +1,9 @@ { "threshold": 0, - "reporters": ["html", "markdown"], + "reporters": [ + "html", + "markdown" + ], "ignore": [ "**/node_modules/**", "**/.git/**", @@ -19,6 +22,8 @@ "**/apex.ts", "**/access.ts", "**/audittrail.ts", + "**/auth.ts", + "**/deployTips.ts", "**/freeze.ts", "**/legacyapi.ts", "**/unfreeze.ts", @@ -28,4 +33,4 @@ "**/*.yml", "**/*.xml" ] -} +} \ No newline at end of file diff --git a/.lintstagedrc.cjs b/.lintstagedrc.cjs new file mode 100644 index 000000000..9a99d41cd --- /dev/null +++ b/.lintstagedrc.cjs @@ -0,0 +1,3 @@ +module.exports = { + '**/*.{js,json,md}?(x)': () => 'npm run reformat', +}; diff --git a/.mega-linter.yml b/.mega-linter.yml index dc12ffaaf..6f168d63b 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -11,24 +11,36 @@ DISABLE: - HTML - JAVASCRIPT DISABLE_LINTERS: + - BASH_EXEC - JSON_JSONLINT + - JSON_PRETTIER + - MARKDOWN_MARKDOWNLINT - MARKDOWN_MARKDOWN_LINK_CHECK + - JSON_NPM_PACKAGE_JSON_LINT - SPELL_PROSELINT - REPOSITORY_GIT_DIFF - REPOSITORY_SEMGREP - REPOSITORY_SECRETLINT + - REPOSITORY_SYFT + - REPOSITORY_TRIVY_SBOM + - TYPESCRIPT_PRETTIER - TYPESCRIPT_STANDARD + - YAML_PRETTIER + - YAML_YAMLLINT + - XML_XMLLINT DISABLE_ERRORS_LINTERS: - - MARKDOWN_MARKDOWN_LINK_CHECK - REPOSITORY_GRYPE - SPELL_LYCHEE + GITHUB_STATUS_REPORTER: false MARKDOWN_MARKDOWN_LINK_CHECK_FILTER_REGEX_EXCLUDE: (README\.md) +BASH_SHELLCHECK_FILTER_REGEX_EXCLUDE: (husky) YAML_V8R_DISABLE_ERRORS: true FILTER_REGEX_EXCLUDE: "(vscode|defaults|workarounds)" 
TYPESCRIPT_DEFAULT_STYLE: prettier DOCKERFILE_HADOLINT_ARGUMENTS: "--ignore DL3007 --ignore DL3016 --ignore DL3018 --ignore DL4006" REPOSITORY_GITLEAKS_DISABLE_ERRORS_IF_LESS_THAN: 11 +ACTION_ACTIONLINT_DISABLE_ERRORS_IF_LESS_THAN: 2 SHOW_ELAPSED_TIME: true FILEIO_REPORTER: false SPELL_LYCHEE_ARGUMENTS: @@ -36,3 +48,14 @@ SPELL_LYCHEE_ARGUMENTS: - "429" SPELL_LYCHEE_UNSECURED_ENV_VARIABLES: - GITHUB_TOKEN +SPELL_FILTER_REGEX_EXCLUDE: "Dockerfile(-.+)?" + +REPORTERS_MARKDOWN_SUMMARY_TYPE: table-sections +# LLM Advisor +LLM_ADVISOR_LEVEL: error + +# Choose your provider and model +LLM_PROVIDER: openai # openai, anthropic, google, huggingface, mistral, deepseek, grok, ollama +LLM_MODEL_NAME: gpt-4.1-mini +LLM_MAX_TOKENS: 1000 +LLM_TEMPERATURE: 0.1 diff --git a/.mocharc.json b/.mocharc.json new file mode 100644 index 000000000..8311bd3e1 --- /dev/null +++ b/.mocharc.json @@ -0,0 +1,8 @@ +{ + "require": ["ts-node/register"], + "watch-extensions": "ts", + "recursive": true, + "reporter": "spec", + "timeout": 600000, + "node-option": ["loader=ts-node/esm"] +} diff --git a/.nycrc b/.nycrc new file mode 100644 index 000000000..eeb8a0b18 --- /dev/null +++ b/.nycrc @@ -0,0 +1,7 @@ +{ + "check-coverage": true, + "lines": 50, + "statements": 50, + "functions": 50, + "branches": 50 +} diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 000000000..0e0bbe8a0 --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,6 @@ +import salesforcePrettierConfig from "@salesforce/prettier-config"; + +export default { + ...salesforcePrettierConfig, + printWidth: 150, +}; \ No newline at end of file diff --git a/.sfdevrc.json b/.sfdevrc.json new file mode 100644 index 000000000..66121e198 --- /dev/null +++ b/.sfdevrc.json @@ -0,0 +1,10 @@ +{ + "test": { + "testsPath": "test/**/*.test.ts" + }, + "wireit": { + "test": { + "dependencies": ["test:compile", "test:only", "lint"] + } + } +} diff --git a/.trivyignore b/.trivyignore index c470491ff..e4756a785 100644 --- a/.trivyignore +++ 
b/.trivyignore @@ -1,61 +1,67 @@ -CVE-2020-28469 -CVE-2020-7777 -CVE-2020-7792 -CVE-2020-8203 -CVE-2021-23337 -CVE-2021-23352 -CVE-2021-23440 -CVE-2021-23566 -CVE-2021-32796 -CVE-2021-33502 -CVE-2021-33623 -CVE-2021-35065 -CVE-2021-3711 -CVE-2021-3749 -CVE-2021-3807 -CVE-2021-3918 -CVE-2021-43138 -CVE-2022-2097 -CVE-2022-21213 +# @babel/runtime https://avd.aquasec.com/nvd/2025/cve-2025-27789/ +# Risk: High CPU Usage, not risky for sfdx-hardis +CVE-2025-27789 + +# @octokit/plugin-paginate-rest https://avd.aquasec.com/nvd/cve-2025-25288 +# Risk: High CPU Usage, not risky for sfdx-hardis +CVE-2025-25288 + +# @octokit/request https://avd.aquasec.com/nvd/cve-2025-25290 +# Risk: High CPU Usage, not risky for sfdx-hardis +CVE-2025-25290 + +# @octokit/request-error https://avd.aquasec.com/nvd/2025/cve-2025-25289/ +# Risk: High CPU Usage, not risky for sfdx-hardis +CVE-2025-25289 + +# tar-fs https://avd.aquasec.com/nvd/2024/cve-2024-12905/ +# Risk: Improper Limitation of a Pathname to a Restricted Directory (Path Traversal), not risky for sfdx-hardis +CVE-2024-12905 + +# tar-fs https://avd.aquasec.com/nvd/cve-2025-48387 +# Risk : Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal'), not risky for sfdx-hardis +CVE-2025-48387 + +# cross-spawn https://avd.aquasec.com/nvd/cve-2024-21538 +# Risk: Denial of Service, not risky for sfdx-hardis +CVE-2024-21538 + +# marked https://avd.aquasec.com/nvd/cve-2022-21680 +# Risk: Denial of Service, not risky for sfdx-hardis CVE-2022-21680 + +# marked https://avd.aquasec.com/nvd/cve-2022-21681 +# Risk: Denial of Service, not risky for sfdx-hardis CVE-2022-21681 -CVE-2022-23529 -CVE-2022-23539 -CVE-2022-23540 -CVE-2022-23541 -CVE-2022-24433 -CVE-2022-25881 -CVE-2022-25883 -CVE-2022-29078 -CVE-2022-33987 -CVE-2022-3517 -CVE-2022-37616 -CVE-2022-38900 -CVE-2022-39353 -CVE-2023-0286 -CVE-2023-0842 -CVE-2023-2650 -CVE-2023-26115 -CVE-2023-26136 -CVE-2023-28155 -CVE-2023-29017 -CVE-2023-29199 -CVE-2023-30533 
-CVE-2023-30547 -CVE-2023-32314 -CVE-2023-34104 -CVE-2023-38704 -CVE-2023-45133 -CVE-2023-45857 -CVE-2023-5363 -CVE-2023-5678 -CVE-2024-22363 -CVE-2024-33883 -CVE-2024-37890 -CVE-2024-4067 -CVE-2024-4068 - -DS001 + +# sqlite-libs https://avd.aquasec.com/nvd/2025/cve-2025-29087/ +# Risk: Memory leak, not risky for sfdx-hardis +CVE-2025-29087 + +# lodash.template https://avd.aquasec.com/nvd/cve-2021-23337 +# Risk: Command injection via template. The dependency is located within Core SF Cli library (.local/share/sf/node_modules/lodash.template/package.json) +# so is applicable to all uses of Salesforce CLI, with or without sfdx-hardis +# Let's trust core Salesforce team to have assessed that this CVE is not risky in their context, otherwise it would mean that all uses of Salesforce CLI in the world would be a security risk ^^ +CVE-2021-23337 + +# form-data https://avd.aquasec.com/nvd/cve-2025-7783 +# Use of Insufficiently Random Values vulnerability: HTTP Parameter Pollution (HPP), not risky for sfdx-hardis uses as it is not used as a HTTP server +CVE-2025-7783 + +# sqlite https://avd.aquasec.com/nvd/cve-2025-6965 +# Integer Truncation in SQLite, not risky for sfdx-hardis use +CVE-2025-6965 + +# sha.js https://avd.aquasec.com/nvd/cve-2025-9288 +# Missing type checks leading to hash rewind, located in @salesforce/cli so they'll fix that soon +CVE-2025-9288 + +# Dockerfile https://avd.aquasec.com/misconfig/ds002 +# Image using root user, that's ok as we are using it in CI/CD jobs containers, so not risky for sfdx-hardis DS002 + +# Dockerfile https://avd.aquasec.com/misconfig/ds026 +# No HEALTHCHECK defined, that's ok as our container live only the time of the job, so not risky for sfdx-hardis DS026 -GHSA-f8pq-3926-8gx5 + + diff --git a/.vscode/launch.json b/.vscode/launch.json index 0786ae7e5..ba66bf715 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -7,36 +7,16 @@ { "type": "node", "request": "attach", - "name": "Attach to Remote", - "address": 
"127.0.0.1", + "name": "Attach", "port": 9229, - "localRoot": "${workspaceFolder}", - "remoteRoot": "${workspaceFolder}", - "disableOptimisticBPs": true, - "smartStep": true, - "skipFiles": [ - "/**", - "**/node_modules/**/*.js", - "**/node_modules/**/*.cjs" - ], - "windows": { - "skipFiles": [ - "C:\\**\\node_modules\\**\\*", - "/**/*" - ] - } + "skipFiles": ["/**"] }, { "name": "Run All Tests", "type": "node", "request": "launch", - "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha", - "args": [ - "--inspect", - "--no-timeouts", - "--colors", - "test/**/*.test.ts" - ], + "program": "${workspaceFolder}/node_modules/mocha/bin/mocha", + "args": ["--inspect", "--colors", "test/**/*.test.ts"], "env": { "NODE_ENV": "development", "SFDX_ENV": "development" @@ -44,19 +24,14 @@ "sourceMaps": true, "smartStep": true, "internalConsoleOptions": "openOnSessionStart", - "preLaunchTask": "Compile" + "preLaunchTask": "Compile tests" }, { "type": "node", "request": "launch", "name": "Run Current Test", - "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha", - "args": [ - "--inspect", - "--no-timeouts", - "--colors", - "${file}" - ], + "program": "${workspaceFolder}/node_modules/mocha/bin/mocha", + "args": ["--inspect", "--colors", "${file}"], "env": { "NODE_ENV": "development", "SFDX_ENV": "development" @@ -64,7 +39,7 @@ "sourceMaps": true, "smartStep": true, "internalConsoleOptions": "openOnSessionStart", - "preLaunchTask": "Compile" + "preLaunchTask": "Compile tests" } ] -} \ No newline at end of file +} diff --git a/.vscode/settings.json b/.vscode/settings.json index 397f920b2..95f8704a6 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -4,11 +4,25 @@ "**/.svn": true, "**/.hg": true, "**/CVS": true, - "**/.DS_Store": true + "**/.DS_Store": true, + "**/node_modules": true, }, "search.exclude": { "**/lib": true, - "**/bin": true + "**/bin": true, + "**/node_modules": true, }, - "sarif-viewer.connectToGithubCodeScanning": "off" -} + 
"editor.tabSize": 2, + "editor.formatOnSave": true, + "rewrap.wrappingColumn": 80, + "typescript.tsserver.exclude": [ + "**/node_modules", + "**/dist", + "**/build" + ], + "pasteImage.path": "${currentFileDir}/assets/images", + "pasteImage.basePath": "${currentFileDir}", + "pasteImage.showFilePathConfirmInputBox": true, + "pasteImage.filePathConfirmInputBoxMode": "onlyName", + "workbench.colorCustomizations": {} +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 5a0260e92..efbb147c5 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -1,9 +1,9 @@ { "version": "2.0.0", - "problemMatcher": "$tsc-watch", + "problemMatcher": "$tsc", "tasks": [ { - "label": "Compile", + "label": "Compile tests", "group": { "kind": "build", "isDefault": true @@ -14,44 +14,8 @@ "focus": false, "panel": "dedicated" }, - "args": ["run", "prepack"], - "isBackground": false, - "problemMatcher": { - "owner": "typescript", - "fileLocation": "relative", - "pattern": { - "regexp": "^(.*\\.ts):(\\d*):(\\d*)(\\s*-\\s*)(error|warning|info)\\s*(TS\\d*):\\s*(.*)$", - "file": 1, - "line": 2, - "column": 3, - "severity": 5, - "code": 6, - "message": 7 - } - } - }, - { - "label": "Lint", - "command": "yarn", - "type": "shell", - "presentation": { - "focus": false, - "panel": "dedicated" - }, - "args": ["run", "lint"], - "isBackground": false, - "problemMatcher": { - "owner": "typescript", - "fileLocation": "relative", - "pattern": { - "regexp": "^(ERROR|WARNING|INFO):\\s*(.*\\.ts):(\\d*):(\\d*)(\\s*-\\s*)(.*)$", - "file": 2, - "line": 3, - "column": 4, - "severity": 1, - "message": 6 - } - } + "args": ["run", "pretest"], + "isBackground": false } ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index f7827674e..dfa876e5e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,1082 @@ Note: Can be used with `sfdx plugins:install sfdx-hardis@beta` and docker image `hardisgroupcom/sfdx-hardis@beta` +- Update installation instructions + +## [6.5.3] 2025-09-23 + +- 
Install Chrome in Ubuntu docker image + +## [6.5.2] 2025-09-21 + +- Display alias in org selection + +## [6.5.1] 2025-09-20 + +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/) enhancements: + - Creates the 'force-app/main/default' directory if it doesn't exist before retrieving metadatas +- [hardis:org:configure:monitoring](https://sfdx-hardis.cloudity.com/hardis/org/configure/monitoring/): + - Display the connected App XML in logs (while hiding sensitive info) + - When production org, run the first found test class (or allow to force its selection using ENV variable `SFDX_HARDIS_TECH_DEPLOY_TEST_CLASS` ) + - Add instructions to use ghcr.io Docker image in case of rate limits reached on Docker Hub +- Handle progress component in UI when generating documentation + +## [6.5.0] 2025-09-17 + +- Files export enhancements: + - Resume + validate downloaded files + - Improves API limit handling for file export/import +- When prompting for org url, allow to input just the domain (ex: `hardis-group`) and sfdx-hardis will build the rest of the url + +## [6.4.4] 2025-09-16 + +- When prompting for org instance URL, allow to copy-paste the full URL to gain time +- [hardis:org:diagnose:unsecure-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unsecure-connected-apps/): Salesforce limits OAuthToken queries to 2500 results. Be sneaky to get all results :) + +## [6.4.3] 2025-09-14 + +- [hardis:org:file:export](https://sfdx-hardis.cloudity.com/hardis/org/files/export/) and [hardis:org:file:import](https://sfdx-hardis.cloudity.com/hardis/org/files/import/): + - Provide record Ids in the logs +- Update labels of all report files for UI buttons +- Allow to export only files of a minimum size + +## [6.4.2] 2025-09-14 + +- Add additional dependencies in Ubuntu Dockerfile to allow mermaid-cli and chrome to run natively from the image. 
+- [hardis:org:file:export](https://sfdx-hardis.cloudity.com/hardis/org/files/export/) & [hardis:org:file:import](https://sfdx-hardis.cloudity.com/hardis/org/files/import/): + - Send progress notifications to WebSocketServer + - Improve console logs + - Generate a CSV log file with all files +- [hardis:work:save](https://sfdx-hardis.cloudity.com/hardis/work/save/): Display manual actions file as an action link + +## [6.4.1] 2025-09-10 + +- Allow to override Bulk API v2 settings with env variables **BULKAPIV2_POLL_INTERVAL**, **BULKAPIV2_POLL_TIMEOUT** and **BULK_QUERY_RETRY** + +## [6.4.0] 2025-09-08 + +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/): New beta feature **useDeltaDeploymentWithDependencies** to add dependencies to the delta deployment package. +- Fix npm dependencies (just in case, but the global npm packages hack has not impacted sfdx-hardis as it does not run in a browser) + +## [6.3.3] 2025-09-08 + +- [hardis:org:diagnose:unsecure-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unsecure-connected-apps/) + - Add an additional columns on OAuth Usage to: + - Show when the connected app has been last used + - Show which profiles are the users using the OAuth Tokens + - Run the command in the daily monitoring + +## [6.3.2] 2025-09-07 + +- Set initPermissionSets config prop to array of strings +- [hardis:org:diagnose:unsecure-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unsecure-connected-apps/): Handle case where OAuth Token App menu item is not found + +## [6.3.1] 2025-09-07 + +- Update Grafana Home Dashboard to add Unsecure Connected Apps +- Fix Auth configuration command for Dev Hub +- Allow to use org shapes for scratch org creation with env variable **SCRATCH_ORG_SHAPE** +- Replace `my.salesforce-setup.com` by `my.salesforce.com` when prompting instance URL + +## [6.3.0] 2025-09-06 + +- New command 
[hardis:org:diagnose:unsecure-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unsecure-connected-apps/) to detect Unsecured Connected Apps +- Add documentation about Packages installation +- Update Azure Pipelines integration documentation + +## [6.2.1] 2025-09-04 + +- [hardis:work:save](https://sfdx-hardis.cloudity.com/hardis/work/save/): Always display a button to create Merge Request +- Update GitProvider to make it compliant with GitHub Enterprise hosted on ghe.com + +## [6.2.0] 2025-09-01 + +- [hardis:org:refresh:before-refresh](https://sfdx-hardis.cloudity.com/hardis/org/refresh/before-refresh/) + - Allow to download data to save before refreshing the sandbox, using SFDMU projects + - Save Custom Settings selection in configuration +- [hardis:org:refresh:after-refresh](https://sfdx-hardis.cloudity.com/hardis/org/refresh/after-refresh/) + - Restore data after refresh using saved SFDMU project data +- [hardis:org:data:export](https://sfdx-hardis.cloudity.com/hardis/org/data/export/) & [hardis:org:data:import](https://sfdx-hardis.cloudity.com/hardis/org/data/export/): + - Add --project-name and --no-prompts arguments + - Add more examples of commands calls +- [hardis:org:select](https://sfdx-hardis.cloudity.com/hardis/org/data/export/): Improve options to be called from VsCode-sfdx-hardis Orgs Manager LWC + +## [6.1.4] 2025-08-25 + +- Update Integrations & DevOps Documentation +- Send message to refresh pipeline after updating package configuration + +## [6.1.3] 2025-08-24 + +- [hardis:org:configure:auth](https://sfdx-hardis.cloudity.com/hardis/org/configure/auth/) + - Fix issues related to VsCode background mode + - When updating existing branch authentication, pre-select merge targets. 
+ - Send more information to the user about files that are created/updated + +## [6.1.2] 2025-08-24 + +- Simplify package retrieve command +- Handle when an org is disconnected in CodeBuilder context + +## [6.1.1] 2025-08-24 + +- Auto-detect which login type to use depending if we are in local or web context (Code Builder, CodeSpaces) +- Add documentation for Ubuntu images +- Wait for WebSocket Server to be initialized before continuing command. + +## [6.1.0] 2025-08-23 + +- [hardis:org:refresh:before-refresh](https://sfdx-hardis.cloudity.com/hardis/org/refresh/before-refresh/) + - Retrieve Certificates and other metadatas that could need to be restored + - Retrieve Custom Settings values +- [hardis:org:refresh:after-refresh](https://sfdx-hardis.cloudity.com/hardis/org/refresh/after-refresh/) + - Restore Certificates and other metadatas that could need to be restored + - Restore Custom Settings values + - Smart restore of SAML SSO Config by prompting the user to select a valid certificate +- Send path to command log file to WebSocketServer +- Improve startup performances by checking for sfdx-hardis upgrades every 6h and not every 15 mn! +- [hardis:org:diagnose:unused-connected-app](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-connected-apps/): Fix bug when not escaping App name in SOQL query +- Update banner +- New config property **manualActionsFileUrl** to indicate users where the deployment manual actions is located. + +## [6.0.6 (beta)] 2025-08-17 + +- New command [hardis:org:refresh:before-refresh](https://sfdx-hardis.cloudity.com/hardis/org/refresh/before-refresh/) : Save Connected Apps before refreshing a sandbox. +- New command [hardis:org:refresh:after-refresh](https://sfdx-hardis.cloudity.com/hardis/org/refresh/after-refresh/) : Restore Connected Apps after refreshing a sandbox. +- Update JSON Schema documentation +- When authenticating to an expired org token, delete the SF Cli file that can mess with us when we refreshed a sandbox. 
+- Improve logs display + +## [6.0.5 (beta)] 2025-08-14 + +- Add ENV SF_DATA_DIR to the ubuntu Dockerfile to install plugins "globally" and make the image work as non-root user + +## [6.0.4 (beta)] 2025-08-14 + +- Immediately stop when a user cancels a multi-question prompt +- Add log used for background process in LWC UI +- Refactor logging methods +- Display labels of prompt answers, not technical values +- Improve naming of report files + +## [6.0.3 (beta)] 2025-08-12 + +- [hardis:org:generate:packagexmlfull](https://sfdx-hardis.cloudity.com/hardis/org/generate/packagexmlfull/): Add --no-prompt option to directly use default org. +- [hardis:work:save](https://sfdx-hardis.cloudity.com/hardis/work/save/): + - Add links to create Merge Request + Display MR documentation + - Conditionally execute CleanXML command +- Improve UX when opening sandboxes + +## [6.0.2 (beta)] 2025-08-11 + +- Renaming "task" to "User Story" across the codebase and documentation for clarity. +- Enhancing SFDMU integration by improving UX and linking external docs. +- Improving CLI command outputs and workflows.
+ +## [6.0.1 (beta)] 2025-08-11 + +- Send messages to VsCode to enhance buttons and links +- Send messages to VsCode to display tables +- Unify the way to handle table display in all commands formerly using console.table or columnify + +## [6.0.0 (beta)] 2025-08-09 + +- Implement advanced websocket messaging for vscode sfdx-hardis LWC UI +- Refactor logging within commands for better display on vscode sfdx-hardis LWC UI +- Generate commands documentation with AI +- Refactor [hardis:org:configure:auth](https://sfdx-hardis.cloudity.com/hardis/org/configure/auth/) for better UX +- Enhance org selection prompt + +## [5.45.0] 2025-07-22 + +- Generate ubuntu-based Docker images + - docker.io/hardisgroupcom/sfdx-hardis-ubuntu + - ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu +- Display commands in blue for better readability on BitBucket +- Fix bug that did not replace existing comments on BitBucket +- Decrease docker images size + +## [5.44.1] 2025-07-16 + +- [hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/):Add new ignored items in audit trail + +## [5.44.0] 2025-06-29 + +- [hardis:project:generate:bypass](https://sfdx-hardis.cloudity.com/hardis/project/generate/bypass/): Code rework + removed global flag + Added ability to apply the bypass to VRs and Triggers +- Refactored logic to ensure preprod branches are only added if they exist, preventing null pointer exceptions. +- Upgrade npm dependencies + +## [5.43.5] 2025-06-27 + +- Filter WorkflowFlowAutomation from org-generated package.xml (workaround attempt for ) + +## [5.43.4] 2025-06-26 + +- Fix use of org API version + +## [5.43.3] 2025-06-26 + +- [hardis:project:audit:apiversion](https://sfdx-hardis.cloudity.com/hardis/project/audit/apiversion/): Add the newApiVersion parameter to specify the target version for the upgrade. 
+ +## [5.43.2] 2025-06-25 + +- Update default API version to 63.0, but if --skipauth is not used, get the apiVersion of default org +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/): Automate update of sfdx-project.json and package.xml at the beginning of the command + +## [5.43.1] 2025-06-24 + +- Refactor part of the documentation + add pages about events and videos +- Upgrade dependency @cparra/apexdocs + +## [5.43.0] 2025-06-22 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) enhancements + + - Generate Apex Class relationship diagram on each apex doc page + - Improve display of Object and Class diagrams when there are too many items + +- Upgrade npm dependencies + +## [5.42.0] 2025-06-18 + +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/): CI/CD enhancements + - Allow to activate special behaviors when words are written in Pull Request description + - **NO_DELTA**: Even if delta deployments are activated, a deployment in mode **full** will be performed for this Pull Request + - **PURGE_FLOW_VERSIONS**: After deployment, inactive and obsolete Flow Versions will be deleted (equivalent to command sf hardis:org:purge:flow)
**Caution: This will also purge active Flow Interviews !** + - **DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT**: If a file manifest/destructiveChanges.xml is found, it will be executed in a separate step, after the deployment of the main package + - Use CommonPullRequestInfo strong type for better use of cross-platform PR functions + - Manage cache to get Pull Request info to improve performances + +## [5.41.0] 2025-06-15 + +- Factorize common prompt text into prompt variables, that can be overridable by user. +- Implement cache for prompt templates and variables to improve performances +- New command [hardis:doc:override-prompts](https://sfdx-hardis.cloudity.com/hardis/doc/override-prompts/): Create local override files for AI prompt templates that can be customized to match your organization's specific needs and terminology +- Add Github Copilot instructions + +## [5.40.0] 2025-06-15 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Add Roles documentation +- Upgrade npm dependencies + +## [5.39.1] 2025-06-05 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Define DO_NOT_OVERWRITE_INDEX_MD=true to avoid overwriting the index.md file in docs folder, useful if you want to keep your own index.md file. + +## [5.39.0] 2025-06-05 + +- When in CI, by default a maximum time of 30 minutes can be used to call AI. This value can be overridden using `AI_MAX_TIMEOUT_MINUTES`. +- New documentation page with all environment variables used by sfdx-hardis + +## [5.38.2] 2025-06-05 + +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/): Do not filter standard objects if they have at least one custom field defined. 
+- Upgrade tar-fs to fix CVE + +## [5.38.1] 2025-06-02 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Fix crash when generating Assignment Rules doc + +## [5.38.0] 2025-05-27 + +- New command [hardis:misc:servicenow-report](https://sfdx-hardis.cloudity.com/hardis/misc/servicenow-report/) to generate reports crossing data from a Salesforce object and related entries in ServiceNow +- Automatically open Excel report files when possible (disable with env var `NO_OPEN=true`) +- Defer the `sortCrossPlatform` operation for member lists until after all elements for a specific metadata type have been collected. Sorting is now performed only once per type improving the overall performance +- Upgrade npm dependencies + +## [5.37.1] 2025-05-23 + +- Update PROMPT_DESCRIBE_PACKAGE +- Update common instructions about prompt reply language +- Make sure that projectName is compliant with the format of an environment variable + +## [5.37.0] 2025-05-22 + +- Generate and publish multilingual documentation from sfdx-hardis monitoring +- Update command to install mkdocs-material & dependencies to match more python installation types +- Upgrade way to call wrangler to publish to Cloudflare + +## [5.36.3] 2025-05-21 + +- Azure CI/CD workflows: use ubuntu-latest as default image +- Fix doc overwrite in case apex docs failed +- Sort by alphabetical order, ignoring uppercase / lowercase +- Update default prompts +- Fix & delete generated files that are not compliant with Windows file system + +## [5.36.2] 2025-05-19 + +- Do not create package files with git forbidden characters + +## [5.36.1] 2025-05-18 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Display installed package metadatas as tree view + +## [5.36.0] 2025-05-18 + +- Allow to use another org to call Agentforce, by previously connecting to an org alias TECHNICAL_ORG (to do that, just define SFDX_AUTH_URL_TECHNICAL_ORG and 
[hardis:auth:login](https://sfdx-hardis.cloudity.com/hardis/auth/login/) will handle the rest) + +## [5.35.0] 2025-05-18 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) new features and fixes: + - Add doc for installed packages, enhanced with LLM + - Fix markdown returned by LLMs so it is compliant with mkdocs + - Allow to define a property **truncateAfter** on prompts variables to avoid crashes in case value is too long + - Authorizations doc: + - Filter non accessible items from tree + - Display special icons for ModifyAllData and ViewAllData items + - Fix display of Dataspace scope + - Allow to override text generated by LLM + - Allow to override a full documentation page using `` +- Upgrade dependencies + +## [5.34.1] 2025-05-15 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Fix crash when there is no HTML or JS on a LWC + +## [5.34.0] 2025-05-13 + +- [hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/): Add audit Custom Setting values updates +- Improve SOQL query functions by adding warning logs for record limits and removing redundant warning handling +- New command [hardis:misc:custom-label-translations](https://sfdx-hardis.cloudity.com/hardis/misc/custom-label-translations/): Extract selected custom labels, or of a given Lightning Web Component (LWC), from all language translation files. This command generates translation files ('\*.translation - meta.xml') for each language already retrieved in the current project, containing only the specified custom labels. 
+ +## [5.33.0] 2025-05-10 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Allow to use ollama, Anthropic and Gemini LLMs, through langchainJs +- sfdx-hardis prompt templates enhancements: + - Add [prompt templates](https://sfdx-hardis.cloudity.com/salesforce-ai-prompts/#available-prompt-templates) in online documentation + - Allow to locally [override prompt templates](https://sfdx-hardis.cloudity.com/salesforce-ai-prompts/#overriding-prompts) text in `config/prompt-templates/${templateName}.txt` + - Rewrite old prompt templates +- Improve VsCode workspace configuration to avoid performance issues +- Upgrade npm dependencies + +## [5.32.1] 2025-05-09 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Fix crash when assignment rule doesn't have a value + +## [5.32.0] 2025-05-06 + +- [hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/): Flag more audit trail actions as not relevant +- CI/CD: Add FlowDefinition in default [package-no-overwrite.xml](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-overwrite/#package-no-overwritexml), as it is a deprecated metadata +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Escalation Rules AI-enhanced documentation + +## [5.31.0] 2025-05-05 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): New features + - AutoResponse rules, by @mpyvo in + - Lightning Web Components, by @tahabasri in + +## [5.30.0] 2025-05-04 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Generate Assignment Rules documentation +- Doc: Mention security artifacts in documentation + +## [5.29.1] 2025-05-02 + +- [hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/): Flag more audit trail actions as not relevant +- Generate 
SBOM (Software Bill Of Material) from CI/CD jobs +- Expose security scan results and SBOM as artifacts on release jobs + +## [5.29.0] 2025-05-02 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Generate Approval Process documentation +- Bitbucket Integration: Update default pipeline to add `clone: depth: full` +- Security: Remove markdown-toc dependency as it is not maintained anymore and contains a CVE on old lodash version +- Add documentation page about how security is handled with sfdx-hardis +- Add trivy reports in Github Actions Workflows + +## [5.28.1] 2025-04-25 + +- [hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/) enhancements + - Flag more audit trail actions as not relevant + - Display related actions next to username in summary +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Reorganize documentation menus + +## [5.28.0] 2025-04-23 + +- [hardis:lint:metadatastatus](https://sfdx-hardis.cloudity.com/hardis/lint/metadatastatus/): Detect more inactive elements that are technical debt to be cleaned + - Approval Processes + - Assignment Rules + - Auto Response Rules + - Escalation Rules + - Forecasting Types + - Record Types + - Workflow Rules + +## [5.27.0] 2025-04-18 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) new features + - Generate Permission sets and Permission Set Groups documentation + - Display Profiles & Permission Sets attributes in a tree + +## [5.26.1] 2025-04-15 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Display Profiles attributes in a tree +- Also Display JIRA and Azure Boards issue status labels in notifications +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/) enhancements + - Add **--start-chunk** to help solving rotten Metadata retrieve issues 
+ - When using **--full-apply-filters**, do not keep Custom Objects that do not have Custom Fields locally defined + - Update package-skip-items template to add MilestoneType + - Add troubleshooting documentation + +## [5.26.0] 2025-04-11 + +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/): Allow wildcards in package-skip-items.xml (examples: `pi__*` , `*__dlm` , or `prefix*suffix` ) + +## [5.25.2] 2025-04-10 + +- Display JIRA and Azure Boards issue status labels in Pull Request comments + +## [5.25.1] 2025-04-08 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Fix typo for Object description prompt + +## [5.25.0] 2025-04-06 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Add profile documentation generated by AI +- Refactor document generation code +- GitHub Integration: Use ENV variables as fallback [in case the job runner is not GitHub Actions](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/#using-github-integration-without-github-actions), like Codefresh + +## [5.24.3] 2025-04-04 + +- Fix visualization of [Azure DevOps](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/#azure-pull-request-notes) images by linking attachments to a generic work item.
+ +## [5.24.2] 2025-04-02 + +- Upgrade npm dependencies + +## [5.24.1] 2025-03-24 + +- Upgrade @xmlnode/xmlnode and update related code so it works with newer version +- Upgrade NPM dependencies +- Update [Contributor Guide documentation about package management](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-work-on-task-install-packages/) + +## [5.24.0] 2025-03-21 + +- Flow documentation: Take in account new **Transform Element** + +## [5.23.0] 2025-03-19 + +- Lazy loading in hooks to improve performances when other CLI plugins commands are called +- [hardis:org:file:export](https://sfdx-hardis.cloudity.com/hardis/org/files/export/): Fix 100000 characters SOQL error limit +- Upgrade npm dependencies + +## [5.22.0] 2025-03-13 + +- [hardis:org:file:export](https://sfdx-hardis.cloudity.com/hardis/org/files/export/): Now handles to export of Attachments in addition to ContentVersions :) +- [hardis:doc:flow2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/flow2markdown/): Call AI when generating the doc of a single flow +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/) Fix: delta after merge is not working as expected + +## [5.21.4] 2025-03-11 + +- Support edge-case when package.xml is empty but destructive changes are present. 
(see [Github issue](https://github.com/hardisgroupcom/sfdx-hardis/issues/1093)) +- Upgrade dependencies + +## [5.21.3] 2025-03-01 + +- [hardis:org:data:export](https://sfdx-hardis.cloudity.com/hardis/org/data/export/): Fix crash when a record has more than 1000 attached documents + +## [5.21.2] 2025-03-01 + +- [hardis:org:diagnose:unused-connected-app](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-connected-apps/): Fix crash when a Connected App doesn't have a CreatedBy +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Avoid crash when a lookup field does not contain referenceTo + +## [5.21.1] 2025-02-27 + +- [hardis:org:test:apex](https://sfdx-hardis.cloudity.com/hardis/org/test/apex/) Take into account `--target-org` option +- [hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/) Fix **monitoringAllowedSectionsActions** + +## [5.21.0] 2025-02-27 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Generate PDF files from markdown documentation, by @matheus-delazeri + +## [5.20.0] 2025-02-22 + +- [hardis:work:new](https://sfdx-hardis.cloudity.com/hardis/work/new/) + - Document properties **availableProjects** and **availableTargetBranches** + - Allow to define **newTaskNameRegex** to enforce the naming of a new task + - Allow to remove question about upgrading the dev sandbox if `sharedDevSandboxes: true` is set +- Fix issue with **monitoringAllowedSectionsActions** not taken into account when a section is defined as `[]` to ignore all of its member types.
+- Upgrade npm dependencies + +## [5.19.4] 2025-02-17 + +- Do not check for missing descriptions on Data Cloud & Managed package metadatas +- Doc: display where subflows are used in a new Dependencies paragraph +- mkdocs-to-cf: No need to authenticate to SF org + +## [5.19.3] 2025-02-15 + +- Doc: Add Cloudflare setup instructions +- Doc: Reorganize Project documentation menus +- Update default workflows to handle Cloudflare variables + +## [5.19.2] 2025-02-14 + +- [hardis:project:generate:bypass](https://sfdx-hardis.cloudity.com/hardis/project/generate/bypass/): Added necessary flags to be run from vscode sfdx-hardis extension + added skip-credits + - Bypass generator: Create metadatas folders if not existing yet +- Change default CF policy +- Update doc to request activation of **ExperienceBundle Metadata API** + +## [5.19.1] 2025-02-09 + +- Quickfix cf upload + +## [5.19.0] 2025-02-09 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Add object model diagram in documentation +- New command [hardis:project:generate:bypass](https://sfdx-hardis.cloudity.com/hardis/project/generate/bypass/) : Generates bypass custom permissions and permission sets for specified sObjects and automations, by @Mehdi-Cloudity in +- Adjusting the Grafana Configuration Variables in the Megalinter part of org-monitoring.yml, by @AhmedElAmory in + +## [5.18.1] 2025-02-04 + +- Fix typo in docUtils +- Stealth enhancements + +## [5.18.0] 2025-02-03 + +- New command [hardis:doc:fieldusage](https://sfdx-hardis.cloudity.com/hardis/doc/fieldusage/) : generate a report with custom field's usage from metadata dependencies. 
+ +## [5.17.4] 2025-01-31 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Fixes pages menu +- Stealth feature + +## [5.17.3] 2025-01-29 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Improve Apex docs markdown +- Upgrade apexdocs version +- Fix auth message when selecting default org + +## [5.17.2] 2025-01-29 + +- [hardis:org:configure:files](https://sfdx-hardis.cloudity.com/hardis/org/configure/files/): Add examples when configuring file export format +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Avoid the command to crash if apexdocs generation fails + +## [5.17.1] 2025-01-27 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Add type of Lightning Pages in tables +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/): Fix issue when there is an empty metadata type + +## [5.17.0] 2025-01-26 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) enhancements: + - Generate Apex classes documentation using `@cparra/apexdocs`, and describe them using AI if available + - Generate Lightning Pages documentation and describe them using AI if available + - Display error message in case of XML parsing error + - Do not raise issues when managed items fields don't have descriptions + - Do not raise inactive validation rule issue when the VR is from a managed package + - Fix New JSON coverage formatter is selecting wrong JSON from sf project deploy command + +## [5.16.4] 2025-01-22 + +- Doc: Exclude not relevant md from search +- Upgrade npm dependencies +- Add more logs to login command + +## [5.16.3] 2025-01-22 + +- Do not post comments with Flows if there is no real differences +- Truncate the number of flows git diff displayed in Pull Request comments to 30 (override the number using 
MAX_FLOW_DIFF_TO_SHOW ) +- Keep history link in main flow doc if available and history not recalculated +- Remove Flows History mkdocs menu if present from an old sfdx-hardis doc generation +- QuickFix AI Generated Summary text in PRs + +## [5.16.2] 2025-01-21 + +- Strip XML to save prompts tokens +- Fix issue when parsing CustomObject metadata +- Install latest version of plugin @salesforce/plugin-deploy-retrieve in Dockerfile to avoid the bug of its current version +- Fix: Do not recalculate Flow History doc if flow has not been updated +- Skip Data Cloud objects from documentation (enforce using variable INCLUDE_DATA_CLOUD_DOC=true) + +## [5.16.1] 2025-01-19 + +- AI Cache results enhancements + - Normalize strings before creating fingerprint to handle multiple platforms + - Delete unused cache files +- Fix variables mismatch when calling `generateFlowMarkdownFile` + +## [5.16.0] 2025-01-19 + +- New AI Provider: Agentforce +- Create Objects AI-powered documentation + - Summary + - Relationships with other objects + - Fields + - Validation rules + - Related flows +- Handle prompts multilingualism (ex: `PROMPTS_LANGUAGE=fr`) +- Handle prompts cache to save tokens +- Add `SFDX_DISABLE_FLOW_DIFF: false` in default CI/CD pipelines (must be set to true during CI/CD setup) +- Enhance branches & orgs CI/CD strategy mermaid diagram +- Improve performances by using `GLOB_IGNORE_PATTERNS` for all calls to glob + +## [5.15.5] 2025-01-16 + +- Flow Visual Diff enhancements + - Display full node fields table when it contains updated elements + - Fix removed long links + - Handle cases where Flow has been added or deleted +- Update [hardis:project:deploy:notify](https://sfdx-hardis.cloudity.com/hardis/project/deploy/notify/) documentation + +## [5.15.4] 2025-01-15 + +- Allow to disable calls to AI prompts API using DISABLE_AI=true +- Implement AI cache to save calls to AI prompts API (can be disabled using IGNORE_AI_CACHE) + +## [5.15.3] 2025-01-14 + +- 
[hardis:project:generate:flow-git-diff](https://sfdx-hardis.cloudity.com/hardis/project/generate/flow-git-diff/) New parameters --commit-before and --commit-after +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Filter flows from managed packages +- Display number of AI prompts API calls at the end of a command + +## [5.15.2] 2025-01-13 + +- Add AI security considerations in documentation +- Do not prompt for AI API TOKEN +- Do not crash in case of AI call failure + +## [5.15.1] 2025-01-12 + +- Improve prompt templates + +## [5.15.0] 2025-01-12 + +- Allow to call AI to describe flows in documentation +- Allow to call AI to describe differences between 2 flow versions in a pull request comment +- [Ai Provider](https://sfdx-hardis.cloudity.com/salesforce-ai-setup/) enhancements + - Change default model from gpt-4o to gpt-4o-mini + - Prompt templates factory, with capability to override default prompt with ENV variable + - Translate prompts in french +- Add dotenv to allow to define secrets variables in a local `.env` file (never commit it !) 
+- Add more ways to call python depending on the installation + +## [5.14.3] 2025-01-10 + +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/) Fix crash when deployment is ok + +## [5.14.2] 2025-01-10 + +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/) Fix parsing error in case it is UNKNOWN_ERROR +- Fix error `str.replace is not a function` + +## [5.14.1] 2025-01-09 + +- Generate a file **hardis-report/apex-coverage-results.json** with Apex code coverage details for the following commands: + - [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/) (only if `COVERAGE_FORMATTER_JSON=true` environment variable is defined) + - [hardis:org:test:apex](https://sfdx-hardis.cloudity.com/hardis/org/test/apex/) (always) + - [SF Cli deployment wrapper commands](https://sfdx-hardis.cloudity.com/salesforce-deployment-assistant-setup/#using-custom-cicd-pipeline) +- Do not display command output if execCommand has been called with `output: false` + +## [5.14.0] 2025-01-09 + +- Add ability to replace ApiVersion on specific Metadata Types file using `sf hardis:project:audit:apiversion` +- Add parameters `fix` and `metadatatype` on `sf hardis:project:audit:apiversion` +- Fix build of formula markdown when generating a Flow Visual Documentation + +## [5.13.3] 2025-01-08 + +- Update default JIRA Regex to catch tickets when there is an number in the project name + +## [5.13.2] 2025-01-07 + +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/): Fix parsing when deployment failure is related to Apex code coverage +- Flow doc fix: add description for constants, variables, text template & formulas +- Flow parsing: Fix error when there is only one formula + +## [5.13.1] 2025-01-07 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) Display a screen emoji in documentation flows 
table when they are not tied to an Object +- [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/): Shorten log lines when there is a too big JSON, by removing info not relevant for display, like unchanged files or test classes results. + +## [5.13.0] 2025-01-05 + +- [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) Add branch & orgs strategy MermaidJS diagram in documentation + +## [5.12.0] 2025-01-04 + +- New command [hardis:doc:mkdocs-to-salesforce](https://sfdx-hardis.cloudity.com/hardis/doc/mkdocs-to-salesforce/) to generate static HTML doc and host it in a Static Resource and a VisualForce page +- Remove hyperlinks from MermaidJs on Pull Request comments, to improve display on GitHub & Gitlab +- Upgrade base image to python:3.12.8-alpine3.20, so mkdocs can be installed and run if necessary +- Add links in package.xml Markdown documentation + +## [5.11.0] 2025-01-03 + +- Visual flow management, using MermaidJs + + - [hardis:doc:project2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/): Add a markdown file for each Flow + - If unable to run mermaid-cli, store markdown with mermaidJs diagram content anyway (can happen from Monitoring Backup Command) + - When called from Monitoring ([hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/)), generate Flow documentation only if it has been updated + - [hardis:doc:flow2markdown](https://sfdx-hardis.cloudity.com/hardis/doc/flow2markdown/): Generate the markdown documentation of a single flow (available from VsCode extension) + - [hardis:project:generate:flow-git-diff](https://sfdx-hardis.cloudity.com/hardis/project/generate/flow-git-diff/): Generate the visual git diff for a single flow (available from VsCode extension) + - [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/): Add visual git diff for flows updated by a Pull Request + -
Flow Visual Git diff also added to [standard SF Cli commands wrappers](https://sfdx-hardis.cloudity.com/salesforce-deployment-assistant-setup/#using-custom-cicd-pipeline) + +- New command [hardis:project:deploy:notify](https://sfdx-hardis.cloudity.com/hardis/project/deploy/notify/) to send Pull Request comments (with Flow Visual Git Diff) and Slack / Teams notifications even if you are not using a sfdx-hardis command to check or process a deployment. + +- Command updates + + - [hardis:project:deploy:smart](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/): Refactor deployment errors parsing: use JSON output instead of text output + - [hardis:org:test:apex](https://sfdx-hardis.cloudity.com/hardis/org/test/apex/): Display the number of failed tests in messages and notifications + - [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/): + - New option **--exclude-namespaces** that can be used with **--full** option + - New option **--full-apply-filters** that can be used with **--full** option to apply filters anyway + +- Core enhancements & fixes + + - Obfuscate some data from text log files + - Kill some exit handlers in case they are making the app crash after a throw SfError + - Trigger notifications during the command execution, not after + - Do not display warning in case no notification has been configured in case we are running locally + - Fix Individual deployment tips markdown docs by adding quotes to YML properties + - Fix init sfdx-hardis project commands and docs + - Display warning message in case package.xml has wrong format + - Allow to override package-no-overwrite from a branch .sfdx-hardis.yml config file + - Using target_branch for Jira labels when isDeployBeforeMerge flag is true + +- Doc + - Update Microsoft Teams notifications integration User Guide + - Add troubleshooting section in Email integration User Guide + +## [5.10.1] 2024-12-12 + +- Fix sfdx-hardis docker image build by adding coreutils 
in dependencies + +## [5.10.0] 2024-12-12 + +- Update Docker base image to alpine to 3.21 + +## [5.9.3] 2024-12-12 + +- [hardis:org:data:import](https://sfdx-hardis.cloudity.com/hardis/org/data/import/): Allow to run the command in production using, by either: + - Define **sfdmuCanModify** in your .sfdx-hardis.yml config file. (Example: `sfdmuCanModify: prod-instance.my.salesforce.com`) + - Define an environment variable SFDMU_CAN_MODIFY. (Example: `SFDMU_CAN_MODIFY=prod-instance.my.salesforce.com`) + +## [5.9.2] 2024-12-10 + +- Fallback message in case sfdx-hardis is not able to parse newest SF CLI errors format. + +## [5.9.1] 2024-12-09 + +- Fix issue that generates valid Pull Request comment whereas there is 1 error +- Add TS test case +- Upgrade NPM dependencies + +## [5.9.0] 2024-12-02 + +- [hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/): New mode **--full**, much slower than default filtered one, but that can retrieve ALL metadatas of an org + +## [5.8.1] 2024-11-26 + +- Fix [hardis:org:diagnose:unused-apex-classes](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-apex-classes/): Use .cls file, not cls-meta.xml file to get creation date from git + +## [5.8.0] 2024-11-25 + +- New monitoring command [hardis:org:diagnose:unused-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-connected-apps/) to detect Connected Apps that are not used anymore and might be disabled or deleted. 
+ +## [5.7.2] 2024-11-25 + +- Fix issue with auth just before running a command (ask to run again the same command meanwhile we find a way to avoid that using SF CLI architecture) + +## [5.7.1] 2024-11-22 + +- In case a prompt is requested during CI and makes a command fail, display the content of the prompt + +## [5.7.0] 2024-11-22 + +- New command **hardis:git:pull-requests:extract**: Extract Pull Requests from Git Server into CSV/XLS (Azure only for now) +- Fix bug when scratch org username is > 80 chars +- Make markdown-links-check not blocking by default in MegaLinter base config +- Make yamllint not blocking by default in MegaLinter base config + +## [5.6.3] 2024-11-17 + +- MegaLinter config: disable APPLY_FIXES by default +- Upgrade npm dependencies + +## [5.6.2] 2024-11-12 + +- hardis:org:diagnose:unused-apex-classes + - Display class created by and created name MIN(date from org,date from git) + - Replace errors by warnings, and add a message so users double-check before removing a class + - Reorder console log +- Remove unused code from MetadataUtils class + +## [5.6.1] 2024-11-11 + +- Fix hardis:org:user:activateinvalid interactive mode +- Update Dockerfile email address +- Upgrade default Grafana Dashboards to add Unused Apex Classes indicator +- Update hardis:org:diagnose:unused-apex-classes and hardis:doc:packagexml2markdown documentation + +## [5.6.0] 2024-11-09 + +- New command hardis:org:diagnose:unused-apex-classes, to detect Apex classes (Batch,Queueable,Schedulable) that has not been called for more than 365 days, that might be deleted to improve apex tests performances +- hardis:doc:project2markdown: Update documentation +- Polish CI/CD home doc +- Refactor the build of [hardis:org:monitor:all](https://sfdx-hardis.cloudity.com/hardis/org/monitor/all/) documentation +- Fix issue with ToolingApi calls: handle paginated results instead of only the first 200 records. 
+ +## [5.5.0] 2024-11-03 + +- hardis:doc:packagexml2markdown: Generate markdown documentation from a package.xml file +- hardis:doc:project2markdown: Generate markdown documentation from any SFDX project (CI/CD, monitoring, projects not using sfdx-hardis...) in `docs` folder and add a link in README.md if existing. +- hardis:org:monitor:backup: Call hardis:doc:project2markdown after backup +- hardis:org:retrieve:packageconfig: Ignore standard Salesforce packages +- Update CI/CD home documentation + +## [5.4.1] 2024-11-02 + +- hardis:org:multi-org-query enhancements + - Improve documentation + - Allow to use --query-template as option to use one of the predefined templates via command line + - Handle errors if issues when the command is called via a CI/CD job +- Upgrade dependencies + +## [5.4.0] 2024-11-02 + +- New command hardis:org:multi-org-query allowing to execute a SOQL Bulk Query in multiple orgs and aggregate the results in a single CSV / XLS report +- New command hardis:org:community:update to Activate / Deactivate communities from command line + +## [5.3.0] 2024-10-24 + +- Update default Monitoring workflow for GitHub +- Refactor file download code + - Display progress + - Better error handling +- hardis:org:diagnose:legacyapi: Fix issue with big log files: Use stream to parse CSV and perform checks +- Update default API version to 62.0 (Winter 25 release) + +## [5.2.4] 2024-10-21 + +- Fix hardis:org:fix:listviewmine: Use chrome-launcher to find chrome executable to use with puppeteer-core +- Remove keyv dependency + +## [5.2.3] 2024-10-19 + +- Change default `.mega-linter.yml` config +- Display number of package.xml items before or after retrieving them +- Doc: Update youtube preview images + +## [5.2.2] 2024-10-14 + +- Fix doubling -d option in hardis:scratch:create + +## [5.2.1] 2024-10-14 + +- 2 hardis commands: rename `-d` into something else when the short option was available twice on the same command + +## [5.2.0] 2024-10-14 + +- Improve [BUILD
& RUN documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-hotfixes/) +- 21 hardis commands: rename `-o` short into `-f` when possible, or other short letter, to avoid collision with `-o` (`--target-org`) option +- Fix GitHub Org Monitoring workflow (remove push event + fix command typo) + +## [5.1.0] 2024-10-11 + +- hardis:project:deploy:smart: Fix to adapt stdout checks to output of `sf project deploy start` in case code coverage is ignored +- hardis:org:monitor:backup: Allow spaces in folders +- Remove pubsub from default .forceignore +- Change default deployment waiting time from 60mn to 120mn +- Display explicit warning message before ConnectedApp deployment so users don't forget to manually create the connected app with the certificate + +## [5.0.10] 2024-10-03 + +- hardis:project:deploy:smart : Fix parsing of error strings +- hardis:project:deploy:smart : Fix markdown display on PR summary + +## [5.0.9] 2024-10-03 + +- Fix link to tip doc from Pull Request / Merge Request comments +- Fixing small issues with creating scratch org and scratch org pool + +## [5.0.8] 2024-10-01 + +- Monitoring config: Fix way to define how to upload connected app +- New deployment tip: Couldn't retrieve or load information on the field +- Fix parsing of errors when they are unknown +- Fix SEO info in deployment tips documentation + +## [5.0.7] 2024-09-25 + +- hardis:org:monitoring:backup : fix issue when metadata type is unknown + +## [5.0.6] 2024-09-25 + +- Allow to purge flows & flow interviews using `--no-prompt` option +- Fix duplicate `-f` short option by replacing `delete-flow-interviews` short by `-w` + +## [5.0.5] 2024-09-24 + +- When git add / stash failure, display a message explaining to run `git config --system core.longpaths true` to solve the issue. 
+- Improve test classes errors collection during deployment check +- Display the number of elements deployed within a package.xml + +## [5.0.4] 2024-09-24 + +- Fix errors collection during deployment check +- Display in deployment check summary when **useSmartDeploymentTests** has been activated +- Do not send coverage formatters options when test level is NoTestRun + +## [5.0.3] 2024-09-23 + +- Add --ignore-conflicts to smartDeploy + +## [5.0.2] 2024-09-23 + +- Always use `project deploy start --dry-run` for deployment validation, until command `project deploy validate` works with --ignore-warnings & NoTestRun + +## [5.0.0] 2024-09-23 + +### Refactoring explanations + +The future [deprecation of sfdx force:source:\*\* commands on 6 november](https://github.com/forcedotcom/cli/issues/2974) finally convinced us to switch everything from SFDX core to SF CLI core. (otherwise existing CI/CD pipelines would not work anymore from this date !) + +Therefore, sfdx-hardis required a complete refactoring as described below, but this won't impact existing CI/CD and Monitoring pipelines. 
+ +We made many tests but risk zero do not exist, so if you see any bug, please report them ASAP and we'll solve them quickly :) + +### Major changes + +- Migrate plugin from SFDX plugin core to SF Cli Plugin core + + - [Convert commands code from SfdxCommand base to SfCommand base](https://github.com/salesforcecli/cli/wiki/Migrate-Plugins-Built-for-sfdx) + - Migrate internal Bulk Api calls from Bulk API v1 to Bulk API v2 + - Upgrade all npm dependencies to their latest version (more secured) + +- Change background calls to legacy sfdx commands to call their SF Cli replacements + + - `sfdx force:mdapi:convert` -> `sf project convert mdapi` + - `sfdx force:mdapi:deploy` -> `sf project deploy start --metadata-dir` + - `sfdx force:source:retrieve` -> `sf project retrieve start` + - `sfdx force:source:deploy` -> `sf project deploy start` + - `sfdx force:source:pull` -> `sf project retrieve start` + - `sfdx force:source:push` -> `sf project deploy start` + - `sfdx force:source:tracking:clear` -> `sf project delete tracking` + - `sfdx force:source:manifest:create` -> `sf project generate manifest` + - `sfdx sgd:source:delta` -> `sf sgd:source:delta` + - `sfdx force:org:create` -> `sf org create sandbox` | `sf org create scratch` + - `sfdx force:org:list` -> `sf org list` + - `sfdx force:org:delete` -> `sf org delete scratch` + - `sfdx config:get` -> `sf config get` + - `sfdx config:set` -> `sf config set` + - `sfdx auth:web:login` -> `sf org login web` + - `sfdx auth:jwt:grant` -> `sf org login jwt` + - `sfdx auth:sfdxurl:store` -> `sf org login sfdx-url` + - `sfdx org:login:device` -> `sf org login device` + - `sfdx force:data:record:get` -> `sf data get record` + - `sfdx force:data:record:update` -> `sf data update record` + - `sfdx force:data:soql:query` -> `sf data query` + - `sfdx force:data:bulk:delete` -> `sf data delete bulk` + - `sfdx alias:list` -> `sf alias list` + - `sfdx alias:set` -> `sf alias set` + - `sfdx force:apex:test:run` -> `sf apex run test` + - 
`sfdx force:apex:execute` -> `sf apex run` + - `sfdx force:package:create` -> `sf package create` + - `sfdx force:package:version:create` -> `sf package version create` + - `sfdx force:package:version:delete` -> `sf package version delete` + - `sfdx force:package:version:list` -> `sf package version list` + - `sfdx force:package:version:promote` -> `sf package version promote` + - `sfdx force:package:installed:list` -> `sf package installed` + - `sfdx force:package:install` -> `sf package install` + - `sfdx force:user:password:generate` -> `sf org generate password` + - `sfdx force:user:permset:assign` -> `sf org assign permset` + - `sfdx hardis:_` -> `sf hardis:_` + +- New wrappers commands for SF Cli deployment commands + - `sf hardis project deploy validate` -> Wraps `sf project deploy validate` + - `sf hardis project deploy quick` -> Wraps `sf project deploy quick` + - `sf hardis project deploy start` -> Wraps `sf project deploy start` + +### New Features / Enhancements + +- **hardis:project:deploy:smart** + - New feature **useSmartDeploymentTests**: Improve performances by not running test classes when delta deployment contain only non impacting metadatas, and target org is not production + - Rename command **hardis:project:deploy:source:dx** into **hardis:project:deploy:smart** (previous command alias remains, no need to update your pipelines !) +- **commandsPreDeploy** and **commandsPostDeploy** + - New option **context** for a command, defining when it is run and when it is not: **all** (default), **check-deployment-only** or **process-deployment-only** + - New option **runOnlyOnceByOrg**: If set to `true`, the command will be run only one time per org. 
A record of SfdxHardisTrace\_\_c is stored to make that possible (it needs to be existing in target org) +- New commands + - **hardis:project:deploy:simulate** to validate the deployment of a single metadata (used by VsCode extension) + - **hardis:org:diagnose:releaseupdates** to check for org Release Updates from Monitoring or locally + - **hardis:misc:purge-references** to partially automate the cleaning of related dependencies when you need to delete a field, or change its type (for example from master detail to lookup) + - **hardis:project:clean:sensitive-metadatas** to mask sensitive metadatas from git repo (ex: Certificate content) +- **hardis:work:save** and **hardis:project:deploy:sources:dx**: Improve runtime performances thanks to internalization of sfdx-essentials commands +- **hardis:work:new** + - Allow to add labels in property `availableTargetBranches`, using a comma. For examples, `- integration,Choose this branch if you are on the BUILD side of the project !` + - Add current default org in the choices when prompting which org to use +- **hardis:project:new** + - Initialize autoCleanTypes with **destructivechanges**, **flowPositions** and **minimizeProfiles** + - Initialize package-no-overwrite.xml with Certificate metadata. (certificates must be uploaded manually) +- **hardis:org:files:export**: Improve display with spinner +- **hardis:org:purge:flow**: If FlowInterview records are preventing Flow Versions to be deleted, prompt user to delete Flow Interviews before trying again to delete Flow Versions +- **hardis:project:generate:gitdelta**: Add option to generate package.xml related to a single commit +- **hardis:org:data:delete**: Check for property "runnableInProduction" in export.json before running deletion in production org. +- **hardis:org:diagnose:audittrail**: Add new filtered actions + - Customer Portal: createdcustomersuccessuser +- Authentication: do not use alias MY_ORG anymore + do not update local user config if no values to replace. 
+- When selecting an org, make sure it is still connected. If not, open browser so the user can authenticate again. +- Update sfdx-hardis Grafana Dashboards to import in your Grafana Cloud + - SF Instance name + - Next platform upgrade + - Release Updates to check + - Installed packages + - Org licenses +- AI Deployment assistant + - Add error `Change Matching Rule` +- Git Providers + - On Pull Requests / Merge Requests comments, add hyperlinks to errors documentation URL + +### Fixes + +- Avoid error when removing obsolete flows (workaround using SF CLI if tooling api connection fails). Fixes [#662](https://github.com/hardisgroupcom/sfdx-hardis/issues/662) +- Improve Slack/Teams notifications display +- Display explicit error message in case a password is required to install a managed package. + +### Documentation + +- Reorganize README content + - Add link to Dreamforce 24 session +- Deployment assistant: Improve documentation by adding examples of errors, and a standalone page for each tip +- Factorize the definition of DOC_ROOT_URL + +### Deprecations + +- Deprecate wrapper commands matching sfdx commands that will be removed. 
All replaced by sf hardis deploy start + + - `sfdx hardis:source:push` + - `sfdx hardis:source:deploy` + - `sfdx hardis:mdapi:retrieve` + - `sfdx hardis:mdapi:deploy` + +- Deprecate `hardis:deploy:sources:metadata` as nobody uses metadata format anymore + +### Removals + +- Replace puppeteer by puppeteer-core: it means that if you use a command requiring puppeteer, please make sure to have a Chrome available in your environment (already integrated within the Docker image) + +- Get rid of [sfdx-essentials](https://github.com/nvuillam/sfdx-essentials) plugin dependency by internalizing its used commands + + - `sf hardis:packagexml:append` + - `sf hardis:packagexml:remove` + - `sf hardis:project:clean:filter-xml-content` + +- Remove npm dependencies (some of them not maintained anymore) + + - @adobe/node-fetch-retry + - @amplitude/node + - @keyv/redis + - @oclif/command + - @oclif/config + - @oclif/errors + - @salesforce/command + - @salesforce/ts-types + - find-package-json + - node-fetch + +- Remove not used keyValueStores to keep only Salesforce one + ## [4.53.0] 2024-08-20 - Upgrade workflows to Node 20 (fixes ) @@ -66,7 +1142,6 @@ Note: Can be used with `sfdx plugins:install sfdx-hardis@beta` and docker image - [hardis:project:deploy:sources:dx](https://sfdx-hardis.cloudity.com/hardis/project/deploy/sources/dx/): Allow new mode for running test during deployments: **RunRepositoryTestsExceptSeeAllData** (⚠️ Use with caution !) 
- ## [4.47.0] 2024-07-22 - Update emojis in prompts to make them more visible @@ -177,7 +1252,7 @@ commandsPostDeploy: ## [4.38.2] 2024-06-06 - Fix npm packages installation for GitHub monitoring to avoid random failures -- Add _notifKey in Grafana notifications to be able to build unique alerts +- Add \_notifKey in Grafana notifications to be able to build unique alerts ## [4.38.1] 2024-06-04 @@ -240,7 +1315,7 @@ commandsPostDeploy: ## [4.34.1] 2024-05-13 -- Notifications org identifier: replace dot by __ to avoid mess with Grafana label filters +- Notifications org identifier: replace dot by \_\_ to avoid mess with Grafana label filters ## [4.34.0] 2024-05-12 @@ -282,6 +1357,7 @@ commandsPostDeploy: ## [4.32.0] 2024-04-24 - Enhance [BitBucket Integration](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integrations-bitbucket/), by @Alainbates in + - Deployment status in Pull Request comments - Quick Deploy to enhance performance @@ -515,7 +1591,7 @@ commandsPostDeploy: - **Delta deployments** is no more beta but **Generally available** - **Org Monitoring** is no more beta but **Generally available** -- Generate CSV reports also in XSLX format for easier opening +- Generate CSV reports also in XLSX format for easier opening ## [4.17.1] 2023-11-28 diff --git a/Dockerfile b/Dockerfile index 6035268fd..1d780d9ac 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,30 @@ # Docker image to run sfdx-hardis -FROM alpine:3.18 +FROM python:3.12.10-alpine3.22 -LABEL maintainer="Nicolas VUILLAMY " +LABEL maintainer="Nicolas VUILLAMY " RUN apk add --update --no-cache \ - chromium \ + coreutils \ git \ bash \ nodejs \ - npm + npm \ + # Required for docker + docker \ + openrc \ + # Required for puppeteer + chromium \ + nss \ + freetype \ + harfbuzz \ + ca-certificates \ + ttf-freefont && \ + # Clean up package cache + rm -rf /var/cache/apk/* + +# Start docker daemon in case mermaid-cli image is used +RUN rc-update add docker boot && (rc-service docker start || true) # 
Do not use puppeteer embedded chromium ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" @@ -27,13 +42,17 @@ ARG SFDX_HARDIS_VERSION=latest RUN npm install --no-cache yarn -g && \ npm install --no-cache @salesforce/cli@${SFDX_CLI_VERSION} -g && \ sf plugins install @salesforce/plugin-packaging && \ - echo 'y' | sfdx plugins:install sfdx-hardis@${SFDX_HARDIS_VERSION} && \ - echo 'y' | sfdx plugins:install sfdmu && \ - echo 'y' | sfdx plugins:install sfdx-git-delta && \ - echo 'y' | sfdx plugins:install sfdx-essentials && \ - echo 'y' | sfdx plugins:install texei-sfdx-plugin && \ + sf plugins install @salesforce/plugin-deploy-retrieve && \ + echo 'y' | sf plugins install sfdx-hardis@${SFDX_HARDIS_VERSION} && \ + echo 'y' | sf plugins install sfdx-git-delta && \ + echo 'y' | sf plugins install sfdmu && \ sf version --verbose --json && \ - rm -rf /root/.npm/_cacache + # Clean up npm cache and temporary files + rm -rf /root/.npm/_cacache && \ + rm -rf /tmp/* && \ + npm cache clean --force + +ENV MERMAID_MODES="docker" # Workaround for https://github.com/forcedotcom/salesforcedx-apex/issues/213 COPY ref/workarounds/dateUtil.js /usr/local/lib/node_modules/@salesforce/cli/node_modules/@salesforce/apex-node/lib/src/utils/dateUtil.js diff --git a/Dockerfile-ubuntu b/Dockerfile-ubuntu new file mode 100644 index 000000000..2475b49ff --- /dev/null +++ b/Dockerfile-ubuntu @@ -0,0 +1,90 @@ +# Docker image to run sfdx-hardis + +FROM ubuntu:latest + +LABEL maintainer="Nicolas VUILLAMY " + +ENV SF_DATA_DIR=/usr/local/lib + +# Update package list and install required packages +RUN apt-get update && apt-get install -y --no-install-recommends \ + git \ + curl \ + gnupg \ + # Required for Python + python3 \ + python3-pip \ + python3-venv \ + # Required for docker + docker.io \ + fonts-liberation \ + # Required for mermaid cli + libglib2.0-0 \ + libnss3 \ + libx11-6 \ + libx11-xcb1 \ + libxcb1 \ + libxcomposite1 \ + libxcursor1 \ + libxdamage1 \ + libxext6 \ + libxfixes3 \ + libxi6 \ + 
libxtst6 \ + libdrm2 \ + libgbm1 \ + libpango-1.0-0 \ + libatk1.0-0 \ + libcups2 \ + libatspi2.0-0 \ + libgtk-3-0 \ + libasound2t64 \ + wget \ + xdg-utils \ + libvulkan1 \ + && \ + # Create python symlink for compatibility + ln -sf /usr/bin/python3 /usr/bin/python && \ + ln -sf /usr/bin/pip3 /usr/bin/pip && \ + # Install Node.js + curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \ + apt-get install -y nodejs && \ + # Chrome installation + wget -q -O google-chrome-stable_current_amd64.deb https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \ + apt-get install -y ./google-chrome-stable_current_amd64.deb && \ + rm google-chrome-stable_current_amd64.deb && \ + # Clean up + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Add node packages to path +ENV PATH="/node_modules/.bin:${PATH}" + +# Chromium path for puppeteer +ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" +ENV CHROMIUM_PATH="/usr/bin/google-chrome" +ENV PUPPETEER_EXECUTABLE_PATH="${CHROMIUM_PATH}" + +ARG SFDX_CLI_VERSION=latest +ARG SFDX_HARDIS_VERSION=latest + +# Install npm packages +install sfdx plugins & display versions +RUN npm install --no-cache yarn -g && \ + npm install --no-cache @salesforce/cli@${SFDX_CLI_VERSION} -g && \ + npm install --no-cache @mermaid-js/mermaid-cli -g && \ + sf plugins install @salesforce/plugin-packaging && \ + sf plugins install @salesforce/plugin-deploy-retrieve && \ + echo 'y' | sf plugins install sfdx-hardis@${SFDX_HARDIS_VERSION} && \ + echo 'y' | sf plugins install sfdx-git-delta && \ + echo 'y' | sf plugins install sfdmu && \ + sf version --verbose --json && \ + # Clean up npm cache and temporary files + rm -rf /root/.npm/_cacache && \ + rm -rf /tmp/* && \ + npm cache clean --force + +ENV MERMAID_MODES="cli" + +# Workaround for https://github.com/forcedotcom/salesforcedx-apex/issues/213 +COPY ref/workarounds/dateUtil.js /usr/local/lib/node_modules/@salesforce/cli/node_modules/@salesforce/apex-node/lib/src/utils/dateUtil.js +COPY 
ref/workarounds/junitReporter.js /usr/local/lib/node_modules/@salesforce/cli/node_modules/@salesforce/apex-node/lib/src/reporters/junitReporter.js diff --git a/GEMINI.md b/GEMINI.md new file mode 100644 index 000000000..207193101 --- /dev/null +++ b/GEMINI.md @@ -0,0 +1,7 @@ +# Gemini Instructions + +For all development, coding standards, and project guidelines, please refer to the main Copilot instructions: + +[.github/copilot-instructions.md](../.github/copilot-instructions.md) + +All requirements, best practices, and conventions for this project are defined in that file. No additional Gemini-specific instructions are provided. diff --git a/README.md b/README.md index 86db80787..0a6d542da 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,8 @@ -[![sfdx-hardis by Cloudity Banner](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/sfdx-hardis-banner.png)](https://sfdx-hardis.cloudity.com) -# sfdx-hardis +[![sfdx-hardis by Cloudity Banner](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/sfdx-hardis-banner.png)](https://sfdx-hardis.cloudity.com) -[_Presented at Dreamforce 23!_](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl) +_Presented at_ [_Dreamforce 23_](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl) _and [_Dreamforce 24!_](https://reg.salesforce.com/flow/plus/df24/sessioncatalog/page/catalog/session/1718915808069001Q7HH)_ [![Version](https://img.shields.io/npm/v/sfdx-hardis.svg)](https://npmjs.org/package/sfdx-hardis) [![Downloads/week](https://img.shields.io/npm/dw/sfdx-hardis.svg)](https://npmjs.org/package/sfdx-hardis) @@ -16,35 +15,65 @@ [![License](https://img.shields.io/npm/l/sfdx-hardis.svg)](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/package.json) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com) -Toolbox for Salesforce 
DX, by [Cloudity](https://cloudity.com/), natively compliant with most platforms and tools +Sfdx-hardis is a **CLI and visual productivity tools suite for Salesforce**, by [**Cloudity**](https://cloudity.com/) & friends, natively compliant with most Git platforms, messaging tools, ticketing systems and AI providers (including Agentforce). ![Native Integrations](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/integrations.png) -It will allow you to: +It is free and Open-Source, and will allow you to simply: + +- [Deliver your projects with **State of the art Salesforce DevOps**](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) + +![DevOps Pipeline UI](https://sfdx-hardis.cloudity.com/assets/images/DevOpsPipelineUI.png) + +- [**Backup Metadatas** and **Monitor your Salesforce orgs**](https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/) + +![Monitoring with Grafana](https://sfdx-hardis.cloudity.com/assets/images/grafana-screenshot.jpg) + +- [Generate your **Project Documentation** with AI](https://sfdx-hardis.cloudity.com/salesforce-project-documentation/) + +![Salesforce AI Generated Documentation](https://sfdx-hardis.cloudity.com/assets/images/screenshot-object-diagram.jpg) -- Do with simple commands what could be done manually in minutes/hours -- [Define a complete CI/CD Pipeline for your Salesforce project](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) -- [Backup Metadatas and monitor any Salesforce org](https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/) +- Enjoy many commands that will **save your minutes, hours or even days** in your daily **Admin or Developer** work. 
-[**Please see the full list of commands in Online documentation**](https://sfdx-hardis.cloudity.com) +![Productivity commands](https://sfdx-hardis.cloudity.com/assets/images/ProductivityCommands.png) -**sfdx-hardis** commands are also available with UI in [**SFDX Hardis Visual Studio Code Extension**](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) +If you need guidance about how to leverage sfdx-hardis to bring more value to your business, Cloudity's international multi-cloud teams of business experts and technical experts can help: [contact us](https://cloudity.com/#form) ! + +[_See online documentation for a better navigation_](https://sfdx-hardis.cloudity.com) + +___ + +**sfdx-hardis** commands and configuration are best used with an UI in [**SFDX Hardis Visual Studio Code Extension**](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) [![VsCode SFDX Hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/extension-demo.gif)](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) +___ + _See Dreamforce presentation_ -[![See Dreamforce presentation](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs) +[![See Dreamforce presentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/play-dreamforce-session.png)](https://www.youtube.com/watch?v=o0Mm9F07UFs) ## Installation + + ### With IDE -You can install [Visual Studio Code](https://code.visualstudio.com/) extension [VsCode SFDX Hardis](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) +You can install [Visual Studio Code](https://code.visualstudio.com/), then VSCode Extension [VsCode SFDX Hardis](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) + +Once installed, click on ![Hardis Group 
button](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/hardis-button.jpg) in VsCode left bar, click on **Install dependencies** and follow the additional installation instructions :) + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/install-dependencies-highlight.png) + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/install-dependencies-screenshot.png) -Once installed, click on ![Hardis Group button](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/hardis-button.jpg) in VsCode left bar, and follow the additional installation instructions +When you are all green, you are all good 😊 -[![Installation tutorial](https://img.youtube.com/vi/LA8m-t7CjHA/0.jpg)](https://www.youtube.com/watch?v=LA8m-t7CjHA) +_You can also watch the video tutorial below_ + +[![Installation tutorial](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/play-install-tuto.png)](https://www.youtube.com/watch?v=LA8m-t7CjHA) + +___ ### As SFDX Plugin @@ -56,42 +85,126 @@ Once installed, click on ![Hardis Group button](https://github.com/hardisgroupco #### Plugin installation ```sh-session -sfdx plugins:install sfdx-hardis +sf plugins install sfdx-hardis ``` For advanced use, please also install dependencies ```sh-session sf plugins install @salesforce/plugin-packaging -sfdx plugins:install sfdmu -sfdx plugins:install sfdx-git-delta -sfdx plugins:install sfdx-essentials -sfdx plugins:install texei-sfdx-plugin +sf plugins install sfdx-git-delta +sf plugins install sfdmu ``` -If you are using CI/CD scripts, use `echo y | sfdx plugins:install ...` to bypass prompt. +If you are using CI/CD scripts, use `echo y | sf plugins install ...` to bypass prompt. + +___ ### Docker -You can use sfdx-hardis docker images to run in CI +You can use sfdx-hardis docker images to run in CI. 
+ +> All our Docker images are checked for security issues with [MegaLinter by OX Security](https://megalinter.io/latest/) -- Docker Hub - - [**hardisgroupcom/sfdx-hardis:latest**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with latest @salesforce/cli version) - - [**hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) +- Linux **Alpine** based images (works on Gitlab) -- GitHub Packages (ghcr.io) - - [**ghcr.io/hardisgroupcom/sfdx-hardis:latest**](https://github.com/orgs/hardisgroupcom/packages) (with latest @salesforce/cli version) - - [**ghcr.io/hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://github.com/orgs/hardisgroupcom/packages) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + - Docker Hub + + - [**hardisgroupcom/sfdx-hardis:latest**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with latest @salesforce/cli version) + - [**hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + + - GitHub Packages (ghcr.io) + + - [**ghcr.io/hardisgroupcom/sfdx-hardis:latest**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis) (with latest @salesforce/cli version) + - [**ghcr.io/hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) _See [Dockerfile](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/Dockerfile)_ +- Linux **Ubuntu** based images (works on GitHub, Azure & Bitbucket) + + - [**hardisgroupcom/sfdx-hardis-ubuntu:latest**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis-ubuntu) 
(with latest @salesforce/cli version) + - [**hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis-ubuntu) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + + - GitHub Packages (ghcr.io) + + - [**ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:latest**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis-ubuntu) (with latest @salesforce/cli version) + - [**ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis-ubuntu) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + +_See [Dockerfile-ubuntu](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/Dockerfile-ubuntu)_ + + + ## Usage ```sh-session -sfdx hardis: +sf hardis: ``` -## Articles +## Events + + + +### London's Calling '25, London + +Auto-generate your SF project Documentation site with open-source and Agentforce + +![image](https://github.com/user-attachments/assets/9b99120c-b660-4f67-b734-793148ac9d00) + +### Czech Dreamin '25, Prague + +Auto-generate your SF project Documentation site with open-source and Agentforce, with [Mariia Pyvovarchuk](https://www.linkedin.com/in/mpyvo/) + +![Czech Dreamin 2025](https://github.com/user-attachments/assets/fa7b7f12-6d6a-437c-badd-20a626bb2163) + +### Trailblazer Admin Group '25, Lyon + +Techs for Admins: Afterwork Salesforce Inspector Reloaded & sfdx-hardis, with Thomas Prouvot + +![](https://github.com/user-attachments/assets/90621fe0-6527-4a34-8a0b-c14bd6d21cbd) + +### Dreamforce 2024, San Francisco + +[Save the Day by Monitoring Your Org with Open-Source Tools](https://reg.salesforce.com/flow/plus/df24/sessioncatalog/page/catalog/session/1718915808069001Q7HH), with Olga Shirikova + +[![Dreamforce 2024 Video](https://img.youtube.com/vi/NxiLiYeo11A/0.jpg)](https://www.youtube.com/watch?v=NxiLiYeo11A) + +### 
Wir Sind Ohana '24, Berlin + +Automate the Monitoring of your Salesforce orgs with open-source tools only!, with Yosra Saidani + +[![Wir Sind Ohana Video](https://img.youtube.com/vi/xGbT6at7RZ0/0.jpg)](https://www.youtube.com/watch?v=xGbT6at7RZ0) + +### Polish Dreamin '24, Wroclaw, Poland + +[Easy and complete Salesforce CI/CD with open-source only!](https://coffeeforce.pl/dreamin/speaker/nicolas-vuillamy/), with Wojciech Suwiński + +![Polish Dreamin 2024](https://github.com/nvuillam/nvuillam/assets/17500430/e843cc08-bf8a-452d-b7f0-c64a314f1b60) + +### French Touch Dreamin '23, Paris + +[Automate the Monitoring of your Salesforce orgs with open-source tools only!](https://frenchtouchdreamin.com/index.php/schedule/), with Maxime Guenego + +![French Touch Dreamin 2023](https://github.com/nvuillam/nvuillam/assets/17500430/8a2e1bbf-3402-4929-966d-5f99cb13cd29) + +### Dreamforce 2023, San Francisco + +[Easy Salesforce CI/CD with open-source and clicks only thanks to sfdx-hardis!](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl), with Jean-Pierre Rizzi + +[![Dreamforce 2023 Video](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs) + +### Yeur Dreamin' 2023, Brussels + +An easy and complete Salesforce CI/CD release management with open-source only !, with Angélique Picoreau + +[![image](https://github.com/nvuillam/nvuillam/assets/17500430/6470df20-7449-444b-a0a5-7dc22f5f6188)](https://www.linkedin.com/posts/nicolas-vuillamy_cicd-opensource-trailblazercommunity-activity-7076859027321704448-F1g-?utm_source=share&utm_medium=member_desktop) + + + +## Articles & Videos + + + +### Web Articles Here are some articles about [sfdx-hardis](https://sfdx-hardis.cloudity.com/) @@ -114,9 +227,115 @@ Here are some articles about [sfdx-hardis](https://sfdx-hardis.cloudity.com/) - [Exporter en masse les fichiers d’une org 
Salesforce](https://leblog.hardis-group.com/portfolio/exporter-en-masse-les-fichiers-dune-org-salesforce/) - [Suspendre l’accès aux utilisateurs lors d’une mise en production Salesforce](https://leblog.hardis-group.com/portfolio/suspendre-lacces-aux-utilisateurs-lors-dune-mise-en-production-salesforce/) +### Recorded Conferences + +#### Dreamforce Sessions + +- Dreamforce 2024 - Save the Day by Monitoring Your Org with Open-Source Tools (with Olga Shirikova) + +[![Dreamforce 2024: Save the Day by Monitoring Your Org with Open-Source Tools](https://img.youtube.com/vi/NxiLiYeo11A/0.jpg)](https://www.youtube.com/watch?v=NxiLiYeo11A){target=blank} + +- Dreamforce 2023 - Easy Salesforce CI/CD with open-source and clicks only thanks to sfdx-hardis! (with Jean-Pierre Rizzi) + +[![Dreamforce 2023: Easy Salesforce CI/CD with open-source](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs){target=blank} + +#### Community Events + +- Wir Sind Ohana 2024 - Automate the Monitoring of your Salesforce orgs with open-source tools only! 
(with Yosra Saidani) + +[![Wir Sind Ohana 2024: Automate Monitoring with Open-Source](https://img.youtube.com/vi/xGbT6at7RZ0/0.jpg)](https://www.youtube.com/watch?v=xGbT6at7RZ0){target=blank} + +### Podcasts + +- Apex Hours 2025 - Org monitoring with Grafana + AI generated doc + +[![Apex Hours 2025: Org monitoring with Grafana + AI generated doc](https://img.youtube.com/vi/oDaCh66pRcI/0.jpg)](https://www.youtube.com/watch?v=oDaCh66pRcI){target=blank} + +- Salesforce Way Podcast #102 - Sfdx-hardis with Nicolas Vuillamy + +[![Salesforce Way Podcast: Sfdx-hardis](https://img.youtube.com/vi/sfdx-hardis/0.jpg)](https://salesforceway.com/podcast/sfdx-hardis/){target=blank} + +- Salesforce Developers Podcast Episode 182: SFDX-Hardis with Nicolas Vuillamy + +[![Salesforce Developers Podcast](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-sfdev.jpg)](https://developer.salesforce.com/podcast/2023/06/sfdx){target=blank} + +### sfdx-hardis Usage + +#### Features Overview + +- sfdx-hardis 2025 new features overview + +[![sfdx-hardis 2025 new features](https://img.youtube.com/vi/JRKH5COUVQ0/0.jpg)](https://youtu.be/JRKH5COUVQ0){target=blank} + +- SFDX-HARDIS – A demo with Nicolas Vuillamy from Cloudity + +[![SalesforceDevOps.net Demo](https://img.youtube.com/vi/qP6MaZUGzik/0.jpg)](https://www.youtube.com/watch?v=qP6MaZUGzik){target=blank} + +#### Installation & Setup + +- Complete installation tutorial for sfdx-hardis - [📖 Documentation](https://sfdx-hardis.cloudity.com/installation/) + +[![Installation Tutorial](https://img.youtube.com/vi/LA8m-t7CjHA/0.jpg)](https://www.youtube.com/watch?v=LA8m-t7CjHA){target=blank} + +#### CI/CD Workflows + +- Complete CI/CD workflow for Salesforce projects - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) + +[![Dreamforce demo video: Easy Salesforce CI/CD with sfdx-hardis and open-source only 
!](https://img.youtube.com/vi/zEYqTd2txU4/0.jpg)](https://www.youtube.com/watch?v=zEYqTd2txU4){target=blank} + +- How to start a new User Story in sandbox - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/) + +[![Create New User Story](https://img.youtube.com/vi/WOqssZwjPhw/0.jpg)](https://www.youtube.com/watch?v=WOqssZwjPhw){target=blank} + +- How to commit updates and create merge requests - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) + +[![Publish User Story Tutorial](https://img.youtube.com/vi/Ik6whtflmfY/0.jpg)](https://www.youtube.com/watch?v=Ik6whtflmfY){target=blank} + +- How to resolve git merge conflicts in Visual Studio Code - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-validate-merge-request/) + +[![Merge Conflicts Resolution](https://img.youtube.com/vi/lz5OuKzvadQ/0.jpg)](https://www.youtube.com/watch?v=lz5OuKzvadQ){target=blank} + +- How to install packages in your org - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-work-on-task-install-packages/) + +[![Install Packages Tutorial](https://img.youtube.com/vi/5-MgqoSLUls/0.jpg)](https://www.youtube.com/watch?v=5-MgqoSLUls){target=blank} + +- Configure CI server authentication to Salesforce orgs - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/) + +[![Configure CI Authentication](https://img.youtube.com/vi/OzREUu5utVI/0.jpg)](https://www.youtube.com/watch?v=OzREUu5utVI){target=blank} + +#### Monitoring + +- How to configure monitoring for your Salesforce org - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-monitoring-config-home/) + +[![Org Monitoring Setup](https://img.youtube.com/vi/bcVdN0XItSc/0.jpg)](https://www.youtube.com/watch?v=bcVdN0XItSc){target=blank} + +#### Integrations + +- Configure Slack integration for deployment notifications - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-slack/) + 
+[![Slack Integration](https://img.youtube.com/vi/se292ABGUmI/0.jpg)](https://www.youtube.com/watch?v=se292ABGUmI){target=blank} + +- How to create a Personal Access Token in GitLab - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-clone-repository/) + +[![GitLab Personal Access Token](https://img.youtube.com/vi/9y5VmmYHuIg/0.jpg)](https://www.youtube.com/watch?v=9y5VmmYHuIg){target=blank} + +#### Documentation + +- How to generate AI-enhanced Salesforce project documentation - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-project-doc-generate/) + +[![Generate Project Documentation](https://img.youtube.com/vi/ZrVPN3jp1Ac/0.jpg)](https://www.youtube.com/watch?v=ZrVPN3jp1Ac){target=blank} + +- Host your documentation on Cloudflare free tier - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-project-doc-cloudflare/) + +[![Cloudflare Doc Hosting Setup](https://img.youtube.com/vi/AUipbKjgsDI/0.jpg)](https://www.youtube.com/watch?v=AUipbKjgsDI){target=blank} + + + ## Contributing -Anyone is welcome to contribute to this sfdx-hardis + + +Everyone is welcome to contribute to sfdx-hardis (even juniors: we'll assist you !) 
- Install Node.js ([recommended version](https://nodejs.org/en/)) - Install typescript by running `npm install typescript --global` @@ -127,29 +346,46 @@ Anyone is welcome to contribute to this sfdx-hardis - Run `yarn` to install dependencies - Run `sf plugins link` to link the local sfdx-hardis to SFDX CLI - Run `tsc --watch` to transpile typescript into js everytime you update a TS file -- Debug commands using `NODE_OPTIONS=--inspect-brk sfdx hardis:somecommand -someparameter somevalue` +- Debug commands using `NODE_OPTIONS=--inspect-brk sf hardis:somecommand -someparameter somevalue` + + ## Dependencies **sfdx-hardis** partially relies on the following SFDX Open-Source packages -- [Salesforce Data Move Utility](https://github.com/forcedotcom/SFDX-Data-Move-Utility) -- [SFDX Essentials](https://github.com/nvuillam/sfdx-essentials) - [SFDX Git Delta](https://github.com/scolladon/sfdx-git-delta) -- [Texei Sfdx Plugin](https://github.com/texei/texei-sfdx-plugin) +- [Salesforce Data Move Utility](https://github.com/forcedotcom/SFDX-Data-Move-Utility) ## Contributors + + +### Organization + +sfdx-hardis is primarily led by Nicolas Vuillamy & [Cloudity](https://www.cloudity.com/), but has many external contributors that we cant thank enough ! 
+ +### Pull Requests Authors + -## Commands - -[**Read Online Documentation**](https://sfdx-hardis.cloudity.com) +### Special Thanks +- [Roman Hentschke](https://www.linkedin.com/in/derroman/), for building the BitBucket CI/CD integration +- [Leo Jokinen](https://www.linkedin.com/in/leojokinen/), for building the GitHub CI/CD integration +- [Mariia Pyvovarchuk](https://www.linkedin.com/in/mpyvo/), for her work about generating automations documentation +- [Matheus Delazeri](https://www.linkedin.com/in/matheus-delazeri-souza/), for the PDF output of documentation +- [Taha Basri](https://www.linkedin.com/in/tahabasri/), for his work about generating documentation of LWC +- [Anush Poudel](https://www.linkedin.com/in/anushpoudel/), for integrating sfdx-hardis with multiple LLMs using langchainJs +- [Sebastien Colladon](https://www.linkedin.com/in/sebastien-colladon/), for providing sfdx-git-delta which is highly used within sfdx-hardis +- [Stepan Stepanov](https://www.linkedin.com/in/stepan-stepanov-79a48734/), for implementing the deployment mode _delta with dependencies_ + +## Commands +[**Read Online Documentation to see everything you can do with SFDX Hardis :)**](https://sfdx-hardis.cloudity.com) \ No newline at end of file diff --git a/SECURITY.md b/SECURITY.md index d5b3b3322..dff614e18 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,21 +1,41 @@ # Security Policy +## Introduction + +Salesforce orgs contain critical data, so we are very serious regarding the security around the use of sfdx-hardis locally or from CI/CD servers. + ## Supported Versions -Use this section to tell people about which versions of your project are -currently being supported with security updates. +Always use the latest sfdx-hardis version to be up to date with security updates. 
-| Version | Supported | -|---------|--------------------| -| 5.1.x | :white_check_mark: | -| 5.0.x | :x: | -| 4.0.x | :white_check_mark: | -| < 4.0 | :x: | +## Supply Chain Security -## Reporting a Vulnerability +### Continuous Scanning + +All development and release workflows contain security checks using [Trivy](https://trivy.dev/latest/) + +- Scan npm package files + +- Scan docker images + +Some exceptions has been added in [.trivyignore config file](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/.trivyignore), with comments explaining why these CVE are not risky within sfdx-hardis usage. + +You can find security scan results and SBOM (Software Build Of Materials) in CycloneDX and SPDX formats in the [artifacts of release workflows](https://github.com/hardisgroupcom/sfdx-hardis/actions/workflows/deploy-RELEASE.yml) or directly at the end of the Release notes. -Use this section to tell people how to report a vulnerability. +![Security artifacts screenshot](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-security-artifacts-1.jpg) + +### Dependencies + +We are using [dependabot](https://github.com/dependabot) to keep dependencies up to date. + +## Architecture + +- sfdx-hardis plugin is built using the latest [sfdx-plugin framework provided by Salesforce](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_plugins.meta/sfdx_cli_plugins/cli_plugins.htm), including the use of official CI/CD workflows used by official Salesforce CLI plugins. + +- Authentication between sfdx-hardis and Salesforce orgs are performed using a Connect App created during configuration. Each connection requires 2 secured environment variables: one with the connected app Client Id, and one used to decrypt "on the fly" an encrypted self-signed certificate stored in the repository. + +- There is no embedded telemetry: sfdx-hardis maintainers have 0 information about sfdx-hardis command line usage, and it is by design. 
+ +## Reporting a Vulnerability -Tell them where to go, how often they can expect to get an update on a -reported vulnerability, what to expect if the vulnerability is accepted or -declined, etc. +In case of detected vulnerability, please write directly to [Nicolas Vuillamy on LinkedIn](https://www.linkedin.com/in/nicolas-vuillamy/) diff --git a/bin/dev.cmd b/bin/dev.cmd new file mode 100644 index 000000000..cec553be4 --- /dev/null +++ b/bin/dev.cmd @@ -0,0 +1,3 @@ +@echo off + +node --loader ts-node/esm --no-warnings=ExperimentalWarning "%~dp0\dev" %* diff --git a/bin/dev.js b/bin/dev.js new file mode 100644 index 000000000..89a549a7f --- /dev/null +++ b/bin/dev.js @@ -0,0 +1,8 @@ +#!/usr/bin/env -S node --loader ts-node/esm --no-warnings=ExperimentalWarning +// eslint-disable-next-line node/shebang +async function main() { + const { execute } = await import('@oclif/core'); + await execute({ development: true, dir: import.meta.url }); +} + +await main(); diff --git a/bin/run b/bin/run deleted file mode 100644 index 3c4ae3ac0..000000000 --- a/bin/run +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env node - -require('@oclif/command').run() -.catch(require('@oclif/errors/handle')) diff --git a/bin/run.js b/bin/run.js new file mode 100755 index 000000000..cf13fb937 --- /dev/null +++ b/bin/run.js @@ -0,0 +1,9 @@ +#!/usr/bin/env node + +// eslint-disable-next-line node/shebang +async function main() { + const { execute } = await import('@oclif/core'); + await execute({ dir: import.meta.url }); +} + +await main(); diff --git a/build.cjs b/build.cjs new file mode 100644 index 000000000..a9a38dbe4 --- /dev/null +++ b/build.cjs @@ -0,0 +1,237 @@ +#!/usr/bin/node +/* eslint-disable */ +const fs = require("fs-extra"); +const yaml = require("js-yaml"); + +class SfdxHardisBuilder { + async run() { + console.log("Start additional building of sfdx-hardis repository..."); + await this.generatePagesFromReadme(); + await this.buildDeployTipsDoc(); + await 
this.buildPromptTemplatesDocs(); + this.truncateReadme(); + // this.fixOnlineIndex(); + console.log("All done."); + } + + async buildDeployTipsDoc() { + console.log("Building salesforce-deployment-assistant-error-list.md doc..."); + const deployTipsDocFile = "./docs/salesforce-deployment-assistant-error-list.md"; + const { getAllTips } = await import("./lib/common/utils/deployTipsList.js"); + const deployTips = getAllTips(); + const deployTipsMd = [ + "---", + "title: Sfdx-hardis deployment assistant list of errors", + "description: List of errors that are handled by sfdx-hardis deployment assistant", + "---", + "", + "", + "# Salesforce deployment assistant errors list", + "", + "sfdx-hardis can help solve solve deployment errors using a predefined list of issues and associated solutions", + "", + "See how to [setup sfdx-hardis deployment assistant](salesforce-deployment-assistant-setup.md)", + "", + "If you see a deployment error which is not here yet, please [add it in this file](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/deployTipsList.ts) :)", + "" + ]; + for (const tip of deployTips) { + const linkName = `sf-deployment-assistant/${tip.label.replace(/[^a-zA-Z0-9 -]|\s/g, '-')}.md` + const tipFile = `./docs/` + linkName; + this.buildIndividualMarkdownPageForTip(tip, tipFile); + this.buildMainDeployFixesMarkdown(tip, deployTipsMd, linkName); + } + fs.writeFileSync(deployTipsDocFile, deployTipsMd.join("\n") + "\n"); + console.log("Written doc file " + deployTipsDocFile); + } + + buildMainDeployFixesMarkdown(tip, deployTipsMd, linkName) { + if (!tip.label) { + throw new Error(`Missing label for ${JSON.stringify(tip)}`); + } + deployTipsMd.push(`## [${tip.label}](${linkName})`); + deployTipsMd.push(...["", "**Detection**", ""]); + if (tip.expressionRegex) { + deployTipsMd.push(...tip.expressionRegex.map((regEx) => "- RegExp: `" + regEx.toString().slice(1).replace("/gm", "") + "`")); + } + if (tip.expressionString) { + 
deployTipsMd.push(...tip.expressionString.map((str) => "- String: `" + str + "`")); + } + if (tip.examples) { + deployTipsMd.push(...["", "**Examples**", ""]); + deployTipsMd.push(...tip.examples.map((str) => "- `" + str + "`")); + } + deployTipsMd.push(...["", "**Resolution**", ""]); + if (!tip.tip) { + throw new Error(`Missing tip for ${JSON.stringify(tip)}`); + } + deployTipsMd.push("```shell"); + deployTipsMd.push(...tip.tip.split("\n")); + deployTipsMd.push("```"); + deployTipsMd.push(""); + deployTipsMd.push("---"); + } + + buildIndividualMarkdownPageForTip(tip, tipFile) { + const errorDescription = tip?.examples?.length > 0 ? tip.examples[0] : + tip?.expressionString?.length > 0 ? tip?.expressionString[0] : + tip.expressionRegex[0].toString().replace("/Error", "Error").replace("/gm", "").replace(/\"/gm, '\\\"'); + const tipFileMd = [ + "---", + `title: "${tip.label} (Deployment assistant)"`, + `description: "How to solve Salesforce deployment error \\\"${errorDescription}\\\""`, + "---", + "" + ]; + tipFileMd.push(`# ${tip.label}`); + tipFileMd.push(...["", "## Detection", ""]); + if (tip.expressionRegex) { + tipFileMd.push(...tip.expressionRegex.map((regEx) => "- RegExp: `" + regEx.toString().slice(1).replace("/gm", "") + "`")); + } + if (tip.expressionString) { + tipFileMd.push(...tip.expressionString.map((str) => "- String: `" + str + "`")); + } + if (tip.examples) { + tipFileMd.push(...["", "## Examples", ""]); + tipFileMd.push(...tip.examples.map((str) => "- `" + str + "`")); + } + tipFileMd.push(...["", "## Resolution", ""]); + if (!tip.tip) { + throw new Error(`Missing tip for ${JSON.stringify(tip)}`); + } + tipFileMd.push("```shell"); + tipFileMd.push(...tip.tip.split("\n")); + tipFileMd.push("```"); + fs.writeFileSync(tipFile, tipFileMd.join("\n") + "\n"); + } + + async buildPromptTemplatesDocs() { + console.log("Building prompt templates documentation..."); + const { PROMPT_TEMPLATES } = await 
import("./lib/common/aiProvider/promptTemplates/index.js"); + const { PROMPT_VARIABLES } = await import("./lib/common/aiProvider/promptTemplates/variablesIndex.js"); + const docsPromptDir = "./docs/prompt-templates"; + fs.ensureDirSync(docsPromptDir); + + // Build prompt templates documentation + const promptNav = []; + for (const templateName of Object.keys(PROMPT_TEMPLATES)) { + const templateDocFile = `${docsPromptDir}/${templateName}.md`; + const prompt = PROMPT_TEMPLATES[templateName] + // Read the template file and extract the prompt text + const md = [ + `---`, + `title: ${templateName}`, + `description: Prompt template for ${templateName}`, + `---`, + '', + `# ${templateName}`, + '', + `## Variables`, + "| Name | Description | Example |", + "| :------|:-------------|:---------|", + ...prompt.variables.map( + v => { + // Escape pipe characters in example to avoid breaking the markdown table + let example = String(v.example ?? ""); + // Replace | with \| and newlines with
for markdown table safety + example = example.replace(/\|/g, "\\|").replace(/\n/g, "
"); + return `| **${v.name}** | ${v.description} | \`${example}\` |`; + } + ), + '', + `## Prompt`, + "", + "```", + prompt.text.en, + "```", + '', '## How to override', + + '', + `To define your own prompt text, you can define a local file **config/prompt-templates/${templateName}.txt**`, + ``, + `You can also use the command \`sf hardis:doc:override-prompts\` to automatically create all override template files at once.`, + ``, + `If you do so, please don't forget to use the replacement variables :)` + ]; + fs.writeFileSync(templateDocFile, md.join("\n") + "\n"); + promptNav.push({ [templateName]: `prompt-templates/${templateName}.md` }); + } + + // Build prompt variables documentation in the same folder + const variablesNav = []; + for (const variableName of Object.keys(PROMPT_VARIABLES)) { + const variableDocFile = `${docsPromptDir}/${variableName}.md`; + const variable = PROMPT_VARIABLES[variableName]; + // Read the variable file and extract the variable text + const md = [ + `---`, + `title: ${variableName}`, + `description: Prompt variable for ${variableName}`, + `---`, + '', + `# ${variableName}`, + '', + `## Description`, + '', + 'This is a reusable prompt variable that provides common instructions across multiple prompt templates.', + '', + `## Content`, + "", + "```", + variable.text.en, + "```", + '', + '## How to override', + '', + `To define your own variable content, you can define a local file **config/prompt-templates/${variableName}.txt**`, + ``, + `You can also use the command \`sf hardis:doc:override-prompts\` to automatically create all override variable files at once.`, + `` + ]; + fs.writeFileSync(variableDocFile, md.join("\n") + "\n"); + promptNav.push({ [variableName]: `prompt-templates/${variableName}.md` }); + } + + console.log("Prompt templates and variables documentation generated"); + } + + // Read README.md + // Find sub-content between HTML comments and (example: & ) + // For each start & and found, generate a new markdown file in 
docs/ folder with the name PAGENAME.md (example: contributing.md) + async generatePagesFromReadme() { + console.log("Generating pages from README.md..."); + const readmeFile = "./README.md"; + const readmeContent = fs.readFileSync(readmeFile, "utf-8"); + const regex = /([\s\S]*?)/g; + let match; + while ((match = regex.exec(readmeContent)) !== null) { + const pageName = match[1].trim(); + const pageContent = match[2].trim(); + const pageFile = `./docs/${pageName}`; + fs.writeFileSync(pageFile, pageContent + "\n"); + console.log(`Generated ${pageFile}`); + } + console.log("All pages generated from README.md"); + } + + truncateReadme() { + const readmeFile = "./README.md"; + const readmeContent = fs.readFileSync(readmeFile, "utf-8"); + const chunks = readmeContent.split("") + fs.writeFileSync(readmeFile, chunks[0] + ""); + console.log("Removed README.md commands"); + } + + fixOnlineIndex() { + const indexFile = "./docs/index.md"; + let indexContent = fs.readFileSync(indexFile, "utf-8"); + indexContent = indexContent.replace("[_See online documentation for a better navigation_](https://sfdx-hardis.cloudity.com)", ""); + fs.writeFileSync(indexFile, fixedLines.join("\n")); + console.log("Fixed online index.md links"); + } +} + +(async () => { + await new SfdxHardisBuilder().run(); +})(); + diff --git a/build.js b/build.js deleted file mode 100644 index 3f8015f9c..000000000 --- a/build.js +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/node -/* eslint-disable */ -const fs = require("fs-extra"); -const { getAllTips } = require("./lib/common/utils/deployTipsList"); - -class SfdxHardisBuilder { - run() { - console.log("Start additional building of sfdx-hardis repository..."); - this.buildDeployTipsDoc(); - this.truncateReadme(); - } - - buildDeployTipsDoc() { - console.log("Building salesforce-deployment-assistant-error-list.md doc..."); - const deployTipsDocFile = "./docs/salesforce-deployment-assistant-error-list.md"; - const deployTips = getAllTips(); - const deployTipsMd = [ 
- "---", - "title: Sfdx-hardis deployment assistant list of errors", - "description: List of errors that are handled by sfdx-hardis deployment assistant", - "---", - "", - "", - "# Salesforce deployment assistant errors list", - "", - "sfdx-hardis can help solve solve deployment errors using a predefined list of issues and associated solutions", - "", - "See how to [setup sfdx-hardis deployment assistant](salesforce-deployment-assistant-setup.md)", - "", - "If you see a deployment error which is not here yet, please [add it in this file](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/deployTipsList.ts) :)", - "" - ]; - for (const tip of deployTips) { - if (!tip.label) { - throw new Error(`Missing label for ${JSON.stringify(tip)}`); - } - deployTipsMd.push(`## ${tip.label}`); - deployTipsMd.push(""); - if (tip.expressionRegex) { - deployTipsMd.push(...tip.expressionRegex.map((regEx) => "- `" + regEx.toString().slice(1).replace("/gm", "") + "`")); - } - if (tip.expressionString) { - deployTipsMd.push(...tip.expressionString.map((str) => "- `" + str + "`")); - } - deployTipsMd.push(...["", "**Resolution tip**", ""]); - if (!tip.tip) { - throw new Error(`Missing tip for ${JSON.stringify(tip)}`); - } - deployTipsMd.push("```shell"); - deployTipsMd.push(...tip.tip.split("\n")); - deployTipsMd.push("```"); - deployTipsMd.push(""); - deployTipsMd.push("---"); - } - fs.writeFileSync(deployTipsDocFile, deployTipsMd.join("\n") + "\n"); - console.log("Written doc file " + deployTipsDocFile); - } - - truncateReadme() { - const readmeFile = "./README.md"; - const readmeContent = fs.readFileSync(readmeFile, "utf-8"); - const chunks = readmeContent.split("") - fs.writeFileSync(readmeFile, chunks[0]+"\n\n"); - console.log("Removed README.md commands"); - } -} - -new SfdxHardisBuilder().run(); diff --git a/commitlint.config.cjs b/commitlint.config.cjs new file mode 100644 index 000000000..422b19445 --- /dev/null +++ b/commitlint.config.cjs @@ -0,0 +1 @@ 
+module.exports = { extends: ['@commitlint/config-conventional'] }; diff --git a/config/sfdx-hardis.jsonschema.json b/config/sfdx-hardis.jsonschema.json index 5ab996680..9c2288a75 100644 --- a/config/sfdx-hardis.jsonschema.json +++ b/config/sfdx-hardis.jsonschema.json @@ -14,12 +14,45 @@ "UNUSED_METADATAS", "METADATA_STATUS", "MISSING_ATTRIBUTES", - "UNUSED_LICENSES" + "UNUSED_LICENSES", + "RELEASE_UPDATES" + ], + "enumNames": [ + "Audit Trail", + "Apex Tests", + "Backup", + "Deployment", + "Legacy API", + "Lint Access", + "Unused Metadatas", + "Metadata Status", + "Missing Attributes", + "Unused Licenses", + "Release Updates" ], "type": "string" }, "enum_monitoring_commands": { - "enum": ["AUDIT_TRAIL", "LEGACY_API", "LINT_ACCESS", "UNUSED_METADATAS", "METADATA_STATUS", "MISSING_ATTRIBUTES", "UNUSED_LICENSES"], + "enum": [ + "AUDIT_TRAIL", + "LEGACY_API", + "LINT_ACCESS", + "UNUSED_METADATAS", + "METADATA_STATUS", + "MISSING_ATTRIBUTES", + "UNUSED_LICENSES", + "RELEASE_UPDATES" + ], + "enumNames": [ + "Audit Trail", + "Legacy API", + "Lint Access", + "Unused Metadatas", + "Metadata Status", + "Missing Attributes", + "Unused Licenses", + "Release Updates" + ], "type": "string" } }, @@ -27,19 +60,37 @@ "properties": { "allowedOrgTypes": { "$id": "#/properties/allowedOrgTypes", - "description": "Types of orgs allowed for config & development. If not set, sandbox and scratch are allowed by default", - "examples": [["sandbox"]], + "description": "Types of Salesforce Orgs allowed for config & development. 
If not set, Sandbox Orgs and Scratch Orgs are allowed by default", + "examples": [ + [ + "sandbox" + ] + ], "items": { "type": "string", - "enum": ["sandbox", "scratch"] + "enum": [ + "sandbox", + "scratch" + ], + "enumNames": [ + "Sandbox Orgs", + "Scratch Orgs" + ] }, "title": "Allowed org types", "type": "array" }, "autoCleanTypes": { "$id": "#/properties/autoCleanTypes", - "description": "When saving a sfdx-hardis task, the list of cleanings will be automatically applied to sfdx sources", - "examples": [["dashboards", "datadotcom", "destructivechanges"]], + "description": "When saving/publishing a User Story, sfdx-hardis can automatically clean the sources before submitting a Pull Request.\nSelect the cleanings that will be automatically applied on your project.", + "docUrl": "https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-cleaning/", + "examples": [ + [ + "dashboards", + "datadotcom", + "destructivechanges" + ] + ], "items": { "type": "string", "enum": [ @@ -53,8 +104,24 @@ "listViewsMine", "minimizeProfiles", "productrequest", + "sensitiveMetadatas", "systemDebug", "v60" + ], + "enumNames": [ + "Case Entitlement", + "Check Permissions", + "Dashboards", + "Data.com", + "Destructive Changes", + "Flow Positions", + "Local Fields", + "List Views Mine", + "Minimize Profiles", + "Product Request", + "Sensitive Metadatas", + "System Debug", + "v60" ] }, "title": "Auto-Cleaning types", @@ -62,8 +129,15 @@ }, "autoRemoveUserPermissions": { "$id": "#/properties/autoRemoveUserPermissions", - "description": "When saving a sfdx-hardis task, these permissions will be removed from profiles", - "examples": [["EnableCommunityAppLauncher", "FieldServiceAccess", "OmnichannelInventorySync"]], + "description": "When your dev sandbox is using the next SF Platform version, sometimes some permissions on Profiles exist on the next version but not the current one.\nTo avoid issues, you can force the removal of such permissions when Saving/Publishing a User Story\nExample:\n- 
EnableCommunityAppLauncher\n- OmnichannelInventorySync", + "docUrl": "https://sfdx-hardis.cloudity.com/hardis/work/save/#command-behavior", + "examples": [ + [ + "EnableCommunityAppLauncher", + "FieldServiceAccess", + "OmnichannelInventorySync" + ] + ], "items": { "type": "string" }, @@ -72,8 +146,17 @@ }, "autoRetrieveWhenPull": { "$id": "#/properties/autoRetrieveWhenPull", - "description": "When calling hardis:scratch:pull, if you define metadatas (named or not), they will also be retrieved using force:source:retrieve", - "examples": [["CustomApplication"], ["CustomApplication:MyApp1", "CustomApplication:MyApp2"]], + "description": "Sometimes, SF Cli forgets to pull some metadata updates, like Custom Application for example.\nsfdx-hardis can automatically retrieve named metadatas to avoid issues.\nExample:\n- CustomApplication:MyApp1\n- CustomApplication:MyApp2\nWorks also with full metadata types (ex: CustomApplication)", + "docUrl": "https://sfdx-hardis.cloudity.com/hardis/scratch/pull/#command-behavior", + "examples": [ + [ + "CustomApplication" + ], + [ + "CustomApplication:MyApp1", + "CustomApplication:MyApp2" + ] + ], "items": { "type": "string" }, @@ -83,15 +166,24 @@ "apexTestsMinCoverageOrgWide": { "$id": "#/properties/apexTestsMinCoverageOrgWide", "default": 75.0, - "description": "Minimum percentage of apex code coverage accepted", - "examples": [80.0, 95.0], - "title": "Minimum apex test coverage %", + "description": "Minimum percentage of apex code coverage accepted.\n75.0% by default, but if you are on a clean project, it's better to define 80.0, 95.0 or 90.0 😎", + "examples": [ + 80.0, + 95.0 + ], + "title": "Minimum Apex Tests coverage % accepted for a deployment", "type": "number" }, "availableProjects": { "$id": "#/properties/availableProjects", - "description": "List of business projects that are managed in the same repository. 
Will be used to build git branch name when using hardis:work:new", - "examples": [["sales_cloud", "service_cloud", "community"]], + "description": "List of business projects that are managed in the same repository.\nIf defined, when creating a new User Story, it will be prompted to the contributor then used to create the git branch name.\nIf a value contains a comma, the left part will be used for key and the right part as label for the users.", + "examples": [ + [ + "sales_cloud", + "service_cloud", + "community" + ] + ], "items": { "type": "string" }, @@ -100,12 +192,17 @@ }, "availableTargetBranches": { "$id": "#/properties/availableTargetBranches", - "description": "List of git branches that can be used as target for merge requests", - "examples": [["develop", "develop_next"]], + "description": "List of git branches that can be used as target for Pull Requests.\nContributors will be prompt to select one of these target branch when creating a new User Story\nA classic example on a project with BUILD & RUN in parallel is to have preprod and integration as available target branches.\nIf defined, makes obsolete the parameter Default Pull Request target branch.", + "examples": [ + [ + "preprod", + "integration" + ] + ], "items": { "type": "string" }, - "title": "Available target branches", + "title": "Available PR/MR target branches", "type": "array" }, "branchPrefixChoices": { @@ -146,11 +243,15 @@ [ { "globPattern": "/**/*.flexipage-meta.xml", - "xpaths": ["//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]"] + "xpaths": [ + "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]" + ] }, { "globPattern": "/**/*.layout-meta.xml", - "xpaths": ["//ns:relatedLists//ns:relatedList[contains(text(),'RelatedSolutionList')]"] + "xpaths": [ + "//ns:relatedLists//ns:relatedList[contains(text(),'RelatedSolutionList')]" + ] } ] ], @@ -164,14 +265,20 @@ "globPattern": { "$id": "#/properties/cleanXmlPatterns/items/properties/globPattern", "description": 
"Glob pattern to identify XML files to clean", - "examples": ["/**/*.flexipage-meta.xml"], + "examples": [ + "/**/*.flexipage-meta.xml" + ], "title": "Glob pattern", "type": "string" }, "xpaths": { "$id": "#/properties/cleanXmlPatterns/items/properties/xpaths", "description": "XPaths to identify elements to remove", - "examples": [["//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]"]], + "examples": [ + [ + "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]" + ] + ], "title": "XPath list", "items": { "type": "string" @@ -179,7 +286,10 @@ "type": "array" } }, - "required": ["globPattern", "xpaths"] + "required": [ + "globPattern", + "xpaths" + ] }, "title": "Clean XML Patterns", "type": "array" @@ -228,7 +338,11 @@ }, "title": "Command" }, - "required": ["id", "label", "command"], + "required": [ + "id", + "label", + "command" + ], "title": "Commands Pre-Deployment", "type": "array" }, @@ -273,6 +387,28 @@ "title": "Command", "type": "string" }, + "context": { + "$id": "#/properties/commandsPostDeploy/items/properties/context", + "description": "Context when the command must be run", + "title": "Context", + "default": "all", + "type": "string", + "enum": [ + "all", + "check-deployment-only", + "process-deployment-only" + ], + "enumNames": [ + "All", + "Check Deployment Only", + "Process Deployment Only" + ], + "examples": [ + "all", + "check-deployment-only", + "process-deployment-only" + ] + }, "skipIfError": { "$id": "#/properties/commandsPostDeploy/items/properties/skipIfError", "description": "Do not run the command if there is a deployment error", @@ -281,7 +417,11 @@ "type": "boolean" } }, - "required": ["id", "label", "command"], + "required": [ + "id", + "label", + "command" + ], "title": "Command" }, "title": "Commands Post-Deployment", @@ -301,14 +441,14 @@ "label": "Generate manifest", "icon": "file.svg", "tooltip": "Generates a manifest package.xml using local sfdx source files", - "command": "sfdx 
force:source:manifest:create --sourcepath force-app --manifestname myNewManifest" + "command": "sf project generate manifest --source-path force-app --name myNewManifest" }, { "id": "list-all-orgs", "label": "List all orgs", "icon": "salesforce.svg", "tooltip": "List all orgs that has already been authenticated using sfdx", - "command": "sfdx force:org:list --all" + "command": "sf org list --all" } ] }, @@ -391,13 +531,20 @@ "type": "string" } }, - "required": ["id", "label", "command"] + "required": [ + "id", + "label", + "command" + ] }, "title": "Label", "type": "array" } }, - "required": ["id", "label"] + "required": [ + "id", + "label" + ] }, "title": "Custom commands", "type": "array" @@ -406,8 +553,18 @@ "$id": "#/properties/customCommandsPosition", "description": "Position of custom commands in the menu (first or last)", "default": "first", - "enum": ["first", "last"], - "examples": ["first", "last"], + "enum": [ + "first", + "last" + ], + "enumNames": [ + "First", + "Last" + ], + "examples": [ + "first", + "last" + ], "title": "Custom commands position", "type": "string" }, @@ -442,14 +599,20 @@ "name": { "$Id": "#/properties/customPlugins/items/properties/name", "description": "Name of the plugin npm package", - "examples": ["mo-dx-plugin", "shane-sfdx-plugins"], + "examples": [ + "mo-dx-plugin", + "shane-sfdx-plugins" + ], "title": "Name", "type": "string" }, "helpUrl": { "$Id": "#/properties/customPlugins/items/properties/helpUrl", "description": "Url of plugin documentation", - "examples": ["https://github.com/msrivastav13/mo-dx-plugin", "https://github.com/mshanemc/shane-sfdx-plugins"], + "examples": [ + "https://github.com/msrivastav13/mo-dx-plugin", + "https://github.com/mshanemc/shane-sfdx-plugins" + ], "title": "Name", "type": "string" } @@ -481,7 +644,10 @@ "type": "boolean" } }, - "required": ["dataPath", "importInScratchOrgs"] + "required": [ + "dataPath", + "importInScratchOrgs" + ] }, "title": "Data packages", "type": "array" @@ -489,16 
+655,25 @@ "defaultPackageInstallationKey": { "$id": "#/properties/defaultPackageInstallationKey", "description": "When generating a new package version protected with password, use this value as default package installation key", - "examples": ["hardis", "myPassword", "dFGGF43656YfdFDG{{{dhgfh:::;FSEDSFd78"], - "title": "Defaut package installation key", + "examples": [ + "hardis", + "myPassword", + "dFGGF43656YfdFDG{{{dhgfh:::;FSEDSFd78" + ], + "title": "Default package installation key", "type": "string" }, "developmentBranch": { "$id": "#/properties/developmentBranch", "default": "developpement", - "description": "When creating a new sfdx-hardis task, this git branch is used as base to create the feature/debug sub branch. The merge request will later have this branch as target.", - "examples": ["developpement", "dev_lot2", "hotfixes"], - "title": "Default pull/merge request target org", + "description": "When creating a new sfdx-hardis User Story, this git branch is used as base to create the feature/debug sub branch. 
The merge request will later have this branch as target.", + "docUrl": "https://sfdx-hardis.cloudity.com/hardis/work/new/", + "examples": [ + "developpement", + "dev_lot2", + "hotfixes" + ], + "title": "Default Pull Request/Merge Request target branch when you create a new User Story.", "type": "string" }, "deploymentPlan": { @@ -551,38 +726,55 @@ "properties": { "dataPath": { "$id": "#/properties/install/properties/packages/items/dataPath", - "examples": ["scripts/data/EmailTemplate"], + "examples": [ + "scripts/data/EmailTemplate" + ], "title": "Path to SFDMU data project to deploy", "type": "string" }, "label": { "$id": "#/properties/install/properties/packages/items/label", - "examples": ["Deploy EmailTemplate", "Import EmailTemplate records"], + "examples": [ + "Deploy EmailTemplate", + "Import EmailTemplate records" + ], "title": "Source or data package label", "type": "string" }, "order": { "$id": "#/properties/install/properties/packages/items/order", - "examples": [-20, 13, 50], + "examples": [ + -20, + 13, + 50 + ], "title": "Execution order in deployment plan", "type": "number" }, "packageXmlFile": { "$id": "#/properties/install/properties/packages/items/packageXmlFile", - "examples": ["manifest/splits/packageXmlEmails.xml"], + "examples": [ + "manifest/splits/packageXmlEmails.xml" + ], "title": "Path to package.xml file to use for deployment", "type": "string" }, "waitAfter": { "$id": "#/properties/install/properties/packages/items/waitAfter", "description": "Delay to wait before installing the next package", - "examples": [10, 20], + "examples": [ + 10, + 20 + ], "title": "Wait after install (seconds)", "type": "number" } } }, - "required": ["label", "order"], + "required": [ + "label", + "order" + ], "title": "List of packages to deploy", "type": "array" } @@ -594,43 +786,76 @@ "$id": "#/properties/devHubAlias", "default": "", "description": "Dev Hub alias, usually DevHub_ProjectName", - "examples": ["DevHub_MyClientMyProject", "DevHub_GoogleGmail", 
"DevHub_AppleIWatch"], + "examples": [ + "DevHub_MyClientMyProject", + "DevHub_GoogleGmail", + "DevHub_AppleIWatch" + ], "title": "Dev Hub org alias", "type": "string" }, + "devHubInstanceUrl": { + "$id": "#/properties/devHubInstanceUrl", + "default": "https://login.salesforce.com", + "description": "Dev Hub instance URL used for authenticating to DevHub from CI jobs", + "examples": [ + "DevHub_MyClientMyProject" + ], + "title": "Dev Hub Instance URL", + "type": "string" + }, "devHubUsername": { "$id": "#/properties/devHubUsername", "default": "", "description": "Dev Hub username, used to authenticate to DevHub from CI jobs", - "examples": ["cicd-user@myclient.com", "scratch-user@google.fr", "scratch-user@apple.fr"], + "examples": [ + "cicd-user@myclient.com", + "scratch-user@google.fr", + "scratch-user@apple.fr" + ], "title": "Dev Hub Username", "type": "string" }, + "docDeployToCloudflare": { + "$id": "#/properties/docDeployToCloudflare", + "default": false, + "description": "Automatically deploy MkDocs HTML documentation from CI/CD Workflows to Cloudflare", + "examples": [ + true + ], + "title": "Doc: Deploy to Cloudflare", + "type": "boolean" + }, + "docDeployToOrg": { + "$id": "#/properties/docDeployToOrg", + "default": false, + "description": "Automatically deploy MkDocs HTML documentation from CI/CD Workflows to Salesforce org as static resource", + "examples": [ + true + ], + "title": "Doc: Deploy to Salesforce Org", + "type": "boolean" + }, "extends": { "$id": "#/properties/extends", "description": "You can base your local sfdx-hardis configuration on a remote config file. 
That allows you to have the same config base for all your projects", - "examples": ["https://raw.githubusercontent.com/worldcompany/shared-config/main/.sfdx-hardis.yml"], + "examples": [ + "https://raw.githubusercontent.com/worldcompany/shared-config/main/.sfdx-hardis.yml" + ], "title": "Extends remote configuration URL", "type": "string" }, "initPermissionSets": { "$id": "#/properties/initPermissionSets", - "description": "When creating a scratch org, Admin user will be automatically assigned to those permission sets", - "examples": [["MyPermissionSet", "MyPermissionSetGroup"]], + "description": "When creating a scratch org, Admin user will be automatically assigned to those permission sets. Example: PS_Admin", + "examples": [ + [ + "MyPermissionSet", + "MyPermissionSetGroup" + ] + ], "items": { - "$id": "#/properties/initPermissionSets/items", - "type": ["object", "string"], - "additionalProperties": false, - "description": "Permission Set or Permission Set Group", - "properties": { - "name": { - "$Id": "#/properties/initPermissionSets/items/properties/name", - "description": "Permission Set or Permission Set Group name", - "examples": [["MyPermissionSet", "MyPermissionSetGroup", "MyPermissionSetGroup2"]], - "title": "Name", - "type": "string" - } - } + "type": "string" }, "title": "Initial Permission Sets", "type": "array" @@ -673,43 +898,60 @@ "properties": { "Id": { "$Id": "#/properties/install/properties/packages/items/Id", - "examples": ["0A35r000000GveVCAS"], + "examples": [ + "0A35r000000GveVCAS" + ], "title": "(unused) PackageId", "type": "string" }, "SubscriberPackageId": { "$Id": "#/properties/install/properties/packages/items/SubscriberPackageId", - "examples": ["033b0000000Pf2AAAS"], + "examples": [ + "033b0000000Pf2AAAS" + ], "title": "Subscriber Package Id", "type": "string" }, "SubscriberPackageName": { "$Id": "#/properties/install/properties/packages/items/SubscriberPackageName", - "examples": ["Files Attachment Notes"], + "examples": [ + "Files 
Attachment Notes" + ], "title": "Subscriber Package Name", "type": "string" }, "SubscriberPackageNamespace": { "$Id": "#/properties/install/properties/packages/items/SubscriberPackageNamespace", - "examples": ["fan_astrea"], + "examples": [ + "fan_astrea" + ], "title": "Subscriber Package NameSpace", - "type": ["string", "null"] + "type": [ + "string", + "null" + ] }, "SubscriberPackageVersionId": { "$Id": "#/properties/install/properties/packages/items/SubscriberPackageVersionId", - "examples": ["04t0o000003nRWAAA2"], + "examples": [ + "04t0o000003nRWAAA2" + ], "title": "Subscriber Version Id (IMPORTANT)", "type": "string" }, "SubscriberPackageVersionName": { "$Id": "#/properties/install/properties/packages/items/SubscriberPackageVersionName", - "examples": ["Summer2021"], + "examples": [ + "Summer2021" + ], "title": "Subscriber Version Name", "type": "string" }, "SubscriberPackageVersionNumber": { "$Id": "#/properties/install/properties/packages/items/SubscriberPackageVersionNumber", - "examples": ["1.22.0.2"], + "examples": [ + "1.22.0.2" + ], "title": "Subscriber Version Number", "type": "string" }, @@ -717,7 +959,10 @@ "$Id": "#/properties/install/properties/packages/items/installDuringDeployments", "default": false, "description": "If true, during deployments this package will be installed in target org if not installed yet", - "examples": [true, false], + "examples": [ + true, + false + ], "title": "Install during deployments", "type": "boolean" }, @@ -725,19 +970,27 @@ "$Id": "#/properties/install/properties/packages/items/installOnScratchOrgs", "default": false, "description": "If true, this package will be installed when creating a new scratch org with sfdx-hardis", - "examples": [true, false], + "examples": [ + true, + false + ], "title": "Install on scratch orgs", "type": "boolean" }, "installationkey": { "$Id": "#/properties/install/properties/packages/items/installationkey", - "examples": ["MyInstallationKey", "4FzkMzUSwFfP#@"], + "examples": [ + 
"MyInstallationKey", + "4FzkMzUSwFfP#@" + ], "description": "Installation key for key-protected package", "title": "Package installation key", "type": "string" } }, - "required": ["SubscriberPackageVersionId"], + "required": [ + "SubscriberPackageVersionId" + ], "title": "Salesforce package" }, "title": "Installed Packages", @@ -746,16 +999,23 @@ "installPackagesDuringCheckDeploy": { "$id": "#/properties/installPackagesDuringCheckDeploy", "default": false, - "description": "When calling deployment check command, installs any package referred within installedPackages property", - "examples": [true], - "title": "Install packages during deployment checks", + "description": "If your configuration contains an installedPackages property, activating this option allows you to make sfdx-hardis automatically install packages during the Deployments Check workflow, and not to wait after the merge of the Pull Request.", + "docUrl": "https://sfdx-hardis.cloudity.com/salesforce-ci-cd-work-on-task-install-packages/", + "examples": [ + true + ], + "title": "Install packages during deployment checks workflow", "type": "boolean" }, "instanceUrl": { "$id": "#/properties/instanceUrl", "default": "", "description": "Salesforce instance URL used by CI for deployment or backups", - "examples": ["https://myclient.force.com", "https://google.force.com", "https://apple.force.com"], + "examples": [ + "https://myclient.force.com", + "https://google.force.com", + "https://apple.force.com" + ], "title": "Instance URL", "type": "string" }, @@ -773,13 +1033,46 @@ "sourcesToRetrofit": { "$id": "#/properties/sourcesToRetrofit", "description": "List of metadata to retrieve for retrofit job", - "examples": [["CustomField", "Layout", "PermissionSet"]], + "examples": [ + [ + "CustomField", + "Layout", + "PermissionSet" + ] + ], "items": { "type": "string" }, "title": "Metadata to retrofit", "type": "array" }, + "manualActionsFileUrl": { + "$id": "#/properties/manualActionsFileUrl", + "default": "", + 
"description": "URL of the XLS file that contains manual actions to perform before or after a deployment", + "examples": [ + "https://some.sharepoint.com/file.xlsx" + ], + "title": "Manual Actions File URL", + "type": "string" + }, + "mergeTargets": { + "$id": "#/properties/mergeTargets", + "description": "In branch-scoped config file, declares the list of branches that the current one can have as merge target. For example, integration will have mergeTargets [uat]", + "examples": [ + [ + "preprod" + ], + [ + "integration" + ] + ], + "items": { + "type": "string" + }, + "title": "Merge target branches", + "type": "array" + }, "monitoringCommands": { "$id": "#/properties/monitoringCommands", "description": "List of monitoring commands to run with command hardis:org:monitor:all", @@ -788,13 +1081,13 @@ { "title": "Detect calls to deprecated API versions", "key": "LEGACYAPI", - "command": "sfdx hardis:org:diagnose:legacyapi", + "command": "sf hardis:org:diagnose:legacyapi", "frequency": "weekly" }, { "title": "My custom command", "key": "MY_CUSTOM_KEY", - "command": "sfdx my:custom:command", + "command": "sf my:custom:command", "frequency": "daily" } ] @@ -830,7 +1123,10 @@ "type": "string" } }, - "required": ["title", "command"] + "required": [ + "title", + "command" + ] }, "title": "Monitoring commands", "type": "array" @@ -845,7 +1141,12 @@ "monitoringDisable": { "$id": "#/properties/monitoringDisable", "description": "List of commands to skip during monitoring jobs", - "examples": [["METADATA_STATUS", "UNUSED_METADATAS"]], + "examples": [ + [ + "METADATA_STATUS", + "UNUSED_METADATAS" + ] + ], "items": { "$ref": "#/definitions/enum_monitoring_commands" }, @@ -855,7 +1156,12 @@ "monitoringExcludeUsernames": { "$id": "#/properties/monitoringExcludeUsernames", "description": "List of usernames to exclude while running monitoring commands", - "examples": [["deploymentuser@cloudity.com", "mc-cloud-user@cloudity.com"]], + "examples": [ + [ + "deploymentuser@cloudity.com", 
+ "mc-cloud-user@cloudity.com" + ] + ], "items": { "type": "string" }, @@ -866,7 +1172,9 @@ "$id": "#/properties/msTeamsWebhookUrl", "default": "", "description": "Url of the Ms Teams channel Web Hook that can be used to send ALL notifications", - "examples": ["https://my.msteams.webhook.url"], + "examples": [ + "https://my.msteams.webhook.url" + ], "title": "MsTeams WebHook Url (ALL)", "type": "string" }, @@ -874,7 +1182,9 @@ "$id": "#/properties/msTeamsWebhookUrlCritical", "default": "", "description": "Url of the Ms Teams channel Web Hook that can be used to send CRITICAL notifications", - "examples": ["https://my.msteams.webhook.url"], + "examples": [ + "https://my.msteams.webhook.url" + ], "title": "MsTeams WebHook Url (CRITICAL)", "type": "string" }, @@ -882,7 +1192,9 @@ "$id": "#/properties/msTeamsWebhookUrlSevere", "default": "", "description": "Url of the Ms Teams channel Web Hook that can be used to send SEVERE notifications", - "examples": ["https://my.msteams.webhook.url"], + "examples": [ + "https://my.msteams.webhook.url" + ], "title": "MsTeams WebHook Url (SEVERE)", "type": "string" }, @@ -890,28 +1202,70 @@ "$id": "#/properties/msTeamsWebhookUrlWarning", "default": "", "description": "Url of the Ms Teams channel Web Hook that can be used to send WARNING notifications", - "examples": ["https://my.msteams.webhook.url"], + "examples": [ + "https://my.msteams.webhook.url" + ], "title": "MsTeams WebHook Url (WARNING)", "type": "string" }, "msTeamsWebhookUrlInfo": { - "$id": "#/properties/msTeamsWebhookUrlWarning", + "$id": "#/properties/msTeamsWebhookUrlInfo", "default": "", "description": "Url of the Ms Teams channel Web Hook that can be used to send INFO notifications", - "examples": ["https://my.msteams.webhook.url"], + "examples": [ + "https://my.msteams.webhook.url" + ], "title": "MsTeams WebHook Url (INFO)", "type": "string" }, + "newTaskNameRegex": { + "$id": "#/properties/newTaskNameRegex", + "default": "", + "description": "If you define a 
regular expression, it will be used to validate the name of new User Stories.\nFor example, you can enforce a Jira number in the name with regex '^MYPROJECT-[0-9]+ .*'", + "examples": [ + "^[A-Z]+-[0-9]+ .*", + "^CLOUDITY-[0-9]+ .*", + "^MYPROJECT-[0-9]+ .*" + ], + "title": "User Story name validation regex", + "type": "string" + }, + "newTaskNameRegexExample": { + "$id": "#/properties/newTaskNameRegexExample", + "default": "", + "description": "If you activated User Story name validation via RegEx, define an example value that will be displayed to users.\nExample: 'MYPROJECT-168 Update account status validation rule'", + "examples": [ + "MYPROJECT-123 Update account status validation rule" + ], + "title": "Example string for User Story name validation regex", + "type": "string" + }, "notificationsDisable": { "$id": "#/properties/notificationsDisable", "description": "List of notifications types to skip sending", - "examples": [["METADATA_STATUS", "UNUSED_METADATAS"]], + "examples": [ + [ + "METADATA_STATUS", + "UNUSED_METADATAS" + ] + ], "items": { "$ref": "#/definitions/enum_notification_types" }, "title": "Disabled notification types", "type": "array" }, + "packageNoOverwritePath": { + "$id": "#/properties/packageNoOverwritePath", + "default": "", + "description": "By default, manifest/package-no-overwrite.xml is used, but you could decide to use a different file for specific major branches.\nIn that case, set the path to a custom package-no-overwrite XML file in a branch-scoped sfdx-hardis configuration file.", + "examples": [ + "manifest/package-no-overwrite-main.xml", + "manifest/package-no-overwrite-custom.xml" + ], + "title": "Branch-scoped custom Package-No-Overwrite path", + "type": "string" + }, "poolConfig": { "$id": "#/properties/poolConfig", "description": "Configuration allowing to generate and fetch scratch orgs from scratch org pool", @@ -955,7 +1309,11 @@ "$id": "#/properties/productionBranch", "default": "", "description": "Name of the git branch 
corresponding to production environment", - "examples": ["master", "main", "production"], + "examples": [ + "master", + "main", + "production" + ], "title": "Production branch name", "type": "string" }, @@ -963,15 +1321,78 @@ "$id": "#/properties/projectName", "default": "", "description": "Identifier for the project (can be the client and project)", - "examples": ["MyClientMyProject", "GoogleGmail", "AppleIWatch"], + "examples": [ + "MyClientMyProject", + "GoogleGmail", + "AppleIWatch" + ], "title": "Project Name", "type": "string" }, + "refreshSandboxConfig": { + "$id": "#/properties/refreshSandboxConfig", + "description": "Configuration for sandbox refresh. Will be used by command hardis:org:refresh:before-refresh and hardis:org:refresh:after-refresh", + "properties": { + "connectedApps": { + "$id": "#/properties/refreshSandboxConfig/connectedApps", + "description": "List of connected apps to download before refresh and to upload after refresh", + "examples": [ + [ + "My_Connected_App_1", + "My_Connected_App_2" + ] + ], + "items": { + "type": "string" + }, + "title": "Connected Apps", + "type": "array" + }, + "customSettings": { + "$id": "#/properties/refreshSandboxConfig/customSettings", + "description": "List of Custom Settings to download before refresh and to upload after refresh", + "examples": [ + [ + "MyCustomSetting1__c", + "MyCustomSetting2__c" + ] + ], + "items": { + "type": "string" + }, + "title": "Custom Settings", + "type": "array" + }, + "dataWorkspaces": { + "$id": "#/properties/refreshSandboxConfig/dataWorkspaces", + "description": "List of data workspaces to download before refresh and to upload after refresh", + "examples": [ + [ + "scripts/data/AnonymizeAccounts", + "scripts/data/AnonymizeContacts", + "scripts/data/AnonymizeLeads" + ] + ], + "items": { + "type": "string" + }, + "title": "Data Workspaces", + "type": "array" + } + }, + "additionalProperties": false, + "title": "Refresh Sandbox Configuration", + "type": "object" + }, 
"retrofitBranch": { "$id": "#/properties/retrofitBranch", "default": "", "description": "Name of the git branch where retrofit merge requests targets to", - "examples": ["preprod", "dev", "maintenance"], + "examples": [ + "preprod", + "dev", + "maintenance" + ], "title": "Retrofit branch name", "type": "string" }, @@ -994,14 +1415,22 @@ "$id": "#/properties/runtests", "default": "", "description": "WARNING: Use with caution, only in branch scoped config ! Can be a list of test classes if testLevel=RunSpecifiedTests, or a regex if testLevel=RunRepositoryTests", - "examples": ["MyTestClass1,MyTestClass2", "^(?!FLI|fli|BatchableCodeSolvaTest|BatchableRemoveCodeSolvaTest|HelperNovaxelApiTest).*"], + "examples": [ + "MyTestClass1,MyTestClass2", + "^(?!FLI|fli|BatchableCodeSolvaTest|BatchableRemoveCodeSolvaTest|HelperNovaxelApiTest).*" + ], "title": "Selected tests to run (list or regex)", "type": "string" }, "scratchOrgInitApexScripts": { "$id": "#/properties/scratchOrgInitApexScripts", - "description": "Apex scripts to call after scratch org initialization", - "examples": [["scripts/apex/init-scratch.apex", "scripts/apex/init-custom-settings.apex"]], + "description": "Apex scripts to call after scratch org initialization. 
Example: scripts/apex/init-scratch.apex", + "examples": [ + [ + "scripts/apex/init-scratch.apex", + "scripts/apex/init-custom-settings.apex" + ] + ], "items": { "type": "string" }, @@ -1021,7 +1450,9 @@ "$id": "#/properties/sfdmuCanModify", "default": "", "description": "Instance host name to allow SFDMU to deploy data in a production org", - "examples": ["myproject.force.com"], + "examples": [ + "myproject.force.com" + ], "title": "SFDMU can modify", "type": "string" }, @@ -1035,7 +1466,12 @@ "skipMinimizeProfiles": { "$id": "#/properties/skipMinimizeProfiles", "description": "These profiles will not be reformatted by command hardis:project:clean:minimizeprofiles", - "examples": [["MyClient Customer Community Login User", "MyClientPortail Profile"]], + "examples": [ + [ + "MyClient Customer Community Login User", + "MyClientPortail Profile" + ] + ], "items": { "type": "string" }, @@ -1056,11 +1492,22 @@ "title": "Skip update .gitignore file", "type": "boolean" }, + "sharedDevSandboxes": { + "$id": "#/properties/sharedDevSandboxes", + "default": false, + "description": "Set to true if contributors can share dev sandboxes\nIf active, contributors will never be asked to refresh their sandbox metadata, to avoid to overwrite by accident their colleagues work :)", + "title": "Contributors can share Dev Sandboxes", + "type": "boolean" + }, "targetUsername": { "$id": "#/properties/targetUsername", "default": "", "description": "Salesforce username used by CI for deployment or backups", - "examples": ["deployments@myclient.com", "deployments@google.fr", "deployments@apple.com"], + "examples": [ + "deployments@myclient.com", + "deployments@google.fr", + "deployments@apple.com" + ], "title": "Target Username", "type": "string" }, @@ -1075,27 +1522,67 @@ "$id": "#/properties/testLevel", "description": "WARNING: Use with caution, only in branch scoped config ! 
You can override default test level for deployments for special use cases, for example when you have SeeAllData=true you can use RunRepositoryTests associated with a regex in runtests option", "default": "RunLocalTests", - "enum": ["NoTestRun", "RunSpecifiedTests", "RunRepositoryTests", "RunRepositoryTestsExceptSeeAllData", "RunLocalTests", "RunAllTestsInOrg"], - "examples": ["RunRepositoryTests", "RunSpecifiedTests"], + "enum": [ + "NoTestRun", + "RunSpecifiedTests", + "RunRepositoryTests", + "RunRepositoryTestsExceptSeeAllData", + "RunLocalTests", + "RunAllTestsInOrg" + ], + "enumNames": [ + "No Test Run", + "Run Specified Tests", + "Run Repository Tests", + "Run Repository Tests Except SeeAllData", + "Run Local Tests", + "Run All Tests In Org" + ], + "examples": [ + "RunRepositoryTests", + "RunSpecifiedTests" + ], "title": "Test level for deployments", "type": "string" }, "useDeltaDeployment": { "$id": "#/properties/useDeltaDeployment", "default": false, - "description": "Defines if sfdx-hardis will deploy in delta from minor to major branches", + "description": "Defines if sfdx-hardis will deploy in delta from minor to major branches.\nWhen active, Delta Deployments allow to deploy only the metadatas in the branch / User Story, and not the full sources of the SFDX project.\nNote: Even if activated, Delta Deployments will be applied only for Pull Requests from minor (features,hotfixes) to major branches (integration,preprod).", + "docUrl": "https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-delta-deployment/#delta-mode", "title": "Use Delta Deployment", "type": "boolean" }, + "useDeltaDeploymentWithDependencies": { + "$id": "#/properties/useDeltaDeploymentWithDependencies", + "default": false, + "description": "Also deploy dependencies of the metadatas identified by delta deployment, to avoid broken deployments due to missing dependencies.\n Example: removed picklist value in a field, that is still used in a record type.", + "docUrl": 
"https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-delta-deployment/#delta-with-dependencies-beta", + "title": "Use Delta Deployment with dependencies", + "type": "boolean" + }, + "useSmartDeploymentTests": { + "$id": "#/properties/useSmartDeploymentTests", + "default": false, + "description": "Define if Smart Deployment Tests will be activated and run Apex test classes only if metadata that can impact them are present in the branch / User Story.\nNote: Smart Deployment Tests will be applied only for Pull Requests from minor (features,hotfixes) to major branches (integration,preprod).", + "docUrl": "https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/#smart-deployments-tests", + "title": "Use Smart Deployment Tests", + "type": "boolean" + }, "linterIgnoreRightMetadataFile": { "$id": "#/properties/linterIgnoreRightMetadataFile", "default": "", "description": "Ignore profiles or permission sets", - "examples": ["Profile", "Profile:ProfileA", "PermissionSet", "PermissionSet:PermissionSetA, Profile:ProfileA"], + "examples": [ + "Profile", + "Profile:ProfileA", + "PermissionSet", + "PermissionSet:PermissionSetA, Profile:ProfileA" + ], "title": "Linter ignore permission set or/and profile", "type": "string" } }, "title": "sfdx-hardis configuration", "type": "object" -} +} \ No newline at end of file diff --git a/config/sfdx-hardis.mega-linter-config.yml b/config/sfdx-hardis.mega-linter-config.yml index 0fa4ed86c..85327a31a 100644 --- a/config/sfdx-hardis.mega-linter-config.yml +++ b/config/sfdx-hardis.mega-linter-config.yml @@ -5,8 +5,8 @@ # EXTENDS: # - https://raw.githubusercontent.com/hardisgroupcom/sfdx-hardis/main/config/sfdx-hardis.mega-linter-config.yml -APPLY_FIXES: all # all, none, or list of linter keys -DEFAULT_BRANCH: master # Usually master or main +APPLY_FIXES: none # all, none, or list of linter keys +DEFAULT_BRANCH: main # Usually master or main DISABLE: - HTML - SPELL # Uncomment to enable checks of spelling mistakes # - SPELL # Uncomment 
to disable checks of spelling mistakes @@ -19,3 +19,6 @@ PRE_COMMANDS: SALESFORCE_SFDX_SCANNER_APEX_ARGUMENTS: - --pmdconfig - ./config/pmd-ruleset.xml +BASH_FILTER_REGEX_EXCLUDE: (pre-commit) +MARKDOWN_MARKDOWN_LINK_CHECK_DISABLE_ERRORS: true +YAML_YAMLLINT_DISABLE_ERRORS: true diff --git a/defaults/ci/.github/workflows/check-deploy.yml b/defaults/ci/.github/workflows/check-deploy.yml index 22b630ba3..b3e942480 100644 --- a/defaults/ci/.github/workflows/check-deploy.yml +++ b/defaults/ci/.github/workflows/check-deploy.yml @@ -37,7 +37,7 @@ jobs: fetch-depth: 0 # Fetch all branches # Setup node - name: Setup Node - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: "20" # SFDX & plugins @@ -45,9 +45,8 @@ jobs: run: | npm install --no-cache @salesforce/cli --global sf plugins install @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install sfdx-hardis - echo 'y' | sfdx plugins:install sfdx-essentials - echo 'y' | sfdx plugins:install sfdx-git-delta + echo 'y' | sf plugins install sfdx-hardis + echo 'y' | sf plugins install sfdx-git-delta sf version --verbose --json # Login & check deploy with test classes & code coverage - name: Login & Simulate deployment @@ -60,6 +59,7 @@ jobs: SFDX_CLIENT_KEY_PREPROD: ${{ secrets.SFDX_CLIENT_KEY_PREPROD}} SFDX_CLIENT_ID_MAIN: ${{ secrets.SFDX_CLIENT_ID_MAIN}} SFDX_CLIENT_KEY_MAIN: ${{ secrets.SFDX_CLIENT_KEY_MAIN}} + SFDX_AUTH_URL_TECHNICAL_ORG: ${{ SFDX_AUTH_URL_TECHNICAL_ORG }} SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary CI_COMMIT_REF_NAME: ${{ github.event.pull_request.base.ref }} # Defines the target branch of the PR @@ -77,7 +77,8 @@ jobs: JIRA_TICKET_REGEX: ${{ secrets.JIRA_TICKET_REGEX }} OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} FORCE_COLOR: "1" + SFDX_DISABLE_FLOW_DIFF: false # Set to true to disable Flow doc during CI/CD run: | echo "Simulate SFDX deployment using Hardis against \"$CONFIG_BRANCH\"" - sfdx hardis:auth:login 
- sfdx hardis:project:deploy:sources:dx --check + sf hardis:auth:login + sf hardis:project:deploy:smart --check diff --git a/defaults/ci/.github/workflows/megalinter.yml b/defaults/ci/.github/workflows/megalinter.yml index 596637d5d..e617aa28c 100644 --- a/defaults/ci/.github/workflows/megalinter.yml +++ b/defaults/ci/.github/workflows/megalinter.yml @@ -52,9 +52,10 @@ jobs: # Upload Mega-Linter artifacts - name: Archive production artifacts if: success() || failure() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Mega-Linter reports + include-hidden-files: "true" path: | megalinter-reports mega-linter.log diff --git a/defaults/ci/.github/workflows/process-deploy.yml b/defaults/ci/.github/workflows/process-deploy.yml index 12e18bf40..eb280b0da 100644 --- a/defaults/ci/.github/workflows/process-deploy.yml +++ b/defaults/ci/.github/workflows/process-deploy.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 # Setup node - name: Setup Node - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: "20" # SFDX & plugins @@ -40,10 +40,9 @@ jobs: run: | npm install @salesforce/cli --global sf plugins install @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install sfdx-hardis - echo 'y' | sfdx plugins:install sfdx-essentials - # echo 'y' | sfdx plugins:install sfdmu # Disabled while it does not play well with @salesforce/cli - echo 'y' | sfdx plugins:install sfdx-git-delta + echo 'y' | sf plugins install sfdx-hardis + # echo 'y' | sf plugins install sfdmu # Disabled while it does not play well with @salesforce/cli + echo 'y' | sf plugins install sfdx-git-delta sf version --verbose --json # Set env branch variable (github.ref_name seems to not work) - name: Set env.BRANCH @@ -59,6 +58,7 @@ jobs: SFDX_CLIENT_KEY_PREPROD: ${{ secrets.SFDX_CLIENT_KEY_PREPROD}} SFDX_CLIENT_ID_MAIN: ${{ secrets.SFDX_CLIENT_ID_MAIN}} SFDX_CLIENT_KEY_MAIN: ${{ secrets.SFDX_CLIENT_KEY_MAIN}} + SFDX_AUTH_URL_TECHNICAL_ORG: ${{ 
secrets.SFDX_AUTH_URL_TECHNICAL_ORG }} SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary CI_COMMIT_REF_NAME: ${{ env.BRANCH }} # Defines the target branch of the PR @@ -75,5 +75,5 @@ jobs: FORCE_COLOR: "1" run: | echo "Process SFDX deployment using Hardis against \"$CONFIG_BRANCH\"" - sfdx hardis:auth:login - sfdx hardis:project:deploy:sources:dx + sf hardis:auth:login + sf hardis:project:deploy:smart diff --git a/defaults/ci/.gitlab-ci.yml b/defaults/ci/.gitlab-ci.yml index f0d356f23..fa06f6c33 100644 --- a/defaults/ci/.gitlab-ci.yml +++ b/defaults/ci/.gitlab-ci.yml @@ -12,15 +12,15 @@ include: # Pipeline stages stages: - - build # Check code quality (+ create testing scratch org if necessary) - - test # Apex unit tests on testing scratch org (if used) - - clean # Delete testing scratch org (if used) - - check_deploy # Simulate deployment to target branch - - deploy # After a merge, automatically deploys the new commit state into the related Salesforce org + - build # Check code quality (+ create testing scratch org if necessary) + - test # Apex unit tests on testing scratch org (if used) + - clean # Delete testing scratch org (if used) + - check_deploy # Simulate deployment to target branch + - deploy # After a merge, automatically deploys the new commit state into the related Salesforce org # Jobs are run on sfdx-hardis image, that includes all required dependencies. 
# You can use latest, beta or latest-recommended -image: hardisgroupcom/sfdx-hardis:latest +image: hardisgroupcom/sfdx-hardis:latest # If rate limits reached, use ghcr.io/hardisgroupcom/sfdx-hardis:latest # Force color for output logs for better readability variables: @@ -81,10 +81,11 @@ check_deploy_to_target_branch_org: variables: CONFIG_BRANCH: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME ORG_ALIAS: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME + SFDX_DISABLE_FLOW_DIFF: false # Set to true to disable Flow doc during CI/CD setup script: - '[ -z "$CI_MERGE_REQUEST_TARGET_BRANCH_NAME" ] && exit 0;' # Skip this job if it is launched from web UI and we are not in merge request context - - sfdx hardis:auth:login - - sfdx hardis:project:deploy:sources:dx --check + - sf hardis:auth:login + - sf hardis:project:deploy:smart --check # Create scratch org to check the sources push & the unit tests create_scratch_org: @@ -101,14 +102,15 @@ create_scratch_org: - $USE_SCRATCH_ORGS == "false" interruptible: true script: - - sfdx hardis:auth:login --devhub - - sfdx hardis:scratch:create + - sf hardis:auth:login --devhub + - sf hardis:scratch:create artifacts: when: always expire_in: 60 minutes paths: - .cache/sfdx-hardis/.sfdx - .sfdx + - .sf - config/user # Refresh scratch org pool: IF you use scratch orgs, job to schedule with variable SCRATCH_ORG_POOL: true @@ -121,8 +123,8 @@ refresh_scratch_org_pool: - $SCRATCH_ORG_POOL == "true" interruptible: true script: - - sfdx hardis:auth:login --devhub - - sfdx hardis:scratch:pool:refresh + - sf hardis:auth:login --devhub + - sf hardis:scratch:pool:refresh artifacts: when: always @@ -145,8 +147,8 @@ test_apex: artifacts: true script: - sleep 120 # Orgs just created can be not ready yet, let's wait a while before running tests - - sfdx hardis:auth:login --scratchorg || true - - sfdx hardis:org:test:apex + - sf hardis:auth:login --scratchorg || true + - sf hardis:org:test:apex artifacts: when: always paths: @@ -165,7 +167,7 @@ test_apex: # - job: 
create_scratch_org # artifacts: true # script: -# - sfdx hardis:auth:login --scratchorg +# - sf hardis:auth:login --scratchorg # - echo "Automated tests not implemented yet" # Delete testing scratch org @@ -187,9 +189,9 @@ clean: - job: create_scratch_org - job: test_apex script: - - sfdx hardis:auth:login --devhub - - sfdx hardis:auth:login --scratchorg || true - - sfdx force:org:delete --noprompt || true + - sf hardis:auth:login --devhub + - sf hardis:auth:login --scratchorg || true + - sf org delete scratch --no-prompt || true # Simulate deployment to related org # Is triggered only when scheduled, or via manual launch @@ -206,8 +208,8 @@ check_deploy_to_current_branch_org: - $SCRATCH_ORG_POOL == "true" interruptible: true script: - - sfdx hardis:auth:login - - sfdx hardis:project:deploy:sources:dx --check + - sf hardis:auth:login + - sf hardis:project:deploy:smart --check # Deploy to branch related org when detecting new commit (after a merged merge request) # Don't forget to define variable DEPLOY_BRANCHES to match your branches in .gitlab-ci-config.yml @@ -224,5 +226,5 @@ deploy_to_org: - $SCRATCH_ORG_POOL == "true" interruptible: true script: - - sfdx hardis:auth:login - - sfdx hardis:project:deploy:sources:dx + - sf hardis:auth:login + - sf hardis:project:deploy:smart diff --git a/defaults/ci/Jenkinsfile b/defaults/ci/Jenkinsfile index ef4270ce5..64a2c10d9 100644 --- a/defaults/ci/Jenkinsfile +++ b/defaults/ci/Jenkinsfile @@ -15,6 +15,7 @@ pipeline { SFDX_CLIENT_KEY_PREPROD = credentials('SFDX_CLIENT_KEY_PREPROD') //Example SFDX_CLIENT_ID_MAIN = credentials('SFDX_CLIENT_ID_MAIN') //Example SFDX_CLIENT_KEY_MAIN = credentials('SFDX_CLIENT_KEY_MAIN') //Example + SFDX_AUTH_URL_TECHNICAL_ORG = credentials('SFDX_AUTH_URL_TECHNICAL_ORG') SLACK_TOKEN = credentials('SLACK_TOKEN') // Remove if not used SLACK_CHANNEL_ID = credentials('SLACK_CHANNEL_ID') // Remove if not used NOTIF_EMAIL_ADDRESS = credentials('NOTIF_EMAIL_ADDRESS') // Remove if not used @@ -25,6 +26,7 
@@ pipeline { CONFIG_BRANCH = "${GIT_BRANCH}" CI_COMMIT_REF_NAME = "${GIT_BRANCH}" ORG_ALIAS = "${GIT_BRANCH}" + SFDX_DISABLE_FLOW_DIFF = 'false' // Set to true to disable Flow doc during CI/CD setup } //Stage of the job @@ -52,15 +54,16 @@ pipeline { stage('Validation') { agent { docker { - image 'hardisgroupcom/sfdx-hardis:latest' + // If rate limits reached, use ghcr.io/hardisgroupcom/sfdx-hardis:latest + image 'hardisgroupcom/sfdx-hardis:latest' } } when { changeRequest() } //Validation on the appropriate org steps { script { - sh 'sfdx hardis:auth:login' - sh 'sfdx hardis:project:deploy:sources:dx --check' + sh 'sf hardis:auth:login' + sh 'sf hardis:project:deploy:smart --check' } } post { @@ -72,26 +75,27 @@ pipeline { stage('Deployment') { agent { docker { + // If rate limits reached, use ghcr.io/hardisgroupcom/sfdx-hardis:latest image 'hardisgroupcom/sfdx-hardis:latest' } } //MANUAL: add your major branch if necessary when { - allOf{ + allOf { anyOf { - branch: 'integration'; //Example - branch: 'uat'; //Example - branch: 'preprod'; //Example + branch: 'integration' //Example + branch: 'uat' //Example + branch: 'preprod' //Example branch: 'main' //Example - }; - not {changeRequest()} + }; + not { changeRequest() } } } //deploy on the appropriate org steps { script { - sh 'sfdx hardis:auth:login' - sh 'sfdx hardis:project:deploy:sources:dx' + sh 'sf hardis:auth:login' + sh 'sf hardis:project:deploy:smart' } } post { diff --git a/defaults/ci/azure-pipelines-checks.yml b/defaults/ci/azure-pipelines-checks.yml index a8972638d..0fa7a43ca 100644 --- a/defaults/ci/azure-pipelines-checks.yml +++ b/defaults/ci/azure-pipelines-checks.yml @@ -19,7 +19,7 @@ # - Tick "Override the YAML continuous integration trigger from here" and select "Disable continuous integration" # - Now go to Repos -> Branches and create a "Branch policy" for each of your major branches -# - In Build Validation, click + and then select your Build pipeline (even if it is prefilled, it's buggy and 
you +# - In Build Validation, click + and then select your Build pipeline (even if it is prefilled, it's buggy and you # need to manually select it again) and keep the default settings for the other options then save. # This causes the pipeline to be triggered on Pull Requests even if we previously disabled it. @@ -33,14 +33,12 @@ variables: value: $[replace(variables['System.PullRequest.TargetBranch'], 'refs/heads/', '')] jobs: - # Simulate SFDX deployment - job: DeploymentCheck timeoutInMinutes: 150 pool: - vmImage: ubuntu-20.04 + vmImage: ubuntu-latest steps: - # Checkout repo - checkout: self fetchDepth: 0 @@ -52,22 +50,20 @@ jobs: inputs: version: ">=20.0.0" displayName: "Use Node.js" - # Install SFDX & Dependencies - script: | npm install @salesforce/cli --global sf plugins install @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install sfdx-hardis - echo 'y' | sfdx plugins:install sfdx-essentials - echo 'y' | sfdx plugins:install sfdx-git-delta + echo 'y' | sf plugins install sfdx-hardis + echo 'y' | sf plugins install sfdx-git-delta sf version --verbose --json displayName: "Install SFDX & plugins" - + # Login & check deployment to PR target branch related org (configuration: https://hardisgroupcom.github.io/sfdx-hardis/salesforce-ci-cd-setup-auth/ ) - script: | - sfdx hardis:auth:login - sfdx hardis:project:deploy:sources:dx --check + sf hardis:auth:login + sf hardis:project:deploy:smart --check env: SFDX_CLIENT_ID_INTEGRATION: $(SFDX_CLIENT_ID_INTEGRATION) SFDX_CLIENT_KEY_INTEGRATION: $(SFDX_CLIENT_KEY_INTEGRATION) @@ -87,6 +83,7 @@ jobs: JIRA_PAT: $(JIRA_HOST) JIRA_TICKET_REGEX: $(JIRA_TICKET_REGEX) OPENAI_API_KEY: $(OPENAI_API_KEY) + SFDX_AUTH_URL_TECHNICAL_ORG: $(SFDX_AUTH_URL_TECHNICAL_ORG) SYSTEM_ACCESSTOKEN: $(System.AccessToken) CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) SYSTEM_COLLECTIONURI: $(System.CollectionUri) @@ -98,6 +95,7 @@ jobs: BUILD_REPOSITORYNAME: $(Build.Repository.Name) BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) 
BUILD_BUILD_ID: $(Build.BuildId) + SFDX_DISABLE_FLOW_DIFF: false # Set to true to disable Flow doc during CI/CD setup displayName: "Simulate deploy to org" # Run MegaLinter to detect linting and security issues @@ -105,22 +103,22 @@ jobs: # Config is editable in .mega-linter.yml - job: MegaLinter pool: - vmImage: ubuntu-20.04 + vmImage: ubuntu-latest steps: - # Pull MegaLinter docker image - - script: docker pull oxsecurity/megalinter-salesforce:latest - displayName: Pull MegaLinter - # Run MegaLinter - - script: | - docker run -v $(System.DefaultWorkingDirectory):/tmp/lint \ - --env-file <(env | grep -e SYSTEM_ -e BUILD_ -e TF_ -e AGENT_) \ - -e CI=true \ - -e SYSTEM_ACCESSTOKEN=$(System.AccessToken) \ - -e GIT_AUTHORIZATION_BEARER=$(System.AccessToken) \ - oxsecurity/megalinter-salesforce:latest - displayName: Run MegaLinter - # Publish Megalinter reports - - publish: $(System.DefaultWorkingDirectory)/megalinter-reports/ - condition: succeededOrFailed() - artifact: megalinter-reports - displayName: Publish reports + # Pull MegaLinter docker image + - script: docker pull oxsecurity/megalinter-salesforce:latest + displayName: Pull MegaLinter + # Run MegaLinter + - script: | + docker run -v $(System.DefaultWorkingDirectory):/tmp/lint \ + --env-file <(env | grep -e SYSTEM_ -e BUILD_ -e TF_ -e AGENT_) \ + -e CI=true \ + -e SYSTEM_ACCESSTOKEN=$(System.AccessToken) \ + -e GIT_AUTHORIZATION_BEARER=$(System.AccessToken) \ + oxsecurity/megalinter-salesforce:latest + displayName: Run MegaLinter + # Publish Megalinter reports + - publish: $(System.DefaultWorkingDirectory)/megalinter-reports/ + condition: succeededOrFailed() + artifact: megalinter-reports + displayName: Publish reports diff --git a/defaults/ci/azure-pipelines-deployment.yml b/defaults/ci/azure-pipelines-deployment.yml index b98e814dd..681cc0f38 100644 --- a/defaults/ci/azure-pipelines-deployment.yml +++ b/defaults/ci/azure-pipelines-deployment.yml @@ -32,9 +32,8 @@ jobs: - job: Deployment timeoutInMinutes: 
150 pool: - vmImage: ubuntu-20.04 + vmImage: ubuntu-latest steps: - # Checkout repo - checkout: self fetchDepth: 0 @@ -51,17 +50,16 @@ jobs: - script: | npm install @salesforce/cli --global sf plugins install @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install sfdx-hardis - echo 'y' | sfdx plugins:install sfdx-essentials - echo 'y' | sfdx plugins:install sfdmu - echo 'y' | sfdx plugins:install sfdx-git-delta + echo 'y' | sf plugins install sfdx-hardis + echo 'y' | sf plugins install sfdmu + echo 'y' | sf plugins install sfdx-git-delta sf version --verbose --json displayName: "Install SFDX & plugins" # Login & Deploy sfdx sources to related org (configuration: https://hardisgroupcom.github.io/sfdx-hardis/salesforce-ci-cd-setup-auth/ ) - script: | - sfdx hardis:auth:login - sfdx hardis:project:deploy:sources:dx + sf hardis:auth:login + sf hardis:project:deploy:smart env: SFDX_CLIENT_ID_INTEGRATION: $(SFDX_CLIENT_ID_INTEGRATION) SFDX_CLIENT_KEY_INTEGRATION: $(SFDX_CLIENT_KEY_INTEGRATION) @@ -79,6 +77,7 @@ jobs: JIRA_PAT: $(JIRA_HOST) JIRA_TICKET_REGEX: $(JIRA_TICKET_REGEX) SFDX_DEPLOY_WAIT_MINUTES: 150 + SFDX_AUTH_URL_TECHNICAL_ORG: $(SFDX_AUTH_URL_TECHNICAL_ORG) CI: "true" SYSTEM_ACCESSTOKEN: $(System.AccessToken) CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) diff --git a/defaults/ci/bitbucket-pipelines.yml b/defaults/ci/bitbucket-pipelines.yml index 8214b5896..e02bf646f 100644 --- a/defaults/ci/bitbucket-pipelines.yml +++ b/defaults/ci/bitbucket-pipelines.yml @@ -1,5 +1,5 @@ # You might add new branch names in pipeline "branches" -image: node:18 +image: node:20 definitions: services: docker: @@ -10,48 +10,51 @@ pipelines: pull-requests: "**": - parallel: - # Run MegaLinter - - step: - name: Run MegaLinter - image: oxsecurity/megalinter-salesforce:latest - script: - - export DEFAULT_WORKSPACE=$BITBUCKET_CLONE_DIR && bash /entrypoint.sh - artifacts: - - megalinter-reports/** - # Simulate deployment - - step: - name: Simulate SFDX deployment - script: - 
- npm install --no-cache @salesforce/cli --global - - sf plugins install @salesforce/plugin-packaging - - echo 'y' | sfdx plugins:install sfdx-hardis - - echo 'y' | sfdx plugins:install sfdx-essentials - - echo 'y' | sfdx plugins:install sfdx-git-delta - - sf version --verbose --json - - export BRANCH_NAME=$(echo "$BITBUCKET_PR_DESTINATION_BRANCH" | sed 's/refs\/heads\///') - - export CI_COMMIT_REF_NAME=$BRANCH_NAME - - export ORG_ALIAS=$BRANCH_NAME - - export CONFIG_BRANCH=$BRANCH_NAME - - export FORCE_COLOR=1 - - sfdx hardis:auth:login - - sfdx hardis:project:deploy:sources:dx --check + # Run MegaLinter + - step: + name: Run MegaLinter + image: oxsecurity/megalinter-salesforce:latest + script: + - export DEFAULT_WORKSPACE=$BITBUCKET_CLONE_DIR && bash /entrypoint.sh + artifacts: + - megalinter-reports/** + # Simulate deployment + - step: + name: Simulate SFDX deployment + clone: + depth: full + script: + - npm install --no-cache @salesforce/cli --global + - sf plugins install @salesforce/plugin-packaging + - echo 'y' | sf plugins install sfdx-hardis + - echo 'y' | sf plugins install sfdx-git-delta + - sf version --verbose --json + - export BRANCH_NAME=$(echo "$BITBUCKET_PR_DESTINATION_BRANCH" | sed 's/refs\/heads\///') + - export CI_COMMIT_REF_NAME=$BRANCH_NAME + - export ORG_ALIAS=$BRANCH_NAME + - export CONFIG_BRANCH=$BRANCH_NAME + - export FORCE_COLOR=1 + - export SFDX_DISABLE_FLOW_DIFF=false # Set to true to disable Flow doc during CI/CD setup + - sf hardis:auth:login + - sf hardis:project:deploy:smart --check branches: # Add all your major branches here - '{integration,uat,preprod,production,main}': + "{integration,uat,preprod,production,main}": - step: name: Deploy to major org + clone: + depth: full script: - - npm install --no-cache @salesforce/cli --global - - sf plugins install @salesforce/plugin-packaging - - echo 'y' | sfdx plugins:install sfdx-hardis - - echo 'y' | sfdx plugins:install sfdx-essentials - - echo 'y' | sfdx plugins:install sfdx-git-delta 
- - sf version --verbose --json - - export BRANCH_NAME=$(echo "$BITBUCKET_BRANCH" | sed 's/refs\/heads\///') - - export CI_COMMIT_REF_NAME=$BRANCH_NAME - - export CONFIG_BRANCH=$BRANCH_NAME - - export ORG_ALIAS=$BRANCH_NAME - - export FORCE_COLOR=1 - - sfdx hardis:auth:login - - sfdx hardis:project:deploy:sources:dx + - npm install --no-cache @salesforce/cli --global + - sf plugins install @salesforce/plugin-packaging + - echo 'y' | sf plugins install sfdx-hardis + - echo 'y' | sf plugins install sfdx-git-delta + - sf version --verbose --json + - export BRANCH_NAME=$(echo "$BITBUCKET_BRANCH" | sed 's/refs\/heads\///') + - export CI_COMMIT_REF_NAME=$BRANCH_NAME + - export CONFIG_BRANCH=$BRANCH_NAME + - export ORG_ALIAS=$BRANCH_NAME + - export FORCE_COLOR=1 + - sf hardis:auth:login + - sf hardis:project:deploy:smart diff --git a/defaults/ci/manifest/package-no-overwrite.xml b/defaults/ci/manifest/package-no-overwrite.xml index c6ff34357..c9d85a75a 100644 --- a/defaults/ci/manifest/package-no-overwrite.xml +++ b/defaults/ci/manifest/package-no-overwrite.xml @@ -4,6 +4,11 @@ * ApprovalProcess + + + + * + Certificate @@ -15,6 +20,11 @@ * Dashboard + + + * + FlowDefinition + * diff --git a/defaults/lintonly/.gitlab-ci.yml b/defaults/lintonly/.gitlab-ci.yml index 3775d9da0..2c89cd0a9 100644 --- a/defaults/lintonly/.gitlab-ci.yml +++ b/defaults/lintonly/.gitlab-ci.yml @@ -7,7 +7,7 @@ stages: # On execute les jobs sur l'image hardisgroupcom/sfdx-hardis qui contient les applications necessaires # Version latest recommandée, cependant beta et alpha peuvent être utilisées pour les tests -image: hardisgroupcom/sfdx-hardis:latest +image: hardisgroupcom/sfdx-hardis:latest # If rate limits reached, use ghcr.io/hardisgroupcom/sfdx-hardis:latest # Variables globales aux jobs variables: diff --git a/defaults/mkdocs-project-doc/docs/javascripts/gtag.js b/defaults/mkdocs-project-doc/docs/javascripts/gtag.js new file mode 100644 index 000000000..4bb097e15 --- /dev/null +++ 
b/defaults/mkdocs-project-doc/docs/javascripts/gtag.js @@ -0,0 +1,27 @@ +/* +location$.subscribe(function(url) { + window.dataLayer = window.dataLayer || []; + function gtag() { + dataLayer.push(arguments); + } + gtag("js", new Date()); + + gtag("config", "G-3DM50255LC"); +}); +*/ +var gtag_id = "G-XXXXXXXXXX"; + +var script = document.createElement("script"); +script.src = "https://www.googletagmanager.com/gtag/js?id=" + gtag_id; +document.head.appendChild(script); + +location$.subscribe(function (url) { + window.dataLayer = window.dataLayer || []; + + function gtag() { + dataLayer.push(arguments); + } + + gtag("js", new Date()); + gtag("config", gtag_id); +}); diff --git a/defaults/mkdocs-project-doc/docs/javascripts/jstree-handler.js b/defaults/mkdocs-project-doc/docs/javascripts/jstree-handler.js new file mode 100644 index 000000000..3eca931d1 --- /dev/null +++ b/defaults/mkdocs-project-doc/docs/javascripts/jstree-handler.js @@ -0,0 +1,111 @@ + +/* requires the following configuration in mkdocs.yml +extra_javascript: + - https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.4/jquery.min.js + - https://cdnjs.cloudflare.com/ajax/libs/jstree/3.3.12/jstree.min.js + - javascripts/jstree-handler.js +extra_css: + - https://cdnjs.cloudflare.com/ajax/libs/jstree/3.3.12/themes/default/style.min.css + - https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.0/css/all.min.css +*/ + +function countJsTreeNodes(nodes) { + let count = 0; + + function traverse(nodeList) { + for (const node of nodeList) { + count++; + if (node.children && node.children.length > 0) { + traverse(node.children); + } + } + } + + traverse(nodes); + return count; +} + +document$.subscribe(async () => { + // Initialize jstree with some sample data + const container = $('#jstree-container'); + if (!container || container.length === 0) { + return; // Ensure the container exists before initializing + } + + const pathParts = window.location.pathname.split('/').filter(Boolean); + + let section = ""; + let name 
= ""; + if (pathParts.length < 2) { + section = "root"; + name = pathParts.slice(-1); + } + else { + [section, name] = pathParts.slice(-2); // Get last two segments + } + const jsonPath = `/json/${section}-${name}.json`; + + try { + const response = await fetch(jsonPath); + if (!response.ok) { + throw new Error(`HTTP error ! Status: ${response.status}`); + } + const jsonData = await response.json(); + const nodesNumber = countJsTreeNodes(jsonData); + container.jstree({ + 'core': { + 'data': jsonData + }, + "plugins": [ + "search" + ], + 'search': { + //'show_only_matches': true, + 'case_sensitive': false + } + }).on('ready.jstree', function () { + $('#jstree-container').on('click', '.jstree-anchor', function (e) { + // Prevent the default behavior of the click (which might conflict with jsTree's event) + e.preventDefault(); + // Get the node + const node = $(this).closest('li'); + // Check if it's a folder (node with children) and toggle + if (node.hasClass('jstree-open')) { + $('#jstree-container').jstree('close_node', node); + } else { + $('#jstree-container').jstree('open_node', node); + } + }); + // Open all if there are less than 30 nodes + if (nodesNumber < 20) { + $('#jstree-container').jstree('open_all'); + } + }); + + // Add search button + if (container?.[0]?.parentNode) { + // Create the input element + const searchInput = document.createElement('input'); + searchInput.type = 'text'; + searchInput.id = 'jstree-search'; + searchInput.placeholder = 'Input items to find'; + searchInput.style.marginBottom = '1em'; // Optional spacing + // Insert it before the jstree container + container[0].parentNode.insertBefore(searchInput, container[0]); + + // Perform search + let to = false; + $('#jstree-search').on('input', function () { + if (to) clearTimeout(to); + to = setTimeout(function () { + const searchValue = $('#jstree-search').val(); + $('#jstree-container').jstree(true).search(searchValue); + }, 250); // debounce for smoother UX + }); + } + + } catch (err) 
{ + console.error('Failed to load JSON file:', jsonPath, err); + } + +}); \ No newline at end of file diff --git a/defaults/mkdocs-project-doc/docs/javascripts/tables.js b/defaults/mkdocs-project-doc/docs/javascripts/tables.js new file mode 100644 index 000000000..513f93ec9 --- /dev/null +++ b/defaults/mkdocs-project-doc/docs/javascripts/tables.js @@ -0,0 +1,6 @@ +document$.subscribe(function () { + var tables = document.querySelectorAll("article table") + tables.forEach(function (table) { + new Tablesort(table) + }) +}) diff --git a/defaults/mkdocs-project-doc/docs/json/index.md b/defaults/mkdocs-project-doc/docs/json/index.md new file mode 100644 index 000000000..f875b6f79 --- /dev/null +++ b/defaults/mkdocs-project-doc/docs/json/index.md @@ -0,0 +1 @@ +Json files used for advanced display like jsTree \ No newline at end of file diff --git a/defaults/mkdocs-project-doc/docs/overrides/main.html b/defaults/mkdocs-project-doc/docs/overrides/main.html new file mode 100644 index 000000000..3cf4af8cb --- /dev/null +++ b/defaults/mkdocs-project-doc/docs/overrides/main.html @@ -0,0 +1,4 @@ +{% extends "base.html" %} + + + diff --git a/defaults/mkdocs-project-doc/docs/stylesheets/extra.css b/defaults/mkdocs-project-doc/docs/stylesheets/extra.css new file mode 100644 index 000000000..b01f21eed --- /dev/null +++ b/defaults/mkdocs-project-doc/docs/stylesheets/extra.css @@ -0,0 +1,33 @@ +/* stylelint-disable SelectorFormat */ +.md-typeset__table { + min-width: 100%; +} + +.md-typeset table:not([class]) { + display: table; +} + +/* light mode table header bgcolor */ +.md-typeset__table th { + background-color: #f2edfe; +} + +/* dark mode table header bgcolor */ +[data-md-color-scheme="slate"] .md-typeset__table th { + background-color: hsla(var(--md-hue), 25%, 25%, 1) +} + +/* light mode alternating table bg colors */ +.md-typeset__table tr:nth-child(2n) { + background-color: #f8f8f8; +} + +/* dark mode alternating table bg colors */ +[data-md-color-scheme="slate"] 
.md-typeset__table tr:nth-child(2n) { + background-color: hsla(var(--md-hue), 25%, 25%, 1) +} + +:root>* { + --md-primary-fg-color: #1B1464; + --md-footer-bg-color: #1B1464; +} \ No newline at end of file diff --git a/defaults/mkdocs-project-doc/docs/stylesheets/jstree-custom.css b/defaults/mkdocs-project-doc/docs/stylesheets/jstree-custom.css new file mode 100644 index 000000000..e31c2ac54 --- /dev/null +++ b/defaults/mkdocs-project-doc/docs/stylesheets/jstree-custom.css @@ -0,0 +1,15 @@ +.icon-warning { + color: orange; +} + +.icon-success { + color: green; +} + +.icon-error { + color: red; +} + +.icon-blue { + color: #00A1E0; +} \ No newline at end of file diff --git a/defaults/mkdocs-project-doc/mkdocs.yml b/defaults/mkdocs-project-doc/mkdocs.yml new file mode 100644 index 000000000..0934b9464 --- /dev/null +++ b/defaults/mkdocs-project-doc/mkdocs.yml @@ -0,0 +1,68 @@ +site_name: Salesforce Project Documentation +site_url: https://sfdx-hardis.cloudity.com +repo_url: https://github.com/hardisgroupco/sfdx-hardis +edit_uri: tree/master/docs +site_author: Nicolas Vuillamy +site_description: Salesforce project documentation generated by sfdx-hardis +copyright: Doc generated by sfdx-hardis, provided by Cloudity +theme: + name: material + icon: + logo: material/infinity + # logo: assets/images/cloudity-text-logo.svg + # favicon: assets/images/cloudity-logo.svg + custom_dir: docs/overrides + features: + - navigation.instant + - navigation.footer + palette: + primary: indigo +plugins: + - search + - exclude-search: + exclude: + - index.md + - cache-ai-results/*.md + - "*package.xml.md" + - package-*items.xml.md +markdown_extensions: + - pymdownx.emoji: + emoji_index: !!python/name:materialx.emoji.twemoji + emoji_generator: !!python/name:materialx.emoji.to_svg + - pymdownx.snippets: + base_path: docs + check_paths: true + restrict_base_path: false + - mdx_truly_sane_lists + - attr_list + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + 
format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true +extra_javascript: + - https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.4/jquery.min.js + - https://cdnjs.cloudflare.com/ajax/libs/jstree/3.3.12/jstree.min.js + - https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js + - javascripts/tables.js + - javascripts/gtag.js + - javascripts/jstree-handler.js +extra_css: + - stylesheets/extra.css + - https://cdnjs.cloudflare.com/ajax/libs/jstree/3.3.12/themes/default/style.min.css +extra: + social: + - icon: fontawesome/regular/circle-question + link: https://github.com/hardisgroupcom/sfdx-hardis/issues + name: Need help? Post an issue :) + - icon: fontawesome/brands/github + link: https://github.com/hardisgroupcom/sfdx-hardis + - icon: fontawesome/solid/infinity + link: https://cloudity.com/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/NicolasVuillamy + generator: false +nav: + - "Home": "index.md" diff --git a/defaults/mkdocs/.github/workflows/build-deploy-docs.yml b/defaults/mkdocs/.github/workflows/build-deploy-docs.yml index a4ecece0f..2823163bf 100644 --- a/defaults/mkdocs/.github/workflows/build-deploy-docs.yml +++ b/defaults/mkdocs/.github/workflows/build-deploy-docs.yml @@ -12,16 +12,16 @@ jobs: steps: - uses: actions/checkout@v4 # Build doc with sfdx-hardis - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: "20.x" - run: yarn - run: yarn prepack - run: npm i @salesforce/cli -g - - run: echo y|sfdx plugins:install sfdx-hardis - - run: sfdx hardis:doc:plugin:generate + - run: echo y|sf plugins install sfdx-hardis + - run: sf hardis:doc:plugin:generate # Deploy docs with mkdocs-material - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v5 with: python-version: 3.x - run: pip install mkdocs-material mdx_truly_sane_lists json-schema-for-humans diff --git a/defaults/mkdocs/docs/stylesheets/extra.css 
b/defaults/mkdocs/docs/stylesheets/extra.css index 8503fe4dd..b01f21eed 100644 --- a/defaults/mkdocs/docs/stylesheets/extra.css +++ b/defaults/mkdocs/docs/stylesheets/extra.css @@ -6,3 +6,28 @@ .md-typeset table:not([class]) { display: table; } + +/* light mode table header bgcolor */ +.md-typeset__table th { + background-color: #f2edfe; +} + +/* dark mode table header bgcolor */ +[data-md-color-scheme="slate"] .md-typeset__table th { + background-color: hsla(var(--md-hue), 25%, 25%, 1) +} + +/* light mode alternating table bg colors */ +.md-typeset__table tr:nth-child(2n) { + background-color: #f8f8f8; +} + +/* dark mode alternating table bg colors */ +[data-md-color-scheme="slate"] .md-typeset__table tr:nth-child(2n) { + background-color: hsla(var(--md-hue), 25%, 25%, 1) +} + +:root>* { + --md-primary-fg-color: #1B1464; + --md-footer-bg-color: #1B1464; +} \ No newline at end of file diff --git a/defaults/mkdocs/mkdocs.yml b/defaults/mkdocs/mkdocs.yml index 3915d17b2..661e26d4c 100644 --- a/defaults/mkdocs/mkdocs.yml +++ b/defaults/mkdocs/mkdocs.yml @@ -9,11 +9,16 @@ theme: name: material custom_dir: docs/overrides features: + - content.code.copy - navigation.instant -# logo: assets/images/salesforce-logo.png -# favicon: assets/images/salesforce-icon.png + - navigation.footer + # logo: assets/images/salesforce-logo.png + # favicon: assets/images/salesforce-icon.png palette: primary: "light blue" +plugins: + - glightbox + - search markdown_extensions: - pymdownx.emoji: emoji_index: !!python/name:materialx.emoji.twemoji @@ -21,8 +26,16 @@ markdown_extensions: - pymdownx.snippets: base_path: docs check_paths: true + restrict_base_path: false - mdx_truly_sane_lists - attr_list + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true extra_javascript: - https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js - 
javascripts/tables.js diff --git a/defaults/monitoring/.github/workflows/org-monitoring.yml b/defaults/monitoring/.github/workflows/org-monitoring.yml index 0dc2a457f..0e164742b 100644 --- a/defaults/monitoring/.github/workflows/org-monitoring.yml +++ b/defaults/monitoring/.github/workflows/org-monitoring.yml @@ -10,7 +10,7 @@ # - Do a CTRL+F and look for "MANUAL" # - Add your monitored git branches here where asked to replace # - Add your authentication variable names where asked to replace -# - Commit & push: there should be a SINGLE GitHub Actions job (using matrix) that will run the monitoring on all orgs +# - Commit & push: there should be a SINGLE GitHub Actions job (using matrix) that will run the monitoring on all orgs # You may also: # - Update manifest/package-skip-items.xml to filter elements to retrieve (must be done directly in monitored branches) @@ -18,13 +18,11 @@ # Doc & support: https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/ on: - push: # Automatically run every day at midnight schedule: - - cron: '0 0 * * *' # Cron format -> https://crontab.cronhub.io/ + - cron: "0 0 * * *" # Cron format -> https://crontab.cronhub.io/ workflow_dispatch: - name: Org Monitoring sfdx-hardis # concurrency: @@ -41,9 +39,10 @@ jobs: runs-on: ubuntu-latest name: Backup metadatas permissions: write-all + timeout-minutes: 360 strategy: fail-fast: false - max-parallel: 10 + max-parallel: 10 # Set to 1 if you have issues with command npm install --no-cache @salesforce/cli --global matrix: # MANUAL: Add your monitored git branches here branch: @@ -52,74 +51,80 @@ jobs: - monitoring_myclient__recette_sandbox - monitoring_myclient__preprod_sandbox steps: - # Checkout repo - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Faster code checkout fetching only latest commit - ref: ${{ matrix.branch }} - persist-credentials: true - # Setup node - - name: Setup Node - uses: actions/setup-node@v3 - with: - node-version: "20" - # SFDX & 
plugins - - name: Install SFDX and plugins - run: | - npm install --no-cache @salesforce/cli --global - sf plugins install --force @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install --force sfdx-hardis - echo 'y' | sfdx plugins:install --force sfdx-essentials - echo 'y' | sfdx plugins:install --force sfdx-git-delta - sf version --verbose --json - # Login & check deploy with test classes & code coverage - - name: Login & Retrieve Metadata - env: - # MANUAL: Update variables below to add authentication variables you need - SFDX_CLIENT_ID_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT}} - SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX}} - SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX}} - SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary - SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary - CI_COMMIT_REF_NAME: ${{ matrix.branch }} - ORG_ALIAS: ${{ matrix.branch }} - CONFIG_BRANCH: ${{ matrix.branch }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} - NOTIF_EMAIL_ADDRESS: ${{ secrets.NOTIF_EMAIL_ADDRESS }} - FORCE_COLOR: "1" - NOTIF_API_URL: ${{ secrets.NOTIF_API_URL }} - NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} - NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} - NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} 
- NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} + # Checkout repo + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Faster code checkout fetching only latest commit + ref: ${{ matrix.branch }} + persist-credentials: true + # Setup node + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + # SFDX & plugins + - name: Install SFDX and plugins + run: | + npm install --no-cache @salesforce/cli --global + sf plugins install --force @salesforce/plugin-packaging + echo 'y' | sf plugins install --force sfdx-hardis + echo 'y' | sf plugins install --force sfdx-git-delta + sf version --verbose --json + # Login & check deploy with test classes & code coverage + - name: Login & Retrieve Metadata + env: + # MANUAL: Update variables below to add authentication variables you need + SFDX_CLIENT_ID_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT}} + SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX}} + SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX}} + SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary + SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary + CI_COMMIT_REF_NAME: ${{ matrix.branch }} + ORG_ALIAS: ${{ matrix.branch }} + CONFIG_BRANCH: ${{ matrix.branch }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} + SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} + NOTIF_EMAIL_ADDRESS: ${{ secrets.NOTIF_EMAIL_ADDRESS }} + FORCE_COLOR: "1" + NOTIF_API_URL: 
${{ secrets.NOTIF_API_URL }} + NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} + NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} + NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} + NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} + NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} + CLOUDFLARE_EMAIL: ${{ secrets.CLOUDFLARE_EMAIL }} + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + CLOUDFLARE_PROJECT_NAME: ${{ secrets.CLOUDFLARE_PROJECT_NAME }} + CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE: ${{ secrets.CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE }} + CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN: ${{ secrets.CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN }} + CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST: ${{ secrets.CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST }} - run: | - echo "Monitoring sfdx-hardis: Metadata Backup for \"$CONFIG_BRANCH\"" - sfdx hardis:auth:login - sfdx hardis:org:monitor:backup + run: | + echo "Monitoring sfdx-hardis: Metadata Backup for \"$CONFIG_BRANCH\"" + sf hardis:auth:login + sf hardis:org:monitor:backup - # Push new commit if applicable - # (for now works only on PR from same repository, not from forks) - - name: Prepare commit - run: chown -Rc $UID .git/ + # Push new commit if applicable + # (for now works only on PR from same repository, not from forks) + - name: Prepare commit + run: chown -Rc $UID .git/ - - name: Get current date - run: echo "BUILD_DATE=$(date -u +'%Y-%m-%d %H:%M')" >> ${GITHUB_ENV} + - name: Get current date + run: echo "BUILD_DATE=$(date -u +'%Y-%m-%d %H:%M')" >> ${GITHUB_ENV} - - name: Commit and push - uses: stefanzweifel/git-auto-commit-action@v5 - with: - branch: ${{ matrix.branch }} - commit_message: "Org state on ${{ env.BUILD_DATE }} for ${{ matrix.branch }} [skip ci]" - commit_user_name: sfdx-hardis-bot - commit_user_email: 
contact@cloudity.com + - name: Commit and push + uses: stefanzweifel/git-auto-commit-action@v5 + with: + branch: ${{ matrix.branch }} + commit_message: "Org state on ${{ env.BUILD_DATE }} for ${{ matrix.branch }} [skip ci]" + commit_user_name: sfdx-hardis-bot + commit_user_email: contact@cloudity.com ###################### ### Run Apex Tests ### @@ -129,9 +134,10 @@ jobs: name: Apex tests needs: backup permissions: write-all + timeout-minutes: 360 strategy: fail-fast: false - max-parallel: 10 + max-parallel: 10 # Set to 1 if you have issues with command npm install --no-cache @salesforce/cli --global matrix: # MANUAL: Add your monitored git branches here branch: @@ -140,66 +146,66 @@ jobs: - monitoring_myclient__recette_sandbox - monitoring_myclient__preprod_sandbox steps: - # Checkout repo - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Faster code checkout fetching only latest commit - ref: ${{ matrix.branch }} - persist-credentials: true - # Setup node - - name: Setup Node - uses: actions/setup-node@v3 - with: - node-version: "20" - # SFDX & plugins - - name: Install SFDX and plugins - run: | - npm install --no-cache @salesforce/cli --global - sf plugins install --force @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install --force sfdx-hardis - echo 'y' | sfdx plugins:install --force sfdx-essentials - echo 'y' | sfdx plugins:install --force sfdx-git-delta - sf version --verbose --json - # Login & check deploy with test classes & code coverage - - name: Login & Run apex tests - env: - # MANUAL: Update variables below to add authentication variables you need - SFDX_CLIENT_ID_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT}} - SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ 
secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX}} - SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX}} - SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary - SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary - CI_COMMIT_REF_NAME: ${{ matrix.branch }} - ORG_ALIAS: ${{ matrix.branch }} - CONFIG_BRANCH: ${{ matrix.branch }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} - NOTIF_EMAIL_ADDRESS: ${{ secrets.NOTIF_EMAIL_ADDRESS }} - FORCE_COLOR: "1" - NOTIF_API_URL: ${{ secrets.NOTIF_API_URL }} - NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} - NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} - NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} - NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} - run: | - echo "Run apex tests against \"$CONFIG_BRANCH\"" - git pull origin ${{ matrix.branch }} - sfdx hardis:auth:login - sfdx hardis:org:test:apex + # Checkout repo + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Faster code checkout fetching only latest commit + ref: ${{ matrix.branch }} + persist-credentials: true + # Setup node + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + # SFDX & plugins + - name: Install SFDX and plugins + run: | + npm install --no-cache @salesforce/cli --global + sf plugins install --force @salesforce/plugin-packaging + echo 'y' | sf plugins install --force sfdx-hardis + echo 'y' | sf plugins install --force sfdx-git-delta + sf version --verbose --json + # Login & check deploy with test classes 
& code coverage + - name: Login & Run apex tests + env: + # MANUAL: Update variables below to add authentication variables you need + SFDX_CLIENT_ID_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT}} + SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX}} + SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX}} + SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary + SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary + CI_COMMIT_REF_NAME: ${{ matrix.branch }} + ORG_ALIAS: ${{ matrix.branch }} + CONFIG_BRANCH: ${{ matrix.branch }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} + SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} + NOTIF_EMAIL_ADDRESS: ${{ secrets.NOTIF_EMAIL_ADDRESS }} + FORCE_COLOR: "1" + NOTIF_API_URL: ${{ secrets.NOTIF_API_URL }} + NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} + NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} + NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} + NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} + NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} + run: | + echo "Run apex tests against \"$CONFIG_BRANCH\"" + git pull origin ${{ matrix.branch }} + sf hardis:auth:login + sf hardis:org:test:apex - - name: Upload artifacts - if: success() || failure() - uses: actions/upload-artifact@v3 - with: - name: Hardis Apex Tests reports - path: | - hardis-report + - 
name: Upload artifacts + if: success() || failure() + uses: actions/upload-artifact@v4 + with: + name: Hardis Apex Tests reports + include-hidden-files: "true" + path: | + hardis-report ########################################################### ## Run MegaLinter to detect quality and security issues ### @@ -209,16 +215,28 @@ jobs: runs-on: ubuntu-latest needs: backup permissions: read-all + timeout-minutes: 360 + strategy: + fail-fast: false + max-parallel: 10 # Set to 1 if you have issues with race conditions + matrix: + # MANUAL: Add your monitored git branches here + branch: + - monitoring_myclient + - monitoring_myclient__integ_sandbox + - monitoring_myclient__recette_sandbox + - monitoring_myclient__preprod_sandbox steps: - # Git Checkout - - name: Checkout Code + # Checkout repo + - name: Checkout code uses: actions/checkout@v4 with: - token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} - fetch-depth: 0 + fetch-depth: 0 # Faster code checkout fetching only latest commit + ref: ${{ matrix.branch }} + persist-credentials: true - name: Git pull - run: git pull origin ${{ matrix.branch }} + run: git pull origin ${{ matrix.branch }} # Mega-Linter - name: Mega-Linter @@ -231,20 +249,22 @@ jobs: # https://megalinter.io/latest/config-file/ VALIDATE_ALL_CODEBASE: true # Set ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }} to validate only diff with master branch GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NOTIF_API_URL: ${{ secrets.NOTIF_API_URL }} - NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} - NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} - NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} - NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} + API_REPORTER: "true" + API_REPORTER_URL: ${{ secrets.NOTIF_API_URL }} + 
API_REPORTER_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} + API_REPORTER_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} + API_REPORTER_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} + API_REPORTER_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} + API_REPORTER_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY # Upload Mega-Linter artifacts - name: Archive production artifacts if: success() || failure() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Mega-Linter reports + include-hidden-files: "true" path: | megalinter-reports mega-linter.log @@ -255,9 +275,10 @@ jobs: name: Monitoring needs: backup permissions: read-all + timeout-minutes: 360 strategy: fail-fast: false - max-parallel: 10 + max-parallel: 10 # Set to 1 if you have issues with command npm install --no-cache @salesforce/cli --global matrix: # MANUAL: Add your monitored git branches here branch: @@ -266,64 +287,64 @@ jobs: - monitoring_myclient__recette_sandbox - monitoring_myclient__preprod_sandbox steps: - # Checkout repo - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Faster code checkout fetching only latest commit - ref: ${{ matrix.branch }} - persist-credentials: true - # Setup node - - name: Setup Node - uses: actions/setup-node@v3 - with: - node-version: "20" - # SFDX & plugins - - name: Install SFDX and plugins - run: | - npm install --no-cache @salesforce/cli --global - sf plugins install --force @salesforce/plugin-packaging - echo 'y' | sfdx plugins:install --force sfdx-hardis - echo 'y' | sfdx plugins:install --force sfdx-essentials - echo 'y' | sfdx plugins:install --force sfdx-git-delta - sf version --verbose --json - # Login & check deploy with test classes & code coverage - - name: Login & Run monitoring checks - 
env: - # MANUAL: Update variables below to add authentication variables you need - SFDX_CLIENT_ID_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT}} - SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX}} - SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX}} - SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX}} - SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary - SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary - CI_COMMIT_REF_NAME: ${{ matrix.branch }} - ORG_ALIAS: ${{ matrix.branch }} - CONFIG_BRANCH: ${{ matrix.branch }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} - NOTIF_EMAIL_ADDRESS: ${{ secrets.NOTIF_EMAIL_ADDRESS }} - FORCE_COLOR: "1" - NOTIF_API_URL: ${{ secrets.NOTIF_API_URL }} - NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} - NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} - NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} - NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} + # Checkout repo + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ matrix.branch }} + persist-credentials: true + # Setup node + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + # SFDX & plugins + - name: Install SFDX and plugins + run: | + npm install --no-cache @salesforce/cli --global + 
sf plugins install --force @salesforce/plugin-packaging + echo 'y' | sf plugins install --force sfdx-hardis + echo 'y' | sf plugins install --force sfdx-git-delta + sf version --verbose --json + # Login & check deploy with test classes & code coverage + - name: Login & Run monitoring checks + env: + # MANUAL: Update variables below to add authentication variables you need + SFDX_CLIENT_ID_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT}} + SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__RECETTE_SANDBOX}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__RECETTE_SANDBOX}} + SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_ID_MONITORING_MYCLIENT__PREPROD_SANDBOX}} + SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX: ${{ secrets.SFDX_CLIENT_KEY_MONITORING_MYCLIENT__PREPROD_SANDBOX}} + SFDX_DEPLOY_WAIT_MINUTES: 120 # Override if necessary + SFDX_TEST_WAIT_MINUTES: 120 # Override if necessary + CI_COMMIT_REF_NAME: ${{ matrix.branch }} + ORG_ALIAS: ${{ matrix.branch }} + CONFIG_BRANCH: ${{ matrix.branch }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} + SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} + NOTIF_EMAIL_ADDRESS: ${{ secrets.NOTIF_EMAIL_ADDRESS }} + FORCE_COLOR: "1" + NOTIF_API_URL: ${{ secrets.NOTIF_API_URL }} + NOTIF_API_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_BASIC_AUTH_USERNAME }} + NOTIF_API_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_BASIC_AUTH_PASSWORD }} + NOTIF_API_METRICS_URL: ${{ secrets.NOTIF_API_METRICS_URL }} + NOTIF_API_METRICS_BASIC_AUTH_USERNAME: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_USERNAME }} + NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: ${{ secrets.NOTIF_API_METRICS_BASIC_AUTH_PASSWORD }} - run: | - echo "Run Monitoring checks against \"$CONFIG_BRANCH\"" - 
git pull origin ${{ matrix.branch }} - sfdx hardis:auth:login - sfdx hardis:org:monitor:all + run: | + echo "Run Monitoring checks against \"$CONFIG_BRANCH\"" + git pull origin ${{ matrix.branch }} + sf hardis:auth:login + sf hardis:org:monitor:all - - name: Upload artifacts - if: success() || failure() - uses: actions/upload-artifact@v3 - with: - name: Hardis Monitoring reports - path: | - hardis-report \ No newline at end of file + - name: Upload artifacts + if: success() || failure() + uses: actions/upload-artifact@v4 + with: + name: Hardis Monitoring reports + include-hidden-files: "true" + path: | + hardis-report diff --git a/defaults/monitoring/.gitlab-ci.yml b/defaults/monitoring/.gitlab-ci.yml index d55dbdca1..11a89694f 100644 --- a/defaults/monitoring/.gitlab-ci.yml +++ b/defaults/monitoring/.gitlab-ci.yml @@ -8,6 +8,7 @@ variables: FORCE_COLOR: "1" + GIT_FETCH_EXTRA_FLAGS: --depth 10000 # Pipeline stages stages: @@ -15,7 +16,7 @@ stages: - monitor # Use sfdx-hardis docker image to always be up to date with latest version -image: hardisgroupcom/sfdx-hardis:latest # test with alpha +image: hardisgroupcom/sfdx-hardis:latest # If rate limits reached, use ghcr.io/hardisgroupcom/sfdx-hardis:latest ############################################## ### Sfdx Sources Backup + Push new commit #### @@ -30,8 +31,8 @@ backup: - git config --global user.email "${GITLAB_USER_EMAIL}" script: # Extract metadata folders (+other checks ^^) - - sfdx hardis:auth:login - - sfdx hardis:org:monitor:backup + - sf hardis:auth:login + - sf hardis:org:monitor:backup # Commit and push new state - git status - git add --all @@ -58,8 +59,8 @@ apex_tests: - git pull origin "${CI_COMMIT_REF_NAME}" script: # Login & run apex tests - - sfdx hardis:auth:login - - sfdx hardis:org:test:apex + - sf hardis:auth:login + - sf hardis:org:test:apex artifacts: when: always paths: @@ -79,8 +80,8 @@ monitoring_tools: - git pull origin "${CI_COMMIT_REF_NAME}" script: # Login & run apex tests - - sfdx 
hardis:auth:login - - sfdx hardis:org:monitor:all + - sf hardis:auth:login + - sf hardis:org:monitor:all artifacts: when: always paths: diff --git a/defaults/monitoring/README.md b/defaults/monitoring/README.md index 9de1287e3..de419d896 100644 --- a/defaults/monitoring/README.md +++ b/defaults/monitoring/README.md @@ -2,7 +2,7 @@ All documentation here -> https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/ -Just use the command [sfdx hardis:org:configure:monitoring](https://sfdx-hardis.cloudity.com/) in case your monitored org is a sandbox that has been refreshed ! +Just use the command [sf hardis:org:configure:monitoring](https://sfdx-hardis.cloudity.com/) in case your monitored org is a sandbox that has been refreshed ! If you have any issue, question or remark, please notify us on [sfdx-hardis monitoring](https://github.com/hardisgroupcom/sfdx-hardis/issues) diff --git a/defaults/monitoring/azure-pipelines.yml b/defaults/monitoring/azure-pipelines.yml index 5907de579..da55ca0ad 100644 --- a/defaults/monitoring/azure-pipelines.yml +++ b/defaults/monitoring/azure-pipelines.yml @@ -11,12 +11,12 @@ # Doc & support: https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/ schedules: -- cron: "0 4 * * *" # Cron format -> https://crontab.cronhub.io/ - always: "true" - branches: - # MANUAL: Add your monitored branches here - include: - - monitoring_my_client__integ_sandbox + - cron: "0 4 * * *" # Cron format -> https://crontab.cronhub.io/ + always: "true" + branches: + # MANUAL: Add your monitored branches here + include: + - monitoring_my_client__integ_sandbox pool: vmImage: ubuntu-latest @@ -28,279 +28,284 @@ variables: value: $[replace(variables['Build.SourceBranch'], 'refs/heads/', '')] jobs: -############################################## -### Sfdx Sources Backup + Push new commit #### -############################################## -- job: BackupSfdxHardis - pool: - vmImage: 'ubuntu-latest' + ############################################## + ### Sfdx 
Sources Backup + Push new commit #### + ############################################## + - job: BackupSfdxHardis + pool: + vmImage: "ubuntu-latest" - steps: - - checkout: self - persistCredentials: "true" - displayName: Git checkout + steps: + - checkout: self + persistCredentials: "true" + displayName: Git checkout + fetchDepth: "0" - - script: | - git config --global user.email "contact@cloudity.com" - git config --global user.name "sfdx-hardis monitoring" - workingDirectory: $(System.DefaultWorkingDirectory) - displayName: Git config + - script: | + git config --global user.email "contact@cloudity.com" + git config --global user.name "sfdx-hardis monitoring" + workingDirectory: $(System.DefaultWorkingDirectory) + displayName: Git config - - script: | - npm install @salesforce/cli -g - sf plugins install @salesforce/plugin-packaging - echo y | sfdx plugins:install sfdx-hardis - echo y | sfdx plugins:install sfdx-essentials - sf version --verbose --json - workingDirectory: $(System.DefaultWorkingDirectory) - displayName: Install @salesforce/cli & sfdx-hardis + - script: | + npm install @salesforce/cli -g + sf plugins install @salesforce/plugin-packaging + echo y | sf plugins install sfdx-hardis + sf version --verbose --json + workingDirectory: $(System.DefaultWorkingDirectory) + displayName: Install @salesforce/cli & sfdx-hardis - - script: | - git checkout -b "$BRANCH_NAME" - displayName: Checkout git branch latest commit - env: - CI: "true" + - script: | + git checkout -b "$BRANCH_NAME" + displayName: Checkout git branch latest commit + env: + CI: "true" - - script: | - sfdx hardis:auth:login - sfdx hardis:org:monitor:backup - displayName: sfdx-hardis login & Backup - env: - # MANUAL: Add your branch related variables here - SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX) - SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX) - CI_COMMIT_REF_NAME: 
$(BRANCH_NAME) - CONFIG_BRANCH: $(BRANCH_NAME) - ORG_ALIAS: $(BRANCH_NAME) - SLACK_TOKEN: $(SLACK_TOKEN) - SLACK_CHANNEL_ID: $(SLACK_CHANNEL_ID) - NOTIF_EMAIL_ADDRESS: $(NOTIF_EMAIL_ADDRESS) - CI: "true" - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) - SYSTEM_COLLECTIONURI: $(System.CollectionUri) - SYSTEM_TEAMPROJECT: $(System.TeamProject) - SYSTEM_JOB_DISPLAY_NAME: $(System.JobDisplayName) - SYSTEM_JOB_ID: $(System.JobId) - SYSTEM_PULLREQUEST_PULLREQUESTID: $(System.PullRequest.PullRequestId) - BUILD_REPOSITORY_ID: $(Build.Repository.ID) - BUILD_REPOSITORYNAME: $(Build.Repository.Name) - BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) - BUILD_BUILD_ID: $(Build.BuildId) - MONITORING_BACKUP_SKIP_METADATA_TYPES: $(MONITORING_BACKUP_SKIP_METADATA_TYPES) - NOTIFICATIONS_DISABLE: $(NOTIFICATIONS_DISABLE) - MONITORING_DISABLE: $(MONITORING_DISABLE) - NOTIF_API_URL: $(NOTIF_API_URL) - NOTIF_API_BASIC_AUTH_USERNAME: $(NOTIF_API_BASIC_AUTH_USERNAME) - NOTIF_API_BASIC_AUTH_PASSWORD: $(NOTIF_API_BASIC_AUTH_PASSWORD) - NOTIF_API_METRICS_URL: $(NOTIF_API_METRICS_URL) - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: $(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) - NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: $(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) + - script: | + sf hardis:auth:login + sf hardis:org:monitor:backup + displayName: sfdx-hardis login & Backup + env: + # MANUAL: Add your branch related variables here + SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX) + SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX) + CI_COMMIT_REF_NAME: $(BRANCH_NAME) + CONFIG_BRANCH: $(BRANCH_NAME) + ORG_ALIAS: $(BRANCH_NAME) + SLACK_TOKEN: $(SLACK_TOKEN) + SLACK_CHANNEL_ID: $(SLACK_CHANNEL_ID) + NOTIF_EMAIL_ADDRESS: $(NOTIF_EMAIL_ADDRESS) + CI: "true" + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) + 
SYSTEM_COLLECTIONURI: $(System.CollectionUri) + SYSTEM_TEAMPROJECT: $(System.TeamProject) + SYSTEM_JOB_DISPLAY_NAME: $(System.JobDisplayName) + SYSTEM_JOB_ID: $(System.JobId) + SYSTEM_PULLREQUEST_PULLREQUESTID: $(System.PullRequest.PullRequestId) + BUILD_REPOSITORY_ID: $(Build.Repository.ID) + BUILD_REPOSITORYNAME: $(Build.Repository.Name) + BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) + BUILD_BUILD_ID: $(Build.BuildId) + MONITORING_BACKUP_SKIP_METADATA_TYPES: $(MONITORING_BACKUP_SKIP_METADATA_TYPES) + NOTIFICATIONS_DISABLE: $(NOTIFICATIONS_DISABLE) + MONITORING_DISABLE: $(MONITORING_DISABLE) + NOTIF_API_URL: $(NOTIF_API_URL) + NOTIF_API_BASIC_AUTH_USERNAME: $(NOTIF_API_BASIC_AUTH_USERNAME) + NOTIF_API_BASIC_AUTH_PASSWORD: $(NOTIF_API_BASIC_AUTH_PASSWORD) + NOTIF_API_METRICS_URL: $(NOTIF_API_METRICS_URL) + NOTIF_API_METRICS_BASIC_AUTH_USERNAME: $(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) + NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: $(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) + CLOUDFLARE_EMAIL: $(CLOUDFLARE_EMAIL) + CLOUDFLARE_API_TOKEN: $(CLOUDFLARE_API_TOKEN) + CLOUDFLARE_ACCOUNT_ID: $(CLOUDFLARE_ACCOUNT_ID) + CLOUDFLARE_PROJECT_NAME: $(CLOUDFLARE_PROJECT_NAME) + CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE: $(CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE) + CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN: $(CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN) + CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST: $(CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST) - - script: | - git status - git add --all - git commit -m "Org state on $(date -u +'%Y-%m-%d %H:%M') for $BRANCH_NAME [skip ci]" || echo "No changes to commit" - git push --set-upstream origin "$BRANCH_NAME" - displayName: Add new commit to Monitoring Repo - workingDirectory: $(System.DefaultWorkingDirectory) - env: - CI: "true" + - script: | + git status + git add --all + git commit -m "Org state on $(date -u +'%Y-%m-%d %H:%M') for $BRANCH_NAME [skip ci]" || echo "No changes to commit" + git push --set-upstream origin "$BRANCH_NAME" + displayName: Add new commit to 
Monitoring Repo + workingDirectory: $(System.DefaultWorkingDirectory) + env: + CI: "true" - - publish: $(System.DefaultWorkingDirectory)/hardis-report/ - artifact: hardis-report-backup - condition: succeededOrFailed() - continueOnError: "true" - displayName: Publish logs as artifacts + - publish: $(System.DefaultWorkingDirectory)/hardis-report/ + artifact: hardis-report-backup + condition: succeededOrFailed() + continueOnError: "true" + displayName: Publish logs as artifacts -###################### -### Run Apex Tests ### -###################### -- job: ApexTestsSfdxHardis - dependsOn: BackupSfdxHardis - pool: - vmImage: 'ubuntu-latest' - timeoutInMinutes: "120" + ###################### + ### Run Apex Tests ### + ###################### + - job: ApexTestsSfdxHardis + dependsOn: BackupSfdxHardis + pool: + vmImage: "ubuntu-latest" + timeoutInMinutes: "120" - steps: - - checkout: self - persistCredentials: "true" - displayName: Git checkout + steps: + - checkout: self + persistCredentials: "true" + displayName: Git checkout - - script: | - npm install @salesforce/cli -g - sf plugins install @salesforce/plugin-packaging - echo y | sfdx plugins:install sfdx-hardis - echo y | sfdx plugins:install sfdx-essentials - sf version --verbose --json - workingDirectory: $(System.DefaultWorkingDirectory) - displayName: Install @salesforce/cli & sfdx-hardis + - script: | + npm install @salesforce/cli -g + sf plugins install @salesforce/plugin-packaging + echo y | sf plugins install sfdx-hardis + sf version --verbose --json + workingDirectory: $(System.DefaultWorkingDirectory) + displayName: Install @salesforce/cli & sfdx-hardis - - script: | - git pull origin "${BRANCH_NAME}" - displayName: Pull git branch latest commit - env: - CI: "true" + - script: | + git pull origin "${BRANCH_NAME}" + displayName: Pull git branch latest commit + env: + CI: "true" - - script: | - sfdx hardis:auth:login - sfdx hardis:org:test:apex - continueOnError: "true" - displayName: sfdx-hardis login & Apex 
tests - env: - # MANUAL: Add your branch related variables here - SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX) - SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX) - CI_COMMIT_REF_NAME: $(BRANCH_NAME) - CONFIG_BRANCH: $(BRANCH_NAME) - ORG_ALIAS: $(BRANCH_NAME) - SLACK_TOKEN: $(SLACK_TOKEN) - SLACK_CHANNEL_ID: $(SLACK_CHANNEL_ID) - NOTIF_EMAIL_ADDRESS: $(NOTIF_EMAIL_ADDRESS) - CI: "true" - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) - SYSTEM_COLLECTIONURI: $(System.CollectionUri) - SYSTEM_TEAMPROJECT: $(System.TeamProject) - SYSTEM_JOB_DISPLAY_NAME: $(System.JobDisplayName) - SYSTEM_JOB_ID: $(System.JobId) - SYSTEM_PULLREQUEST_PULLREQUESTID: $(System.PullRequest.PullRequestId) - BUILD_REPOSITORY_ID: $(Build.Repository.ID) - BUILD_REPOSITORYNAME: $(Build.Repository.Name) - BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) - BUILD_BUILD_ID: $(Build.BuildId) - MONITORING_BACKUP_SKIP_METADATA_TYPES: $(MONITORING_BACKUP_SKIP_METADATA_TYPES) - NOTIFICATIONS_DISABLE: $(NOTIFICATIONS_DISABLE) - MONITORING_DISABLE: $(MONITORING_DISABLE) - NOTIF_API_URL: $(NOTIF_API_URL) - NOTIF_API_BASIC_AUTH_USERNAME: $(NOTIF_API_BASIC_AUTH_USERNAME) - NOTIF_API_BASIC_AUTH_PASSWORD: $(NOTIF_API_BASIC_AUTH_PASSWORD) - NOTIF_API_METRICS_URL: $(NOTIF_API_METRICS_URL) - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: $(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) - NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: $(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) + - script: | + sf hardis:auth:login + sf hardis:org:test:apex + continueOnError: "true" + displayName: sfdx-hardis login & Apex tests + env: + # MANUAL: Add your branch related variables here + SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX) + SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX: 
$(SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX) + CI_COMMIT_REF_NAME: $(BRANCH_NAME) + CONFIG_BRANCH: $(BRANCH_NAME) + ORG_ALIAS: $(BRANCH_NAME) + SLACK_TOKEN: $(SLACK_TOKEN) + SLACK_CHANNEL_ID: $(SLACK_CHANNEL_ID) + NOTIF_EMAIL_ADDRESS: $(NOTIF_EMAIL_ADDRESS) + CI: "true" + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) + SYSTEM_COLLECTIONURI: $(System.CollectionUri) + SYSTEM_TEAMPROJECT: $(System.TeamProject) + SYSTEM_JOB_DISPLAY_NAME: $(System.JobDisplayName) + SYSTEM_JOB_ID: $(System.JobId) + SYSTEM_PULLREQUEST_PULLREQUESTID: $(System.PullRequest.PullRequestId) + BUILD_REPOSITORY_ID: $(Build.Repository.ID) + BUILD_REPOSITORYNAME: $(Build.Repository.Name) + BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) + BUILD_BUILD_ID: $(Build.BuildId) + MONITORING_BACKUP_SKIP_METADATA_TYPES: $(MONITORING_BACKUP_SKIP_METADATA_TYPES) + NOTIFICATIONS_DISABLE: $(NOTIFICATIONS_DISABLE) + MONITORING_DISABLE: $(MONITORING_DISABLE) + NOTIF_API_URL: $(NOTIF_API_URL) + NOTIF_API_BASIC_AUTH_USERNAME: $(NOTIF_API_BASIC_AUTH_USERNAME) + NOTIF_API_BASIC_AUTH_PASSWORD: $(NOTIF_API_BASIC_AUTH_PASSWORD) + NOTIF_API_METRICS_URL: $(NOTIF_API_METRICS_URL) + NOTIF_API_METRICS_BASIC_AUTH_USERNAME: $(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) + NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: $(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) - - publish: $(System.DefaultWorkingDirectory)/hardis-report/ - condition: succeededOrFailed() - continueOnError: "true" - artifact: hardis-report-apex-tests - displayName: Publish logs as artifacts + - publish: $(System.DefaultWorkingDirectory)/hardis-report/ + condition: succeededOrFailed() + continueOnError: "true" + artifact: hardis-report-apex-tests + displayName: Publish logs as artifacts -############################################################ -### Run MegaLinter to detect quality and security issues ### -############################################################ -- job: MegaLinter - dependsOn: BackupSfdxHardis - 
pool: - vmImage: ubuntu-latest - steps: - - checkout: self - persistCredentials: "true" - displayName: Git checkout + ############################################################ + ### Run MegaLinter to detect quality and security issues ### + ############################################################ + - job: MegaLinter + dependsOn: BackupSfdxHardis + pool: + vmImage: ubuntu-latest + steps: + - checkout: self + persistCredentials: "true" + displayName: Git checkout - - script: | - git config --global user.email "contact@cloudity.com" - git config --global user.name "sfdx-hardis monitoring" - git pull origin "${BRANCH_NAME}" - workingDirectory: $(System.DefaultWorkingDirectory) - displayName: Git config & pull latest commit + - script: | + git config --global user.email "contact@cloudity.com" + git config --global user.name "sfdx-hardis monitoring" + git pull origin "${BRANCH_NAME}" + workingDirectory: $(System.DefaultWorkingDirectory) + displayName: Git config & pull latest commit - # Pull MegaLinter docker image - - script: docker pull oxsecurity/megalinter-salesforce:latest - displayName: Pull MegaLinter - # Run MegaLinter - - script: | - docker run -v $(System.DefaultWorkingDirectory):/tmp/lint \ - --env-file <(env | grep -e SYSTEM_ -e BUILD_ -e TF_ -e AGENT_) \ - -e CI=true \ - -e SYSTEM_ACCESSTOKEN=$(System.AccessToken) \ - -e GIT_AUTHORIZATION_BEARER=$(System.AccessToken) \ - -e API_REPORTER=true \ - -e NOTIF_API_URL=$(NOTIF_API_URL) \ - -e NOTIF_API_BASIC_AUTH_USERNAME=$(NOTIF_API_BASIC_AUTH_USERNAME) \ - -e NOTIF_API_BASIC_AUTH_PASSWORD=$(NOTIF_API_BASIC_AUTH_PASSWORD) \ - -e NOTIF_API_METRICS_URL=$(NOTIF_API_METRICS_URL) \ - -e NOTIF_API_METRICS_BASIC_AUTH_USERNAME=$(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) \ - -e NOTIF_API_METRICS_BASIC_AUTH_PASSWORD=$(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) \ - oxsecurity/megalinter-salesforce:latest - continueOnError: "true" - displayName: Run MegaLinter + # Pull MegaLinter docker image + - script: docker pull 
oxsecurity/megalinter-salesforce:latest + displayName: Pull MegaLinter + # Run MegaLinter + - script: | + docker run -v $(System.DefaultWorkingDirectory):/tmp/lint \ + --env-file <(env | grep -e SYSTEM_ -e BUILD_ -e TF_ -e AGENT_) \ + -e CI=true \ + -e SYSTEM_ACCESSTOKEN=$(System.AccessToken) \ + -e GIT_AUTHORIZATION_BEARER=$(System.AccessToken) \ + -e API_REPORTER=true \ + -e NOTIF_API_URL=$(NOTIF_API_URL) \ + -e NOTIF_API_BASIC_AUTH_USERNAME=$(NOTIF_API_BASIC_AUTH_USERNAME) \ + -e NOTIF_API_BASIC_AUTH_PASSWORD=$(NOTIF_API_BASIC_AUTH_PASSWORD) \ + -e NOTIF_API_METRICS_URL=$(NOTIF_API_METRICS_URL) \ + -e NOTIF_API_METRICS_BASIC_AUTH_USERNAME=$(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) \ + -e NOTIF_API_METRICS_BASIC_AUTH_PASSWORD=$(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) \ + oxsecurity/megalinter-salesforce:latest + continueOnError: "true" + displayName: Run MegaLinter - # Publish Megalinter reports - - publish: $(System.DefaultWorkingDirectory)/megalinter-reports/ - condition: succeededOrFailed() - continueOnError: "true" - artifact: megalinter-reports - displayName: Publish reports + # Publish Megalinter reports + - publish: $(System.DefaultWorkingDirectory)/megalinter-reports/ + condition: succeededOrFailed() + continueOnError: "true" + artifact: megalinter-reports + displayName: Publish reports -################################### -### Run other monitoring checks ### -################################### -- job: MonitoringChecksSfdxHardis - dependsOn: BackupSfdxHardis - pool: - vmImage: 'ubuntu-latest' + ################################### + ### Run other monitoring checks ### + ################################### + - job: MonitoringChecksSfdxHardis + dependsOn: BackupSfdxHardis + pool: + vmImage: "ubuntu-latest" - steps: - - checkout: self - persistCredentials: "true" - displayName: Git checkout + steps: + - checkout: self + persistCredentials: "true" + displayName: Git checkout - - script: | - npm install @salesforce/cli -g - sf plugins install 
@salesforce/plugin-packaging - echo y | sfdx plugins:install sfdx-hardis - echo y | sfdx plugins:install sfdx-essentials - sf version --verbose --json - workingDirectory: $(System.DefaultWorkingDirectory) - displayName: Install @salesforce/cli & sfdx-hardis + - script: | + npm install @salesforce/cli -g + sf plugins install @salesforce/plugin-packaging + echo y | sf plugins install sfdx-hardis + sf version --verbose --json + workingDirectory: $(System.DefaultWorkingDirectory) + displayName: Install @salesforce/cli & sfdx-hardis - - script: | - git config --global user.email "contact@cloudity.com" - git config --global user.name "sfdx-hardis monitoring" - git pull origin "${BRANCH_NAME}" - displayName: Pull git branch latest commit - env: - CI: "true" + - script: | + git config --global user.email "contact@cloudity.com" + git config --global user.name "sfdx-hardis monitoring" + git pull origin "${BRANCH_NAME}" + displayName: Pull git branch latest commit + env: + CI: "true" - - script: | - sfdx hardis:auth:login - sfdx hardis:org:monitor:all - displayName: sfdx-hardis login & other checks - continueOnError: "true" - env: - # MANUAL: Add your branch related variables here - SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX) - SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX) - CI_COMMIT_REF_NAME: $(BRANCH_NAME) - CONFIG_BRANCH: $(BRANCH_NAME) - ORG_ALIAS: $(BRANCH_NAME) - SLACK_TOKEN: $(SLACK_TOKEN) - SLACK_CHANNEL_ID: $(SLACK_CHANNEL_ID) - NOTIF_EMAIL_ADDRESS: $(NOTIF_EMAIL_ADDRESS) - CI: "true" - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) - SYSTEM_COLLECTIONURI: $(System.CollectionUri) - SYSTEM_TEAMPROJECT: $(System.TeamProject) - SYSTEM_JOB_DISPLAY_NAME: $(System.JobDisplayName) - SYSTEM_JOB_ID: $(System.JobId) - SYSTEM_PULLREQUEST_PULLREQUESTID: $(System.PullRequest.PullRequestId) - 
BUILD_REPOSITORY_ID: $(Build.Repository.ID) - BUILD_REPOSITORYNAME: $(Build.Repository.Name) - BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) - BUILD_BUILD_ID: $(Build.BuildId) - MONITORING_BACKUP_SKIP_METADATA_TYPES: $(MONITORING_BACKUP_SKIP_METADATA_TYPES) - NOTIFICATIONS_DISABLE: $(NOTIFICATIONS_DISABLE) - MONITORING_DISABLE: $(MONITORING_DISABLE) - NOTIF_API_URL: $(NOTIF_API_URL) - NOTIF_API_BASIC_AUTH_USERNAME: $(NOTIF_API_BASIC_AUTH_USERNAME) - NOTIF_API_BASIC_AUTH_PASSWORD: $(NOTIF_API_BASIC_AUTH_PASSWORD) - NOTIF_API_METRICS_URL: $(NOTIF_API_METRICS_URL) - NOTIF_API_METRICS_BASIC_AUTH_USERNAME: $(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) - NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: $(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) + - script: | + sf hardis:auth:login + sf hardis:org:monitor:all + displayName: sfdx-hardis login & other checks + continueOnError: "true" + env: + # MANUAL: Add your branch related variables here + SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_ID_MONITORING_MY_CLIENT__INTEG_SANDBOX) + SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX: $(SFDX_CLIENT_KEY_MONITORING_MY_CLIENT__INTEG_SANDBOX) + CI_COMMIT_REF_NAME: $(BRANCH_NAME) + CONFIG_BRANCH: $(BRANCH_NAME) + ORG_ALIAS: $(BRANCH_NAME) + SLACK_TOKEN: $(SLACK_TOKEN) + SLACK_CHANNEL_ID: $(SLACK_CHANNEL_ID) + NOTIF_EMAIL_ADDRESS: $(NOTIF_EMAIL_ADDRESS) + CI: "true" + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + CI_SFDX_HARDIS_AZURE_TOKEN: $(System.AccessToken) + SYSTEM_COLLECTIONURI: $(System.CollectionUri) + SYSTEM_TEAMPROJECT: $(System.TeamProject) + SYSTEM_JOB_DISPLAY_NAME: $(System.JobDisplayName) + SYSTEM_JOB_ID: $(System.JobId) + SYSTEM_PULLREQUEST_PULLREQUESTID: $(System.PullRequest.PullRequestId) + BUILD_REPOSITORY_ID: $(Build.Repository.ID) + BUILD_REPOSITORYNAME: $(Build.Repository.Name) + BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) + BUILD_BUILD_ID: $(Build.BuildId) + MONITORING_BACKUP_SKIP_METADATA_TYPES: $(MONITORING_BACKUP_SKIP_METADATA_TYPES) + 
NOTIFICATIONS_DISABLE: $(NOTIFICATIONS_DISABLE) + MONITORING_DISABLE: $(MONITORING_DISABLE) + NOTIF_API_URL: $(NOTIF_API_URL) + NOTIF_API_BASIC_AUTH_USERNAME: $(NOTIF_API_BASIC_AUTH_USERNAME) + NOTIF_API_BASIC_AUTH_PASSWORD: $(NOTIF_API_BASIC_AUTH_PASSWORD) + NOTIF_API_METRICS_URL: $(NOTIF_API_METRICS_URL) + NOTIF_API_METRICS_BASIC_AUTH_USERNAME: $(NOTIF_API_METRICS_BASIC_AUTH_USERNAME) + NOTIF_API_METRICS_BASIC_AUTH_PASSWORD: $(NOTIF_API_METRICS_BASIC_AUTH_PASSWORD) - - publish: $(System.DefaultWorkingDirectory)/hardis-report/ - artifact: hardis-report-monitoring - condition: succeededOrFailed() - continueOnError: "true" - displayName: Publish logs as artifacts \ No newline at end of file + - publish: $(System.DefaultWorkingDirectory)/hardis-report/ + artifact: hardis-report-monitoring + condition: succeededOrFailed() + continueOnError: "true" + displayName: Publish logs as artifacts diff --git a/defaults/monitoring/bitbucket-pipelines.yml b/defaults/monitoring/bitbucket-pipelines.yml index 202dec996..edf627468 100644 --- a/defaults/monitoring/bitbucket-pipelines.yml +++ b/defaults/monitoring/bitbucket-pipelines.yml @@ -3,7 +3,7 @@ # Doc & support: https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/ -image: node:18 +image: node:20 definitions: services: docker: @@ -16,13 +16,14 @@ pipelines: ############################################## - step: name: Backup sfdx-hardis + clone: + depth: full script: # Install SF Cli & dependencies - npm install --no-cache @salesforce/cli --global - sf plugins install @salesforce/plugin-packaging - - echo 'y' | sfdx plugins:install sfdx-hardis - - echo 'y' | sfdx plugins:install sfdx-essentials - - echo 'y' | sfdx plugins:install sfdx-git-delta + - echo 'y' | sf plugins install sfdx-hardis + - echo 'y' | sf plugins install sfdx-git-delta - sf version --verbose --json - export BRANCH_NAME=$(echo "$BITBUCKET_BRANCH" | sed 's/refs\/heads\///') - export CI_COMMIT_REF_NAME=$BRANCH_NAME @@ -30,9 +31,9 @@ pipelines: - export 
ORG_ALIAS=$BRANCH_NAME - export FORCE_COLOR=1 # Login - - sfdx hardis:auth:login + - sf hardis:auth:login # Backup - - sfdx hardis:org:monitor:backup + - sf hardis:org:monitor:backup # Commit & push - git status - git add --all @@ -51,9 +52,8 @@ pipelines: # Install SF Cli & dependencies - npm install --no-cache @salesforce/cli --global - sf plugins install @salesforce/plugin-packaging - - echo 'y' | sfdx plugins:install sfdx-hardis - - echo 'y' | sfdx plugins:install sfdx-essentials - - echo 'y' | sfdx plugins:install sfdx-git-delta + - echo 'y' | sf plugins install sfdx-hardis + - echo 'y' | sf plugins install sfdx-git-delta - sf version --verbose --json - export BRANCH_NAME=$(echo "$BITBUCKET_BRANCH" | sed 's/refs\/heads\///') - export CI_COMMIT_REF_NAME=$BRANCH_NAME @@ -63,9 +63,9 @@ pipelines: # Get latest commit of the branch - git pull origin "${BRANCH_NAME}" # Login - - sfdx hardis:auth:login + - sf hardis:auth:login # Apex tests - - sfdx hardis:org:test:apex + - sf hardis:org:test:apex artifacts: - hardis-report/** @@ -91,9 +91,8 @@ pipelines: # Install SF Cli & dependencies - npm install --no-cache @salesforce/cli --global - sf plugins install @salesforce/plugin-packaging - - echo 'y' | sfdx plugins:install sfdx-hardis - - echo 'y' | sfdx plugins:install sfdx-essentials - - echo 'y' | sfdx plugins:install sfdx-git-delta + - echo 'y' | sf plugins install sfdx-hardis + - echo 'y' | sf plugins install sfdx-git-delta - sf version --verbose --json - export BRANCH_NAME=$(echo "$BITBUCKET_BRANCH" | sed 's/refs\/heads\///') - export CI_COMMIT_REF_NAME=$BRANCH_NAME @@ -103,8 +102,8 @@ pipelines: # Get latest commit of the branch - git pull origin "${BRANCH_NAME}" # Login - - sfdx hardis:auth:login + - sf hardis:auth:login # Other monitoring tools - - sfdx hardis:org:monitor:all + - sf hardis:org:monitor:all artifacts: - hardis-report/** diff --git a/defaults/monitoring/manifest/package-skip-items.xml b/defaults/monitoring/manifest/package-skip-items.xml index 
f4fba3246..be9a88ce2 100644 --- a/defaults/monitoring/manifest/package-skip-items.xml +++ b/defaults/monitoring/manifest/package-skip-items.xml @@ -74,6 +74,13 @@ LightningMessageChannel + + + + +## All Environment Variables + +Here is a comprehensive list of all environment variables that can be used in sfdx-hardis source code. + +This list has been generated with GitHub Copilot so if you see any incoherence please raise an issue :) + +## Table of Contents + +1. [Custom sfdx-hardis Variables](#custom-sfdx-hardis-variables) + - [Salesforce Configuration](#salesforce-configuration) + - [Deployment Control](#deployment-control) + - [Monitoring & Debugging](#monitoring--debugging) + - [System Configuration](#system-configuration) + - [Bulk API Settings](#bulk-api-settings) + +2. [Tool-Specific Variables](#tool-specific-variables) + - [Azure DevOps](#azure-devops) + - [GitLab](#gitlab) + - [GitHub](#github) + - [Bitbucket](#bitbucket) + - [JIRA Integration](#jira-integration) + - [Slack Integration](#slack-integration) + - [AI Provider (OpenAI)](#ai-provider-openai) + - [Email Notifications](#email-notifications) + - [Browser Automation](#browser-automation) + - [Generic Ticketing](#generic-ticketing) + - [Generic CI/CD](#generic-cicd) + +--- + +## Custom sfdx-hardis Variables + +These variables control specific behaviors and configurations within sfdx-hardis itself. 
+ +### Salesforce Configuration + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|----------------------------------|----------------------------------------------------------------------|----------------------------------------------|-----------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------| +| **CI_SCRATCH_MODE** | Mode for scratch org in CI (e.g., 'deploy') | `undefined` | `'deploy'`, any string (e.g., `'test'`, `'dev'`) | [`src/common/utils/orgUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/orgUtils.ts) | +| **DEVHUB_ALIAS** | Alias for the Salesforce DevHub org | `undefined` | Any valid org alias (e.g., `'MyDevHub'`, `'production-devhub'`) | Multiple files in config, hooks, and auth | +| **INSTANCE_URL** | Salesforce instance URL for authentication | `undefined` | Valid Salesforce instance URLs (e.g., `'https://mycompany.my.salesforce.com'`, `'https://test.salesforce.com'`) | [`src/common/utils/authUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/authUtils.ts) | +| **NOT_IMPACTING_METADATA_TYPES** | Comma-separated list of metadata types that don't impact deployments | Predefined list | Comma-separated metadata type names (e.g., `'Document,StaticResource,Report'`) | [`src/config/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/config/index.ts) | +| **ORG_ALIAS** | Alias for the target Salesforce org | `undefined` | Any valid org alias (e.g., `'staging'`, `'production'`, `'sandbox1'`) | Multiple files in hooks and auth | +| **SFDX_API_VERSION** | Salesforce API version to use | `'62.0'` | Valid Salesforce API versions (e.g., `'62.0'`) | [`src/config/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/config/index.ts) | +| 
**SFDX_AUTH_URL_DEV_HUB** | Salesforce auth URL for DevHub | `undefined` | Valid Salesforce auth URLs (e.g., `'force://PlatformCLI::5Aep8614XXXXXXXXXXXX...'`) | [`src/common/utils/authUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/authUtils.ts) | +| **SFDX_AUTH_URL_TECHNICAL_ORG** | Salesforce auth URL for technical org | `undefined` | Valid Salesforce auth URLs (e.g., `'force://PlatformCLI::5Aep8614XXXXXXXXXXXX...'`) | [`src/commands/hardis/auth/login.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/commands/hardis/auth/login.ts) | +| **SFDX_CLIENT_ID** | Salesforce connected app client ID | `undefined` | Valid connected app client IDs (e.g., `'3MVG9CEn_O3jvv0XXXXXXXXXXXX...'`) | [`src/common/utils/authUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/authUtils.ts) | +| **SFDX_CLIENT_KEY** | Salesforce connected app client key | `undefined` | Valid connected app client keys (e.g., `'/path/to/server.key'`, certificate content) | [`src/common/utils/authUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/authUtils.ts) | +| **SFDX_ENV** | Salesforce CLI environment setting | Set to `'development'` when debug is enabled | `'development'`, `'production'` | [`src/hooks/init/log.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/hooks/init/log.ts) | +| **SFDX_XML_INDENT** | Indentation string for XML formatting | `' '` (4 spaces) | Any string (e.g., `' '` for 4 spaces, `'\t'` for tabs, `' '` for 2 spaces) | [`src/common/utils/xmlUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/xmlUtils.ts) | +| **SKIP_TECHNICAL_ORG** | Skip technical org connection | `undefined` | `'true'`, `'false'` | [`src/common/utils/orgUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/orgUtils.ts) | +| **TARGET_USERNAME** | Target Salesforce username for operations | `undefined` | Valid Salesforce 
usernames (e.g., `'admin@mycompany.com'`, `'test-user@example.com.sandbox'`) | [`src/common/utils/authUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/authUtils.ts) | +| **TECHNICAL_ORG_ALIAS** | Alias for technical Salesforce org | `undefined` | Any valid org alias (e.g., `'technical-org'`, `'monitoring-org'`) | [`src/commands/hardis/auth/login.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/commands/hardis/auth/login.ts) | + +### Deployment Control + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|----------------------------------------------|-----------------------------------------------------------------------------|---------------|-------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| +| **AUTO_UPDATE** | Enable automatic update of .gitignore and .forceignore files when not in CI | `undefined` | `'true'`, `'false'` | Found in changelog | +| **SFDX_DEPLOY_WAIT_MINUTES** | Minutes to wait for deployment completion | `120` | Positive integers (e.g., `'120'`, `'60'`) | Multiple files in deployUtils and Azure config | +| **SFDX_DISABLE_FLOW_DIFF** | Disable Flow Visual Git Diff calculation in PR comments | `undefined` | `'true'`, `'false'` | [`src/common/utils/gitUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/gitUtils.ts) | +| **SFDX_HARDIS_DEPLOY_BEFORE_MERGE** | Deploy before merge in CI/CD | `undefined` | `'true'`, `'false'` | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts) | +| **SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES** | Ignore splitting of package.xml files during deployment | `'true'` | `'true'`, `'false'` | [`src/common/utils/deployUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/deployUtils.ts) | + +### 
Monitoring & Debugging + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|-------------------------------------------|-------------------------------------------------|---------------|---------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------| +| **DEBUG** | Enable debug logging output | `undefined` | `'true'`, `'false'` | [`src/common/websocketClient.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/websocketClient.ts) | +| **DEBUG_DEPLOY** | Enable debug logging for deployment operations | `undefined` | `'true'`, `'false'` | [`src/common/utils/orgUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/orgUtils.ts) | +| **MONITORING_BACKUP_SKIP_METADATA_TYPES** | Metadata types to skip during backup monitoring | `undefined` | Comma-separated metadata type names (e.g., `'Document,Report,Dashboard'`) | Found in changelog and documentation | +| **SFDX_HARDIS_DEBUG_ENV** | Enable debug environment for sfdx-hardis | `undefined` | `'true'`, `'false'` | [`src/hooks/init/log.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/hooks/init/log.ts) | +| **SFDX_HARDIS_MONITORING** | Indicates if running a monitoring job | `undefined` | `'true'`, `'false'` | [`src/common/utils/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/index.ts) | + +### System Configuration + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|--------------------------------|-------------------------------------------|-------------------------------------|-----------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------| +| **CONFIG_BRANCH** | Override for 
configuration branch name | Current git branch | Any valid git branch name (e.g., `'main'`, `'develop'`, `'config-override'`) | [`src/config/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/config/index.ts) | +| **FORCE_COLOR** | Control color output in terminal commands | `'0'` | `'0'`, `'1'`, `'2'`, `'3'` | [`src/common/utils/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/index.ts) | +| **GIT_FETCH_EXTRA_FLAGS** | Extra flags for git fetch operations | `undefined` | Valid git fetch flags (e.g., `'--depth=1'`, `'--quiet'`, `'--prune'`) | Found in documentation | +| **MERMAID_MODES** | Modes for Mermaid diagram generation | `undefined` | Mermaid mode values (e.g., `'dark'`, `'light'`, `'forest'`) | Found in Dockerfile | +| **NODE_OPTIONS** | Node.js runtime options | Cleared if contains `--inspect-brk` | Valid Node.js options (e.g., `'--max-old-space-size=4096'`, `'--experimental-modules'`) | [`src/common/utils/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/index.ts) | +| **PROJECT_NAME** | Name of the sfdx-hardis project | `undefined` | Any project name string (e.g., `'My Salesforce Project'`, `'CRM-Development'`) | [`src/config/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/config/index.ts) | +| **SFDX_HARDIS_WEBSOCKET_PORT** | Port for sfdx-hardis WebSocket server | `2702` | Valid port numbers (e.g., `2702`) | [`src/common/websocketClient.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/websocketClient.ts) | + +### Bulk API Settings + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|-----------------------------|----------------------------------|---------------|-----------------|------------------------------------------------------------------------------------------------------------------------| +| **BULKAPIV2_POLL_INTERVAL** | Override BulkApiV2 Poll interval | `5000` | Any number 
| [`src/common/utils/apiUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/apiUtils.ts) | +| **BULKAPIV2_POLL_TIMEOUT** | Override BulkApiV2 Poll Timeout | `60000` | Any number | [`src/common/utils/apiUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/apiUtils.ts) | + +--- + +## Tool-Specific Variables + +These variables integrate sfdx-hardis with external tools and platforms. + +### Azure DevOps + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|--------------------------------------|----------------------------------------------------------|----------------------|-------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **BUILD_BUILD_ID** | Azure build ID for CI/CD pipeline identification | `undefined` | Numeric build IDs (e.g., `'12345'`, `'987654'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | +| **BUILD_REPOSITORY_ID** | Azure repository ID for accessing repository information | `undefined` | Valid Azure repository GUIDs (e.g., `'550e8400-e29b-41d4-a716-446655440000'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts), [`src/common/ticketProvider/azureBoardsProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/azureBoardsProvider.ts) | +| **BUILD_REPOSITORYNAME** | Azure 
repository name for building URLs and references | `undefined` | Valid Azure repository names (e.g., `'my-salesforce-project'`, `'CRM-Repository'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | +| **BUILD_SOURCEBRANCHNAME** | Azure source branch name for the current build | `undefined` | Valid git branch names (e.g., `'main'`, `'feature/new-component'`, `'develop'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | +| **CI_SFDX_HARDIS_AZURE_TOKEN** | Custom Azure DevOps token for sfdx-hardis integration | `SYSTEM_ACCESSTOKEN` | Valid Azure DevOps personal access tokens (e.g., `'eyJ0eXAiOiJKV1QiLCJhbGciOiXXXXXXXXXXXX...'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts), [`src/common/ticketProvider/azureBoardsProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/azureBoardsProvider.ts) | +| **SYSTEM_ACCESSTOKEN** | Azure DevOps system access token | `undefined` | Valid Azure DevOps access tokens (e.g., `'eyJ0eXAiOiJKV1QiLCJhbGciOiXXXXXXXXXXXX...'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts), [`src/common/ticketProvider/azureBoardsProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/azureBoardsProvider.ts) | +| **SYSTEM_COLLECTIONURI** | Azure DevOps collection URI | `undefined` | Valid Azure DevOps collection URIs (e.g., `'https://dev.azure.com/myorganization/'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts), [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts), 
[`src/common/ticketProvider/azureBoardsProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/azureBoardsProvider.ts) | +| **SYSTEM_JOB_DISPLAY_NAME** | Azure DevOps job display name | `undefined` | Valid job display names (e.g., `'Build and Test'`, `'Deploy to Staging'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | +| **SYSTEM_JOB_ID** | Azure DevOps job ID | `undefined` | Valid Azure job IDs (e.g., `'550e8400-e29b-41d4-a716-446655440000'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | +| **SYSTEM_PULLREQUEST_PULLREQUESTID** | Azure DevOps pull request ID | `undefined` | Valid pull request IDs (e.g., `'123'`, `'456'`) | [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | +| **SYSTEM_TEAMPROJECT** | Azure DevOps team project name | `undefined` | Valid Azure DevOps project names (e.g., `'MyProject'`, `'Salesforce-Development'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts), [`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts), [`src/common/ticketProvider/azureBoardsProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/azureBoardsProvider.ts) | + +### GitLab + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|---------------------------------|-------------------------------------------------|----------------|------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------| +| 
**CI_JOB_TOKEN** | GitLab CI job token for API authentication | `undefined` | Valid GitLab job tokens (e.g., `'glcbt-64chars_token_XXXXXXXXXXXXXXXXXXXX'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts) | +| **CI_PROJECT_ID** | GitLab project ID | `undefined` | Valid GitLab project IDs (e.g., `'123456'`, `'42'`) | [`src/common/gitProvider/gitlab.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/gitlab.ts) | +| **CI_PROJECT_URL** | GitLab project URL for building branch URLs | `undefined` | Valid GitLab project URLs (e.g., `'https://gitlab.com/myuser/myproject'`) | [`src/common/gitProvider/gitlab.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/gitlab.ts) | +| **CI_SFDX_HARDIS_GITLAB_TOKEN** | Custom GitLab token for sfdx-hardis integration | `ACCESS_TOKEN` | Valid GitLab access tokens (e.g., `'glpat-XXXXXXXXXXXXXXXXXXXX'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts) | + +### GitHub + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|------------------|-------------------------------------|---------------|-------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| +| **GITHUB_TOKEN** | GitHub token for API authentication | `undefined` | Valid GitHub personal access tokens (e.g., `'ghp_XXXXXXXXXXXXXXXXXXXX'`, `'github_pat_XXXXXXXXXXXX'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts) | + +### Bitbucket + +| Variable Name | Description | Default Value | Possible Values | Usage Location | 
+|------------------------------------|-----------------------------------------------------------------|---------------|------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| +| **BITBUCKET_WORKSPACE** | Bitbucket workspace identifier for Bitbucket provider detection | `undefined` | Valid Bitbucket workspace names (e.g., `'mycompany'`, `'my-team-workspace'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts) | +| **CI_SFDX_HARDIS_BITBUCKET_TOKEN** | Custom Bitbucket token for sfdx-hardis integration | `undefined` | Valid Bitbucket app passwords or tokens (e.g., `'ATBBXXXXXXXXXXXXXXXXXXXX'`) | [`src/common/gitProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/index.ts) | + +### JIRA Integration + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|-----------------------|-----------------------------------------------|---------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------| +| **JIRA_EMAIL** | JIRA user email for authentication | `undefined` | Valid email addresses (e.g., `'admin@mycompany.com'`, `'jira-user@example.org'`) | [`src/common/ticketProvider/jiraProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/jiraProvider.ts) | +| **JIRA_HOST** | JIRA server hostname | `"https://define.JIRA_HOST.in.cicd.variables/"` | Valid JIRA server URLs (e.g., `'https://mycompany.atlassian.net'`, `'https://jira.mycompany.com'`) | 
[`src/common/ticketProvider/jiraProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/jiraProvider.ts) | +| **JIRA_PAT** | JIRA Personal Access Token for authentication | `undefined` | Valid JIRA personal access tokens (e.g., `'ATATTXXXXXXXXXXXXXXXXXXXX'`) | [`src/common/ticketProvider/jiraProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/jiraProvider.ts) | +| **JIRA_TICKET_REGEX** | Regular expression for JIRA ticket references | `"(?<=[^a-zA-Z0-9_-]|^)([A-Za-z0-9]{2,10}-\\d{1,6})(?=[^a-zA-Z0-9_-]|$)"` | Valid regular expressions (e.g., `'PROJ-\\d+'`, `'[A-Z]+-\\d{1,4}'`) | [`src/common/ticketProvider/jiraProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/jiraProvider.ts) | +| **JIRA_TOKEN** | JIRA API token for authentication | `undefined` | Valid JIRA API tokens (e.g., `'ATATT3xFfGF0T4JVXXXXXXXXXXXXXXXXXXXX'`) | [`src/common/ticketProvider/jiraProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/jiraProvider.ts) | + +### Slack Integration + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|--------------------------------------|------------------------------------------------------|---------------|------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **SLACK_CHANNEL_ID** | Slack channel ID for notifications | `undefined` | Valid Slack channel IDs (e.g., `'C1234567890'`, `'C0123456789'`) | [`src/common/notifProvider/slackProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/slackProvider.ts) | +| 
**SLACK_CHANNEL_ID_ERRORS_WARNINGS** | Slack channel ID for error and warning notifications | `undefined` | Valid Slack channel IDs (e.g., `'C1234567890'`, `'C9876543210'`) | [`src/common/notifProvider/slackProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/slackProvider.ts) | +| **SLACK_CHANNEL_ID_{BRANCH}** | Branch-specific Slack channel ID | `undefined` | Valid Slack channel IDs (e.g., `'C1234567890'` for main branch) | [`src/common/notifProvider/slackProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/slackProvider.ts) | +| **SLACK_TOKEN** | Slack API token for notifications | `undefined` | Valid Slack bot tokens (e.g., `'xoxb-XXXXXXXXXXXXXXXXXXXXXXXX'`) | [`src/common/notifProvider/slackProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/slackProvider.ts), [`src/common/notifProvider/utils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/utils.ts) | + +### AI Provider (OpenAI) + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|----------------------------|----------------------------------------------|---------------------------|---------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------| +| **AI_MAX_TIMEOUT_MINUTES** | Maximum timeout in minutes for AI operations | `30` (in CI), `0` (local) | Positive integers (e.g., `30`, `60`) | [`src/common/aiProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/aiProvider/index.ts) | +| **OPENAI_API_KEY** | OpenAI API key for AI operations | `undefined` | Valid OpenAI API keys (e.g., `'sk-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'`) | 
[`src/common/aiProvider/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/aiProvider/index.ts) | + +### Email Notifications + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|----------------------------------|------------------------------------------------------------|---------------|--------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **MS_TEAMS_WEBHOOK_URL** | Microsoft Teams webhook URL for notifications (deprecated) | `undefined` | Valid MS Teams webhook URLs (e.g., `'https://outlook.office.com/webhook/XXXXXXXXXXXX...'`) | [`src/common/notifProvider/utils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/utils.ts) | +| **NOTIF_EMAIL_ADDRESS** | Email address for notifications | `undefined` | Valid email addresses (e.g., `'notifications@mycompany.com'`, `'alerts@example.org'`) | [`src/common/notifProvider/emailProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/emailProvider.ts), [`src/common/notifProvider/utils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/utils.ts) | +| **NOTIF_EMAIL_ADDRESS_{BRANCH}** | Branch-specific email address for notifications | `undefined` | Valid email addresses (e.g., `'prod-alerts@mycompany.com'` for main branch) | [`src/common/notifProvider/emailProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/notifProvider/emailProvider.ts) | + +### Browser Automation + +| Variable Name | Description | Default Value | Possible Values | Usage Location | 
+|-------------------------------|--------------------------------------------------|---------------|-----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------| +| **CHROMIUM_PATH** | Path to Chromium executable for Puppeteer | `undefined` | Valid file system paths (e.g., `'/usr/bin/chromium'`, `'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe'`) | Found in deployment documentation | +| **PUPPETEER_EXECUTABLE_PATH** | Path to Chrome/Chromium executable for Puppeteer | Auto-detected | Valid file system paths (e.g., `'/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'`) | [`src/common/utils/orgConfigUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/orgConfigUtils.ts) | + +### Generic Ticketing + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|--------------------------------------------|---------------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------| +| **GENERIC_TICKETING_PROVIDER_REGEX** | Regular expression for generic ticketing provider ticket references | `undefined` | Valid regular expressions (e.g., `'TICKET-\\d+'`, `'[A-Z]{2,5}-\\d{1,6}'`, `'#\\d+'`) | [`src/common/ticketProvider/genericProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/genericProvider.ts) | +| **GENERIC_TICKETING_PROVIDER_URL_BUILDER** | URL template for generic ticketing provider | `undefined` | Valid URL templates with placeholders (e.g., 
`'https://tickets.mycompany.com/{ticketId}'`, `'https://helpdesk.example.com/ticket/{ticketId}'`) | [`src/common/ticketProvider/genericProvider.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/ticketProvider/genericProvider.ts) | + +### Generic CI/CD + +| Variable Name | Description | Default Value | Possible Values | Usage Location | +|------------------------|--------------------------------------------------------------|--------------------|------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **CI** | Indicates if running in a Continuous Integration environment | `undefined` | `'true'`, `'false'` | [`src/common/utils/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/index.ts) | +| **CI_COMMIT_REF_NAME** | Current git branch name in CI environment | Current git branch | Valid git branch names (e.g., `'main'`, `'develop'`, `'feature/new-feature'`, `'hotfix/urgent-fix'`) | [`src/common/utils/index.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/index.ts), [`src/common/utils/filesUtils.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/filesUtils.ts), [`src/common/gitProvider/gitlab.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/gitlab.ts), 
[`src/common/gitProvider/azureDevops.ts`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/gitProvider/azureDevops.ts) | + +--- + +## Summary + +This documentation covers **64 environment variables** used throughout sfdx-hardis: + +- **Custom sfdx-hardis Variables**: 26 variables controlling native behavior +- **Tool-Specific Variables**: 38 variables for external integrations + +The variables are organized by functionality to help developers and administrators understand their purpose and configure them appropriately for their environments. diff --git a/docs/articles-videos.md b/docs/articles-videos.md new file mode 100644 index 000000000..4f1f572e5 --- /dev/null +++ b/docs/articles-videos.md @@ -0,0 +1,124 @@ +### Web Articles + +Here are some articles about [sfdx-hardis](https://sfdx-hardis.cloudity.com/) + +- English + +[![Conga Deployment Cheat Sheet](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-conga-banner.jpg)](https://nicolas.vuillamy.fr/how-to-deploy-conga-composer-configuration-using-salesforce-cli-plugins-c2899641f36b) +[![Questions/Answers](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-questions-answers.jpg)](https://nicolas.vuillamy.fr/what-devops-experts-want-to-know-about-salesforce-ci-cd-with-sfdx-hardis-q-a-1f412db34476) +[![Salesforce Developers Podcast](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-sfdev.jpg)](https://developer.salesforce.com/podcast/2023/06/sfdx) +[![sfdx-hardis: A release management tool for open-source](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-cicd-salesforcedevopsnet.jpg)](https://salesforcedevops.net/index.php/2023/03/01/sfdx-hardis-open-source-salesforce-release-management/) +[![Assisted solving of Salesforce deployments 
errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) +[![Handle Salesforce API versions Deprecation like a pro](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deprecated-api.jpg)](https://nicolas.vuillamy.fr/handle-salesforce-api-versions-deprecation-like-a-pro-335065f52238) +[![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) +[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) +[![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) +[![Reactivate all the sandbox users with .invalid emails in 3 clicks](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-invalid-email.jpg)](https://nicolas.vuillamy.fr/reactivate-all-the-sandbox-users-with-invalid-emails-in-3-clicks-2265af4e3a3d) +[![Invalid scope:Mine, not allowed ? 
Deploy your ListViews anyway !](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-invalid-scope-mine.jpg)](https://nicolas.vuillamy.fr/invalid-scope-mine-not-allowed-deploy-your-listviews-anyway-443aceca8ac7) + +- French + - [Versions d'API Salesforce décommissionnées: Que faire ?](https://leblog.hardis-group.com/portfolio/versions-dapi-salesforce-decommissionnees-que-faire/) + - [Exporter en masse les fichiers d’une org Salesforce](https://leblog.hardis-group.com/portfolio/exporter-en-masse-les-fichiers-dune-org-salesforce/) + - [Suspendre l’accès aux utilisateurs lors d’une mise en production Salesforce](https://leblog.hardis-group.com/portfolio/suspendre-lacces-aux-utilisateurs-lors-dune-mise-en-production-salesforce/) + +### Recorded Conferences + +#### Dreamforce Sessions + +- Dreamforce 2024 - Save the Day by Monitoring Your Org with Open-Source Tools (with Olga Shirikova) + +[![Dreamforce 2024: Save the Day by Monitoring Your Org with Open-Source Tools](https://img.youtube.com/vi/NxiLiYeo11A/0.jpg)](https://www.youtube.com/watch?v=NxiLiYeo11A){target=blank} + +- Dreamforce 2023 - Easy Salesforce CI/CD with open-source and clicks only thanks to sfdx-hardis! (with Jean-Pierre Rizzi) + +[![Dreamforce 2023: Easy Salesforce CI/CD with open-source](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs){target=blank} + +#### Community Events + +- Wir Sind Ohana 2024 - Automate the Monitoring of your Salesforce orgs with open-source tools only! 
(with Yosra Saidani) + +[![Wir Sind Ohana 2024: Automate Monitoring with Open-Source](https://img.youtube.com/vi/xGbT6at7RZ0/0.jpg)](https://www.youtube.com/watch?v=xGbT6at7RZ0){target=blank} + +### Podcasts + +- Apex Hours 2025 - Org monitoring with Grafana + AI generated doc + +[![Apex Hours 2025: Org monitoring with Grafana + AI generated doc](https://img.youtube.com/vi/oDaCh66pRcI/0.jpg)](https://www.youtube.com/watch?v=oDaCh66pRcI){target=blank} + +- Salesforce Way Podcast #102 - Sfdx-hardis with Nicolas Vuillamy + +[![Salesforce Way Podcast: Sfdx-hardis](https://img.youtube.com/vi/sfdx-hardis/0.jpg)](https://salesforceway.com/podcast/sfdx-hardis/){target=blank} + +- Salesforce Developers Podcast Episode 182: SFDX-Hardis with Nicolas Vuillamy + +[![Salesforce Developers Podcast](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-sfdev.jpg)](https://developer.salesforce.com/podcast/2023/06/sfdx){target=blank} + +### sfdx-hardis Usage + +#### Features Overview + +- sfdx-hardis 2025 new features overview + +[![sfdx-hardis 2025 new features](https://img.youtube.com/vi/JRKH5COUVQ0/0.jpg)](https://youtu.be/JRKH5COUVQ0){target=blank} + +- SFDX-HARDIS – A demo with Nicolas Vuillamy from Cloudity + +[![SalesforceDevOps.net Demo](https://img.youtube.com/vi/qP6MaZUGzik/0.jpg)](https://www.youtube.com/watch?v=qP6MaZUGzik){target=blank} + +#### Installation & Setup + +- Complete installation tutorial for sfdx-hardis - [📖 Documentation](https://sfdx-hardis.cloudity.com/installation/) + +[![Installation Tutorial](https://img.youtube.com/vi/LA8m-t7CjHA/0.jpg)](https://www.youtube.com/watch?v=LA8m-t7CjHA){target=blank} + +#### CI/CD Workflows + +- Complete CI/CD workflow for Salesforce projects - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) + +[![Dreamforce demo video: Easy Salesforce CI/CD with sfdx-hardis and open-source only 
!](https://img.youtube.com/vi/zEYqTd2txU4/0.jpg)](https://www.youtube.com/watch?v=zEYqTd2txU4){target=blank} + +- How to start a new User Story in sandbox - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/) + +[![Create New User Story](https://img.youtube.com/vi/WOqssZwjPhw/0.jpg)](https://www.youtube.com/watch?v=WOqssZwjPhw){target=blank} + +- How to commit updates and create merge requests - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) + +[![Publish User Story Tutorial](https://img.youtube.com/vi/Ik6whtflmfY/0.jpg)](https://www.youtube.com/watch?v=Ik6whtflmfY){target=blank} + +- How to resolve git merge conflicts in Visual Studio Code - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-validate-merge-request/) + +[![Merge Conflicts Resolution](https://img.youtube.com/vi/lz5OuKzvadQ/0.jpg)](https://www.youtube.com/watch?v=lz5OuKzvadQ){target=blank} + +- How to install packages in your org - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-work-on-task-install-packages/) + +[![Install Packages Tutorial](https://img.youtube.com/vi/5-MgqoSLUls/0.jpg)](https://www.youtube.com/watch?v=5-MgqoSLUls){target=blank} + +- Configure CI server authentication to Salesforce orgs - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/) + +[![Configure CI Authentication](https://img.youtube.com/vi/OzREUu5utVI/0.jpg)](https://www.youtube.com/watch?v=OzREUu5utVI){target=blank} + +#### Monitoring + +- How to configure monitoring for your Salesforce org - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-monitoring-config-home/) + +[![Org Monitoring Setup](https://img.youtube.com/vi/bcVdN0XItSc/0.jpg)](https://www.youtube.com/watch?v=bcVdN0XItSc){target=blank} + +#### Integrations + +- Configure Slack integration for deployment notifications - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-slack/) + 
+[![Slack Integration](https://img.youtube.com/vi/se292ABGUmI/0.jpg)](https://www.youtube.com/watch?v=se292ABGUmI){target=blank} + +- How to create a Personal Access Token in GitLab - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-clone-repository/) + +[![GitLab Personal Access Token](https://img.youtube.com/vi/9y5VmmYHuIg/0.jpg)](https://www.youtube.com/watch?v=9y5VmmYHuIg){target=blank} + +#### Documentation + +- How to generate AI-enhanced Salesforce project documentation - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-project-doc-generate/) + +[![Generate Project Documentation](https://img.youtube.com/vi/ZrVPN3jp1Ac/0.jpg)](https://www.youtube.com/watch?v=ZrVPN3jp1Ac){target=blank} + +- Host your documentation on Cloudflare free tier - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-project-doc-cloudflare/) + +[![Cloudflare Doc Hosting Setup](https://img.youtube.com/vi/AUipbKjgsDI/0.jpg)](https://www.youtube.com/watch?v=AUipbKjgsDI){target=blank} diff --git a/docs/assets/images/DevOpsPipelineUI.png b/docs/assets/images/DevOpsPipelineUI.png new file mode 100644 index 000000000..ab49cec62 Binary files /dev/null and b/docs/assets/images/DevOpsPipelineUI.png differ diff --git a/docs/assets/images/ProductivityCommands.png b/docs/assets/images/ProductivityCommands.png new file mode 100644 index 000000000..1b2d52802 Binary files /dev/null and b/docs/assets/images/ProductivityCommands.png differ diff --git a/docs/assets/images/animation-install-packages.gif b/docs/assets/images/animation-install-packages.gif new file mode 100644 index 000000000..1db74347a Binary files /dev/null and b/docs/assets/images/animation-install-packages.gif differ diff --git a/docs/assets/images/az-tech-work-item.png b/docs/assets/images/az-tech-work-item.png new file mode 100644 index 000000000..05d7d98bb Binary files /dev/null and b/docs/assets/images/az-tech-work-item.png differ diff --git a/docs/assets/images/build-run-branches.png 
b/docs/assets/images/build-run-branches.png index 673f883d4..7cd4666b7 100644 Binary files a/docs/assets/images/build-run-branches.png and b/docs/assets/images/build-run-branches.png differ diff --git a/docs/assets/images/change-sets-vs-cicd.png b/docs/assets/images/change-sets-vs-cicd.png index ba8cb222f..c1f0a155f 100644 Binary files a/docs/assets/images/change-sets-vs-cicd.png and b/docs/assets/images/change-sets-vs-cicd.png differ diff --git a/docs/assets/images/ci-cd-schema-main.jpg b/docs/assets/images/ci-cd-schema-main.jpg index 6eb5437ca..14658b90f 100644 Binary files a/docs/assets/images/ci-cd-schema-main.jpg and b/docs/assets/images/ci-cd-schema-main.jpg differ diff --git a/docs/assets/images/cloudity-map.jpg b/docs/assets/images/cloudity-map.jpg new file mode 100644 index 000000000..44bb7e96a Binary files /dev/null and b/docs/assets/images/cloudity-map.jpg differ diff --git a/docs/assets/images/detect-inactive-metadata.gif b/docs/assets/images/detect-inactive-metadata.gif new file mode 100644 index 000000000..1065094c3 Binary files /dev/null and b/docs/assets/images/detect-inactive-metadata.gif differ diff --git a/docs/assets/images/doc-fieldusage.png b/docs/assets/images/doc-fieldusage.png new file mode 100644 index 000000000..8b3c1b2c5 Binary files /dev/null and b/docs/assets/images/doc-fieldusage.png differ diff --git a/docs/assets/images/extension-demo.gif b/docs/assets/images/extension-demo.gif index 76a4f7e96..a4bc9de4d 100644 Binary files a/docs/assets/images/extension-demo.gif and b/docs/assets/images/extension-demo.gif differ diff --git a/docs/assets/images/flow-visual-git-diff-2.jpg b/docs/assets/images/flow-visual-git-diff-2.jpg new file mode 100644 index 000000000..d2ea3836c Binary files /dev/null and b/docs/assets/images/flow-visual-git-diff-2.jpg differ diff --git a/docs/assets/images/flow-visual-git-diff.jpg b/docs/assets/images/flow-visual-git-diff.jpg new file mode 100644 index 000000000..452695afc Binary files /dev/null and 
b/docs/assets/images/flow-visual-git-diff.jpg differ diff --git a/docs/assets/images/foundations.png b/docs/assets/images/foundations.png new file mode 100644 index 000000000..76e419321 Binary files /dev/null and b/docs/assets/images/foundations.png differ diff --git a/docs/assets/images/github-pull-request-1.png b/docs/assets/images/github-pull-request-1.png new file mode 100644 index 000000000..fb81959f2 Binary files /dev/null and b/docs/assets/images/github-pull-request-1.png differ diff --git a/docs/assets/images/github-pull-request-2.png b/docs/assets/images/github-pull-request-2.png new file mode 100644 index 000000000..868317b89 Binary files /dev/null and b/docs/assets/images/github-pull-request-2.png differ diff --git a/docs/assets/images/github-pull-request-create.png b/docs/assets/images/github-pull-request-create.png new file mode 100644 index 000000000..9b631d188 Binary files /dev/null and b/docs/assets/images/github-pull-request-create.png differ diff --git a/docs/assets/images/grafana-screenshot-1.jpg b/docs/assets/images/grafana-screenshot-1.jpg new file mode 100644 index 000000000..ec7866955 Binary files /dev/null and b/docs/assets/images/grafana-screenshot-1.jpg differ diff --git a/docs/assets/images/grafana-screenshot.jpg b/docs/assets/images/grafana-screenshot.jpg new file mode 100644 index 000000000..f7ce35827 Binary files /dev/null and b/docs/assets/images/grafana-screenshot.jpg differ diff --git a/docs/assets/images/install-dependencies-highlight.png b/docs/assets/images/install-dependencies-highlight.png new file mode 100644 index 000000000..83534b8a7 Binary files /dev/null and b/docs/assets/images/install-dependencies-highlight.png differ diff --git a/docs/assets/images/install-dependencies-screenshot.png b/docs/assets/images/install-dependencies-screenshot.png new file mode 100644 index 000000000..eaa4a3c08 Binary files /dev/null and b/docs/assets/images/install-dependencies-screenshot.png differ diff --git 
a/docs/assets/images/multi-org-query-demo.gif b/docs/assets/images/multi-org-query-demo.gif new file mode 100644 index 000000000..fe98fca90 Binary files /dev/null and b/docs/assets/images/multi-org-query-demo.gif differ diff --git a/docs/assets/images/play-dreamforce-session.png b/docs/assets/images/play-dreamforce-session.png new file mode 100644 index 000000000..2793c08d7 Binary files /dev/null and b/docs/assets/images/play-dreamforce-session.png differ diff --git a/docs/assets/images/play-install-tuto.png b/docs/assets/images/play-install-tuto.png new file mode 100644 index 000000000..888da241f Binary files /dev/null and b/docs/assets/images/play-install-tuto.png differ diff --git a/docs/assets/images/project-documentation.gif b/docs/assets/images/project-documentation.gif new file mode 100644 index 000000000..6f7686b60 Binary files /dev/null and b/docs/assets/images/project-documentation.gif differ diff --git a/docs/assets/images/screenshot-agentforce-config-1.jpg b/docs/assets/images/screenshot-agentforce-config-1.jpg new file mode 100644 index 000000000..9429b6148 Binary files /dev/null and b/docs/assets/images/screenshot-agentforce-config-1.jpg differ diff --git a/docs/assets/images/screenshot-agentforce-config-2.jpg b/docs/assets/images/screenshot-agentforce-config-2.jpg new file mode 100644 index 000000000..236a4b3e2 Binary files /dev/null and b/docs/assets/images/screenshot-agentforce-config-2.jpg differ diff --git a/docs/assets/images/screenshot-cloudflare-doc.jpg b/docs/assets/images/screenshot-cloudflare-doc.jpg new file mode 100644 index 000000000..8988c3d62 Binary files /dev/null and b/docs/assets/images/screenshot-cloudflare-doc.jpg differ diff --git a/docs/assets/images/screenshot-doc-apex.png b/docs/assets/images/screenshot-doc-apex.png new file mode 100644 index 000000000..45c313795 Binary files /dev/null and b/docs/assets/images/screenshot-doc-apex.png differ diff --git a/docs/assets/images/screenshot-doc-branches-strategy.jpg 
b/docs/assets/images/screenshot-doc-branches-strategy.jpg new file mode 100644 index 000000000..207306e34 Binary files /dev/null and b/docs/assets/images/screenshot-doc-branches-strategy.jpg differ diff --git a/docs/assets/images/screenshot-email-config.jpg b/docs/assets/images/screenshot-email-config.jpg new file mode 100644 index 000000000..fca67613c Binary files /dev/null and b/docs/assets/images/screenshot-email-config.jpg differ diff --git a/docs/assets/images/screenshot-flow-doc.jpg b/docs/assets/images/screenshot-flow-doc.jpg new file mode 100644 index 000000000..48eb27310 Binary files /dev/null and b/docs/assets/images/screenshot-flow-doc.jpg differ diff --git a/docs/assets/images/screenshot-html-doc.jpg b/docs/assets/images/screenshot-html-doc.jpg new file mode 100644 index 000000000..309fe050a Binary files /dev/null and b/docs/assets/images/screenshot-html-doc.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-audittrail-excel.jpg b/docs/assets/images/screenshot-monitoring-audittrail-excel.jpg new file mode 100644 index 000000000..285632290 Binary files /dev/null and b/docs/assets/images/screenshot-monitoring-audittrail-excel.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-audittrail-local.jpg b/docs/assets/images/screenshot-monitoring-audittrail-local.jpg new file mode 100644 index 000000000..59f9f29b4 Binary files /dev/null and b/docs/assets/images/screenshot-monitoring-audittrail-local.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-audittrail.jpg b/docs/assets/images/screenshot-monitoring-audittrail.jpg index f58af4350..e163a9cbe 100644 Binary files a/docs/assets/images/screenshot-monitoring-audittrail.jpg and b/docs/assets/images/screenshot-monitoring-audittrail.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-release-updates-grafana.jpg b/docs/assets/images/screenshot-monitoring-release-updates-grafana.jpg new file mode 100644 index 000000000..8f8fbf017 Binary files /dev/null and 
b/docs/assets/images/screenshot-monitoring-release-updates-grafana.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-release-updates.jpg b/docs/assets/images/screenshot-monitoring-release-updates.jpg new file mode 100644 index 000000000..f04ac1c9a Binary files /dev/null and b/docs/assets/images/screenshot-monitoring-release-updates.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-unused-apex-grafana.jpg b/docs/assets/images/screenshot-monitoring-unused-apex-grafana.jpg new file mode 100644 index 000000000..edc5a23c1 Binary files /dev/null and b/docs/assets/images/screenshot-monitoring-unused-apex-grafana.jpg differ diff --git a/docs/assets/images/screenshot-monitoring-unused-apex.jpg b/docs/assets/images/screenshot-monitoring-unused-apex.jpg new file mode 100644 index 000000000..8d730e9b1 Binary files /dev/null and b/docs/assets/images/screenshot-monitoring-unused-apex.jpg differ diff --git a/docs/assets/images/screenshot-object-diagram.jpg b/docs/assets/images/screenshot-object-diagram.jpg new file mode 100644 index 000000000..76a9a1ef6 Binary files /dev/null and b/docs/assets/images/screenshot-object-diagram.jpg differ diff --git a/docs/assets/images/screenshot-project-doc-2.jpg b/docs/assets/images/screenshot-project-doc-2.jpg new file mode 100644 index 000000000..02230219b Binary files /dev/null and b/docs/assets/images/screenshot-project-doc-2.jpg differ diff --git a/docs/assets/images/screenshot-project-doc-profile.gif b/docs/assets/images/screenshot-project-doc-profile.gif new file mode 100644 index 000000000..f1e49c427 Binary files /dev/null and b/docs/assets/images/screenshot-project-doc-profile.gif differ diff --git a/docs/assets/images/screenshot-project-doc.jpg b/docs/assets/images/screenshot-project-doc.jpg new file mode 100644 index 000000000..1c81a53c9 Binary files /dev/null and b/docs/assets/images/screenshot-project-doc.jpg differ diff --git a/docs/assets/images/screenshot-security-artifacts-1.jpg 
b/docs/assets/images/screenshot-security-artifacts-1.jpg new file mode 100644 index 000000000..51843ee9a Binary files /dev/null and b/docs/assets/images/screenshot-security-artifacts-1.jpg differ diff --git a/docs/assets/images/screenshot-teams-email-1.jpg b/docs/assets/images/screenshot-teams-email-1.jpg new file mode 100644 index 000000000..04b06fc79 Binary files /dev/null and b/docs/assets/images/screenshot-teams-email-1.jpg differ diff --git a/docs/assets/images/screenshot-teams-email-2.jpg b/docs/assets/images/screenshot-teams-email-2.jpg new file mode 100644 index 000000000..1e9e60c5d Binary files /dev/null and b/docs/assets/images/screenshot-teams-email-2.jpg differ diff --git a/docs/assets/images/sfdx-hardis-banner.png b/docs/assets/images/sfdx-hardis-banner.png index fa77cb965..a0cd9e4c9 100644 Binary files a/docs/assets/images/sfdx-hardis-banner.png and b/docs/assets/images/sfdx-hardis-banner.png differ diff --git a/docs/cloudity-consulting-partner.md b/docs/cloudity-consulting-partner.md new file mode 100644 index 000000000..b25bf97b1 --- /dev/null +++ b/docs/cloudity-consulting-partner.md @@ -0,0 +1,26 @@ +--- +title: Work with Cloudity +description: See how Cloudity can help you as a Salesforce Consulting Partner +--- + +## Your Expert Salesforce Partner + +
+ description +
+ +___ + +At Cloudity, we love **sharing our technical expertise**, such as sfdx-hardis as Open-Source, but that's **just a small part of everything we do**! + +We provide **end-to-end digitalization services for companies**, combining **technical skills** and **business expertise** to deliver **solutions that set the bar high for quality**. With the backing of our talented people, we’re aiming to grow our influence and spread our culture based on trust, ambition and empowerment. + +By end-2025, our goal is to grow our workforce to more than 500 employees, to sustain profitability and to make Cloudity a go-to-market player for top companies and for our partners, Salesforce and Meta. + +With experts from Hardis Group, Carrenet and Cloudity having joined forces, we now have more than 350 employees in four countries, serving customers across 30 countries (Europe + US). + +Want to know more? Contact us on our [**Website**](https://cloudity.com/)! + +
+ description +
diff --git a/docs/commands.md b/docs/commands.md index db15d60eb..7e9bacbae 100644 --- a/docs/commands.md +++ b/docs/commands.md @@ -4,155 +4,211 @@ | Command | Title | |:----------------------------------------------|:------| -| [**hardis:auth:login**](hardis/auth/login.md) | Login | +| [**hardis:auth:login**](hardis/auth/login.md) | | ## hardis:cache -| Command | Title | -|:------------------------------------------------|:------------------------| -| [**hardis:cache:clear**](hardis/cache/clear.md) | Clear sfdx-hardis cache | +| Command | Title | +|:------------------------------------------------|:------| +| [**hardis:cache:clear**](hardis/cache/clear.md) | | ## hardis:config -| Command | Title | -|:----------------------------------------------|:-------------------------------| -| [**hardis:config:get**](hardis/config/get.md) | Deploy metadata sources to org | +| Command | Title | +|:----------------------------------------------|:------| +| [**hardis:config:get**](hardis/config/get.md) | | + +## hardis:deploy + +| Command | Title | +|:--------------------------------------------------------|:------| +| [**hardis:deploy:quick**](hardis/deploy/quick.md) | | +| [**hardis:deploy:start**](hardis/deploy/start.md) | | +| [**hardis:deploy:validate**](hardis/deploy/validate.md) | | ## hardis:doc -| Command | Title | -|:----------------------------------------------------------------------------|:-----------------------------------| -| [**hardis:doc:extract:permsetgroups**](hardis/doc/extract/permsetgroups.md) | Generate project documentation | -| [**hardis:doc:plugin:generate**](hardis/doc/plugin/generate.md) | Generate SFDX Plugin Documentation | +| Command | Title | +|:----------------------------------------------------------------------------|:------| +| [**hardis:doc:extract:permsetgroups**](hardis/doc/extract/permsetgroups.md) | | +| [**hardis:doc:fieldusage**](hardis/doc/fieldusage.md) | | +| [**hardis:doc:flow2markdown**](hardis/doc/flow2markdown.md) | | +| 
[**hardis:doc:mkdocs-to-cf**](hardis/doc/mkdocs-to-cf.md) | | +| [**hardis:doc:mkdocs-to-salesforce**](hardis/doc/mkdocs-to-salesforce.md) | | +| [**hardis:doc:override-prompts**](hardis/doc/override-prompts.md) | | +| [**hardis:doc:packagexml2markdown**](hardis/doc/packagexml2markdown.md) | | +| [**hardis:doc:plugin:generate**](hardis/doc/plugin/generate.md) | | +| [**hardis:doc:project2markdown**](hardis/doc/project2markdown.md) | | + +## hardis:git + +| Command | Title | +|:----------------------------------------------------------------------------|:------| +| [**hardis:git:pull-requests:extract**](hardis/git/pull-requests/extract.md) | | ## hardis:lint -| Command | Title | -|:----------------------------------------------------------------------|:-------------------------------------------| -| [**hardis:lint:access**](hardis/lint/access.md) | check permission access | -| [**hardis:lint:metadatastatus**](hardis/lint/metadatastatus.md) | check inactive metadatas | -| [**hardis:lint:missingattributes**](hardis/lint/missingattributes.md) | check missing description on custom fields | -| [**hardis:lint:unusedmetadatas**](hardis/lint/unusedmetadatas.md) | check unused labels and custom permissions | +| Command | Title | +|:----------------------------------------------------------------------|:------| +| [**hardis:lint:access**](hardis/lint/access.md) | | +| [**hardis:lint:metadatastatus**](hardis/lint/metadatastatus.md) | | +| [**hardis:lint:missingattributes**](hardis/lint/missingattributes.md) | | +| [**hardis:lint:unusedmetadatas**](hardis/lint/unusedmetadatas.md) | | ## hardis:mdapi -| Command | Title | -|:--------------------------------------------------|:-----------------------------------------------------------------------------------------------| -| [**hardis:mdapi:deploy**](hardis/mdapi/deploy.md) | sfdx-hardis wrapper for sfdx force:mdapi:deploy that displays tips to solve deployment errors. 
| +| Command | Title | +|:--------------------------------------------------|:------| +| [**hardis:mdapi:deploy**](hardis/mdapi/deploy.md) | | ## hardis:misc -| Command | Title | -|:----------------------------------------------------|:------------| -| [**hardis:misc:toml2csv**](hardis/misc/toml2csv.md) | TOML to CSV | +| Command | Title | +|:--------------------------------------------------------------------------------------|:------| +| [**hardis:misc:custom-label-translations**](hardis/misc/custom-label-translations.md) | | +| [**hardis:misc:purge-references**](hardis/misc/purge-references.md) | | +| [**hardis:misc:servicenow-report**](hardis/misc/servicenow-report.md) | | +| [**hardis:misc:toml2csv**](hardis/misc/toml2csv.md) | | ## hardis:org -| Command | Title | -|:--------------------------------------------------------------------------------------|:------------------------------------------------------| -| [**hardis:org:configure:data**](hardis/org/configure/data.md) | Configure Data project | -| [**hardis:org:configure:files**](hardis/org/configure/files.md) | Configure File export project | -| [**hardis:org:configure:monitoring**](hardis/org/configure/monitoring.md) | Configure org monitoring | -| [**hardis:org:connect**](hardis/org/connect.md) | Connect to an org | -| [**hardis:org:create**](hardis/org/create.md) | Create sandbox org | -| [**hardis:org:data:delete**](hardis/org/data/delete.md) | Delete data | -| [**hardis:org:data:export**](hardis/org/data/export.md) | Export data | -| [**hardis:org:data:import**](hardis/org/data/import.md) | Import data | -| [**hardis:org:diagnose:audittrail**](hardis/org/diagnose/audittrail.md) | Diagnose content of Setup Audit Trail | -| [**hardis:org:diagnose:legacyapi**](hardis/org/diagnose/legacyapi.md) | Check for legacy API use | -| [**hardis:org:diagnose:licenses**](hardis/org/diagnose/licenses.md) | List licenses subscribed and used in a Salesforce org | -| 
[**hardis:org:diagnose:unusedlicenses**](hardis/org/diagnose/unusedlicenses.md) | Detect unused Permission Set Licenses | -| [**hardis:org:diagnose:unusedusers**](hardis/org/diagnose/unusedusers.md) | Detect unused Users in Salesforce | -| [**hardis:org:files:export**](hardis/org/files/export.md) | Export files | -| [**hardis:org:fix:listviewmine**](hardis/org/fix/listviewmine.md) | Fix listviews with | -| [**hardis:org:generate:packagexmlfull**](hardis/org/generate/packagexmlfull.md) | Generate Full Org package.xml | -| [**hardis:org:monitor:all**](hardis/org/monitor/all.md) | Monitor org | -| [**hardis:org:monitor:backup**](hardis/org/monitor/backup.md) | Backup DX sources | -| [**hardis:org:monitor:limits**](hardis/org/monitor/limits.md) | Check org limits | -| [**hardis:org:purge:apexlog**](hardis/org/purge/apexlog.md) | Purge Apex Logs | -| [**hardis:org:purge:flow**](hardis/org/purge/flow.md) | Purge Flow versions | -| [**hardis:org:retrieve:packageconfig**](hardis/org/retrieve/packageconfig.md) | Retrieve package configuration from an org | -| [**hardis:org:retrieve:sources:analytics**](hardis/org/retrieve/sources/analytics.md) | Retrieve CRM Analytics configuration from an org | -| [**hardis:org:retrieve:sources:dx**](hardis/org/retrieve/sources/dx.md) | Retrieve sfdx sources from org | -| [**hardis:org:retrieve:sources:dx2**](hardis/org/retrieve/sources/dx2.md) | Retrieve sfdx sources from org (2) | -| [**hardis:org:retrieve:sources:metadata**](hardis/org/retrieve/sources/metadata.md) | Retrieve sfdx sources from org | -| [**hardis:org:retrieve:sources:retrofit**](hardis/org/retrieve/sources/retrofit.md) | Retrofit changes from an org | -| [**hardis:org:select**](hardis/org/select.md) | Select org | -| [**hardis:org:test:apex**](hardis/org/test/apex.md) | Run apex tests | -| [**hardis:org:user:activateinvalid**](hardis/org/user/activateinvalid.md) | Reactivate sandbox invalid users | -| [**hardis:org:user:freeze**](hardis/org/user/freeze.md) | Freeze user 
logins | -| [**hardis:org:user:unfreeze**](hardis/org/user/unfreeze.md) | Unfreeze user logins | +| Command | Title | +|:----------------------------------------------------------------------------------------------|:------| +| [**hardis:org:community:update**](hardis/org/community/update.md) | | +| [**hardis:org:configure:data**](hardis/org/configure/data.md) | | +| [**hardis:org:configure:files**](hardis/org/configure/files.md) | | +| [**hardis:org:configure:monitoring**](hardis/org/configure/monitoring.md) | | +| [**hardis:org:connect**](hardis/org/connect.md) | | +| [**hardis:org:create**](hardis/org/create.md) | | +| [**hardis:org:data:delete**](hardis/org/data/delete.md) | | +| [**hardis:org:data:export**](hardis/org/data/export.md) | | +| [**hardis:org:data:import**](hardis/org/data/import.md) | | +| [**hardis:org:diagnose:audittrail**](hardis/org/diagnose/audittrail.md) | | +| [**hardis:org:diagnose:instanceupgrade**](hardis/org/diagnose/instanceupgrade.md) | | +| [**hardis:org:diagnose:legacyapi**](hardis/org/diagnose/legacyapi.md) | | +| [**hardis:org:diagnose:licenses**](hardis/org/diagnose/licenses.md) | | +| [**hardis:org:diagnose:releaseupdates**](hardis/org/diagnose/releaseupdates.md) | | +| [**hardis:org:diagnose:unused-apex-classes**](hardis/org/diagnose/unused-apex-classes.md) | | +| [**hardis:org:diagnose:unused-connected-apps**](hardis/org/diagnose/unused-connected-apps.md) | | +| [**hardis:org:diagnose:unusedlicenses**](hardis/org/diagnose/unusedlicenses.md) | | +| [**hardis:org:diagnose:unusedusers**](hardis/org/diagnose/unusedusers.md) | | +| [**hardis:org:files:export**](hardis/org/files/export.md) | | +| [**hardis:org:files:import**](hardis/org/files/import.md) | | +| [**hardis:org:fix:listviewmine**](hardis/org/fix/listviewmine.md) | | +| [**hardis:org:generate:packagexmlfull**](hardis/org/generate/packagexmlfull.md) | | +| [**hardis:org:monitor:all**](hardis/org/monitor/all.md) | | +| 
[**hardis:org:monitor:backup**](hardis/org/monitor/backup.md) | | +| [**hardis:org:monitor:limits**](hardis/org/monitor/limits.md) | | +| [**hardis:org:multi-org-query**](hardis/org/multi-org-query.md) | | +| [**hardis:org:purge:apexlog**](hardis/org/purge/apexlog.md) | | +| [**hardis:org:purge:flow**](hardis/org/purge/flow.md) | | +| [**hardis:org:refresh:after-refresh**](hardis/org/refresh/after-refresh.md) | | +| [**hardis:org:refresh:before-refresh**](hardis/org/refresh/before-refresh.md) | | +| [**hardis:org:retrieve:packageconfig**](hardis/org/retrieve/packageconfig.md) | | +| [**hardis:org:retrieve:sources:analytics**](hardis/org/retrieve/sources/analytics.md) | | +| [**hardis:org:retrieve:sources:dx**](hardis/org/retrieve/sources/dx.md) | | +| [**hardis:org:retrieve:sources:dx2**](hardis/org/retrieve/sources/dx2.md) | | +| [**hardis:org:retrieve:sources:metadata**](hardis/org/retrieve/sources/metadata.md) | | +| [**hardis:org:retrieve:sources:retrofit**](hardis/org/retrieve/sources/retrofit.md) | | +| [**hardis:org:select**](hardis/org/select.md) | | +| [**hardis:org:test:apex**](hardis/org/test/apex.md) | | +| [**hardis:org:user:activateinvalid**](hardis/org/user/activateinvalid.md) | | +| [**hardis:org:user:freeze**](hardis/org/user/freeze.md) | | +| [**hardis:org:user:unfreeze**](hardis/org/user/unfreeze.md) | | ## hardis:package -| Command | Title | -|:------------------------------------------------------------------------|:-----------------------------------| -| [**hardis:package:create**](hardis/package/create.md) | Create a new package | -| [**hardis:package:install**](hardis/package/install.md) | Install packages in an org | -| [**hardis:package:mergexml**](hardis/package/mergexml.md) | Merge package.xml files | -| [**hardis:package:version:create**](hardis/package/version/create.md) | Create a new version of a package | -| [**hardis:package:version:list**](hardis/package/version/list.md) | Create a new version of a package | -| 
[**hardis:package:version:promote**](hardis/package/version/promote.md) | Promote new versions of package(s) | +| Command | Title | +|:------------------------------------------------------------------------|:------| +| [**hardis:package:create**](hardis/package/create.md) | | +| [**hardis:package:install**](hardis/package/install.md) | | +| [**hardis:package:mergexml**](hardis/package/mergexml.md) | | +| [**hardis:package:version:create**](hardis/package/version/create.md) | | +| [**hardis:package:version:list**](hardis/package/version/list.md) | | +| [**hardis:package:version:promote**](hardis/package/version/promote.md) | | + +## hardis:packagexml + +| Command | Title | +|:------------------------------------------------------------|:------| +| [**hardis:packagexml:append**](hardis/packagexml/append.md) | | +| [**hardis:packagexml:remove**](hardis/packagexml/remove.md) | | ## hardis:project -| Command | Title | -|:----------------------------------------------------------------------------------------------|:----------------------------------------------------------------| -| [**hardis:project:audit:apiversion**](hardis/project/audit/apiversion.md) | Audit Metadatas API Version | -| [**hardis:project:audit:callincallout**](hardis/project/audit/callincallout.md) | Audit CallIns and CallOuts | -| [**hardis:project:audit:duplicatefiles**](hardis/project/audit/duplicatefiles.md) | Find duplicate sfdx files | -| [**hardis:project:audit:remotesites**](hardis/project/audit/remotesites.md) | Audit Remote Sites | -| [**hardis:project:clean:emptyitems**](hardis/project/clean/emptyitems.md) | Clean retrieved empty items in dx sources | -| [**hardis:project:clean:flowpositions**](hardis/project/clean/flowpositions.md) | Clean Flow Positions | -| [**hardis:project:clean:hiddenitems**](hardis/project/clean/hiddenitems.md) | Clean retrieved hidden items in dx sources | -| [**hardis:project:clean:listviews**](hardis/project/clean/listviews.md) | Replace Mine by Everything in 
ListViews | -| [**hardis:project:clean:manageditems**](hardis/project/clean/manageditems.md) | Clean retrieved managed items in dx sources | -| [**hardis:project:clean:minimizeprofiles**](hardis/project/clean/minimizeprofiles.md) | Clean profiles of Permission Set attributes | -| [**hardis:project:clean:orgmissingitems**](hardis/project/clean/orgmissingitems.md) | Clean SFDX items using target org definition | -| [**hardis:project:clean:references**](hardis/project/clean/references.md) | Clean references in dx sources | -| [**hardis:project:clean:retrievefolders**](hardis/project/clean/retrievefolders.md) | Retrieve dashboards, documents and report folders in DX sources | -| [**hardis:project:clean:standarditems**](hardis/project/clean/standarditems.md) | Clean retrieved standard items in dx sources | -| [**hardis:project:clean:systemdebug**](hardis/project/clean/systemdebug.md) | Clean System debug | -| [**hardis:project:clean:xml**](hardis/project/clean/xml.md) | Clean retrieved empty items in dx sources | -| [**hardis:project:configure:auth**](hardis/project/configure/auth.md) | Configure authentication | -| [**hardis:project:convert:profilestopermsets**](hardis/project/convert/profilestopermsets.md) | Convert Profiles into Permission Sets | -| [**hardis:project:create**](hardis/project/create.md) | Login | -| [**hardis:project:deploy:sources:dx**](hardis/project/deploy/sources/dx.md) | Deploy sfdx sources to org | -| [**hardis:project:deploy:sources:metadata**](hardis/project/deploy/sources/metadata.md) | Deploy metadata sources to org | -| [**hardis:project:fix:profiletabs**](hardis/project/fix/profiletabs.md) | Fix profiles to add tabs that are not retrieved by SF CLI | -| [**hardis:project:fix:v53flexipages**](hardis/project/fix/v53flexipages.md) | Fix flexipages for v53 | -| [**hardis:project:generate:gitdelta**](hardis/project/generate/gitdelta.md) | Generate Git Delta | -| [**hardis:project:lint**](hardis/project/lint.md) | Lint | -| 
[**hardis:project:metadata:findduplicates**](hardis/project/metadata/findduplicates.md) | XML duplicate values finder | +| Command | Title | +|:----------------------------------------------------------------------------------------------|:------| +| [**hardis:project:audit:apiversion**](hardis/project/audit/apiversion.md) | | +| [**hardis:project:audit:callincallout**](hardis/project/audit/callincallout.md) | | +| [**hardis:project:audit:duplicatefiles**](hardis/project/audit/duplicatefiles.md) | | +| [**hardis:project:audit:remotesites**](hardis/project/audit/remotesites.md) | | +| [**hardis:project:clean:emptyitems**](hardis/project/clean/emptyitems.md) | | +| [**hardis:project:clean:filter-xml-content**](hardis/project/clean/filter-xml-content.md) | | +| [**hardis:project:clean:flowpositions**](hardis/project/clean/flowpositions.md) | | +| [**hardis:project:clean:hiddenitems**](hardis/project/clean/hiddenitems.md) | | +| [**hardis:project:clean:listviews**](hardis/project/clean/listviews.md) | | +| [**hardis:project:clean:manageditems**](hardis/project/clean/manageditems.md) | | +| [**hardis:project:clean:minimizeprofiles**](hardis/project/clean/minimizeprofiles.md) | | +| [**hardis:project:clean:orgmissingitems**](hardis/project/clean/orgmissingitems.md) | | +| [**hardis:project:clean:references**](hardis/project/clean/references.md) | | +| [**hardis:project:clean:retrievefolders**](hardis/project/clean/retrievefolders.md) | | +| [**hardis:project:clean:sensitive-metadatas**](hardis/project/clean/sensitive-metadatas.md) | | +| [**hardis:project:clean:standarditems**](hardis/project/clean/standarditems.md) | | +| [**hardis:project:clean:systemdebug**](hardis/project/clean/systemdebug.md) | | +| [**hardis:project:clean:xml**](hardis/project/clean/xml.md) | | +| [**hardis:project:configure:auth**](hardis/project/configure/auth.md) | | +| [**hardis:project:convert:profilestopermsets**](hardis/project/convert/profilestopermsets.md) | | +| 
[**hardis:project:create**](hardis/project/create.md) | | +| [**hardis:project:deploy:notify**](hardis/project/deploy/notify.md) | | +| [**hardis:project:deploy:quick**](hardis/project/deploy/quick.md) | | +| [**hardis:project:deploy:simulate**](hardis/project/deploy/simulate.md) | | +| [**hardis:project:deploy:smart**](hardis/project/deploy/smart.md) | | +| [**hardis:project:deploy:sources:dx**](hardis/project/deploy/sources/dx.md) | | +| [**hardis:project:deploy:sources:metadata**](hardis/project/deploy/sources/metadata.md) | | +| [**hardis:project:deploy:start**](hardis/project/deploy/start.md) | | +| [**hardis:project:deploy:validate**](hardis/project/deploy/validate.md) | | +| [**hardis:project:fix:profiletabs**](hardis/project/fix/profiletabs.md) | | +| [**hardis:project:fix:v53flexipages**](hardis/project/fix/v53flexipages.md) | | +| [**hardis:project:generate:bypass**](hardis/project/generate/bypass.md) | | +| [**hardis:project:generate:flow-git-diff**](hardis/project/generate/flow-git-diff.md) | | +| [**hardis:project:generate:gitdelta**](hardis/project/generate/gitdelta.md) | | +| [**hardis:project:lint**](hardis/project/lint.md) | | +| [**hardis:project:metadata:findduplicates**](hardis/project/metadata/findduplicates.md) | | ## hardis:scratch -| Command | Title | -|:----------------------------------------------------------------------|:-----------------------------------------| -| [**hardis:scratch:create**](hardis/scratch/create.md) | Create and initialize scratch org | -| [**hardis:scratch:delete**](hardis/scratch/delete.md) | Delete scratch orgs(s) | -| [**hardis:scratch:pool:create**](hardis/scratch/pool/create.md) | Create and configure scratch org pool | -| [**hardis:scratch:pool:localauth**](hardis/scratch/pool/localauth.md) | Authenticate locally to scratch org pool | -| [**hardis:scratch:pool:refresh**](hardis/scratch/pool/refresh.md) | Refresh scratch org pool | -| [**hardis:scratch:pool:reset**](hardis/scratch/pool/reset.md) | Reset scratch 
org pool | -| [**hardis:scratch:pool:view**](hardis/scratch/pool/view.md) | View scratch org pool info | -| [**hardis:scratch:pull**](hardis/scratch/pull.md) | Scratch PULL | -| [**hardis:scratch:push**](hardis/scratch/push.md) | Scratch PUSH | +| Command | Title | +|:----------------------------------------------------------------------|:------| +| [**hardis:scratch:create**](hardis/scratch/create.md) | | +| [**hardis:scratch:delete**](hardis/scratch/delete.md) | | +| [**hardis:scratch:pool:create**](hardis/scratch/pool/create.md) | | +| [**hardis:scratch:pool:localauth**](hardis/scratch/pool/localauth.md) | | +| [**hardis:scratch:pool:refresh**](hardis/scratch/pool/refresh.md) | | +| [**hardis:scratch:pool:reset**](hardis/scratch/pool/reset.md) | | +| [**hardis:scratch:pool:view**](hardis/scratch/pool/view.md) | | +| [**hardis:scratch:pull**](hardis/scratch/pull.md) | | +| [**hardis:scratch:push**](hardis/scratch/push.md) | | ## hardis:source -| Command | Title | -|:--------------------------------------------------------|:------------------------------------------------------------------------------------------------| -| [**hardis:source:deploy**](hardis/source/deploy.md) | sfdx-hardis wrapper for sfdx force:source:deploy that displays tips to solve deployment errors. | -| [**hardis:source:push**](hardis/source/push.md) | sfdx-hardis wrapper for sfdx force:source:push that displays tips to solve deployment errors. 
| -| [**hardis:source:retrieve**](hardis/source/retrieve.md) | sfdx-hardis wrapper for sfdx force:source:retrieve | +| Command | Title | +|:--------------------------------------------------------|:------| +| [**hardis:source:deploy**](hardis/source/deploy.md) | | +| [**hardis:source:push**](hardis/source/push.md) | | +| [**hardis:source:retrieve**](hardis/source/retrieve.md) | | ## hardis:work -| Command | Title | -|:----------------------------------------------------------------|:---------------------| -| [**hardis:work:new**](hardis/work/new.md) | New work task | -| [**hardis:work:refresh**](hardis/work/refresh.md) | Refresh work task | -| [**hardis:work:resetselection**](hardis/work/resetselection.md) | Select again | -| [**hardis:work:save**](hardis/work/save.md) | Save work task | -| [**hardis:work:ws**](hardis/work/ws.md) | WebSocket operations | +| Command | Title | +|:----------------------------------------------------------------|:------| +| [**hardis:work:new**](hardis/work/new.md) | | +| [**hardis:work:refresh**](hardis/work/refresh.md) | | +| [**hardis:work:resetselection**](hardis/work/resetselection.md) | | +| [**hardis:work:save**](hardis/work/save.md) | | +| [**hardis:work:ws**](hardis/work/ws.md) | | + +## hello:world + +| Command | Title | +|:----------------------------------|:------| +| [**hello:world**](hello/world.md) | | diff --git a/docs/contributing.md b/docs/contributing.md new file mode 100644 index 000000000..950817b87 --- /dev/null +++ b/docs/contributing.md @@ -0,0 +1,12 @@ +Everyone is welcome to contribute to sfdx-hardis (even juniors: we'll assist you !) 
+ +- Install Node.js ([recommended version](https://nodejs.org/en/)) +- Install typescript by running `npm install typescript --global` +- Install yarn by running `npm install yarn --global` +- Install Salesforce DX by running `npm install @salesforce/cli --global` command line +- Fork this repo and clone it (or just clone if you are an internal contributor) +- At the root of the repository: + - Run `yarn` to install dependencies + - Run `sf plugins link` to link the local sfdx-hardis to SFDX CLI + - Run `tsc --watch` to transpile typescript into js everytime you update a TS file +- Debug commands using `NODE_OPTIONS=--inspect-brk sf hardis:somecommand -someparameter somevalue` diff --git a/docs/contributors.md b/docs/contributors.md new file mode 100644 index 000000000..0de51f71d --- /dev/null +++ b/docs/contributors.md @@ -0,0 +1,20 @@ +### Organization + +sfdx-hardis is primarily led by Nicolas Vuillamy & [Cloudity](https://www.cloudity.com/), but has many external contributors that we cant thank enough ! 
+ +### Pull Requests Authors + + + + + +### Special Thanks + +- [Roman Hentschke](https://www.linkedin.com/in/derroman/), for building the BitBucket CI/CD integration +- [Leo Jokinen](https://www.linkedin.com/in/leojokinen/), for building the GitHub CI/CD integration +- [Mariia Pyvovarchuk](https://www.linkedin.com/in/mpyvo/), for her work about generating automations documentation +- [Matheus Delazeri](https://www.linkedin.com/in/matheus-delazeri-souza/), for the PDF output of documentation +- [Taha Basri](https://www.linkedin.com/in/tahabasri/), for his work about generating documentation of LWC +- [Anush Poudel](https://www.linkedin.com/in/anushpoudel/), for integrating sfdx-hardis with multiple LLMs using langchainJs +- [Sebastien Colladon](https://www.linkedin.com/in/sebastien-colladon/), for providing sfdx-git-delta which is highly used within sfdx-hardis +- [Stepan Stepanov](https://www.linkedin.com/in/stepan-stepanov-79a48734/), for implementing the deployment mode _delta with dependencies_ diff --git a/docs/events.md b/docs/events.md new file mode 100644 index 000000000..0f19814fa --- /dev/null +++ b/docs/events.md @@ -0,0 +1,53 @@ +### London's Calling '25, London + +Auto-generate your SF project Documentation site with open-source and Agentforce + +![image](https://github.com/user-attachments/assets/9b99120c-b660-4f67-b734-793148ac9d00) + +### Czech Dreamin '25, Prague + +Auto-generate your SF project Documentation site with open-source and Agentforce, with [Mariia Pyvovarchuk](https://www.linkedin.com/in/mpyvo/) + +![Czech Dreamin 2025](https://github.com/user-attachments/assets/fa7b7f12-6d6a-437c-badd-20a626bb2163) + +### Trailblazer Admin Group '25, Lyon + +Techs for Admins: Afterwork Salesforce Inspector Reloaded & sfdx-hardis, with Thomas Prouvot + +![](https://github.com/user-attachments/assets/90621fe0-6527-4a34-8a0b-c14bd6d21cbd) + +### Dreamforce 2024, San Francisco + +[Save the Day by Monitoring Your Org with Open-Source 
Tools](https://reg.salesforce.com/flow/plus/df24/sessioncatalog/page/catalog/session/1718915808069001Q7HH), with Olga Shirikova + +[![Dreamforce 2024 Video](https://img.youtube.com/vi/NxiLiYeo11A/0.jpg)](https://www.youtube.com/watch?v=NxiLiYeo11A) + +### Wir Sind Ohana '24, Berlin + +Automate the Monitoring of your Salesforce orgs with open-source tools only!, with Yosra Saidani + +[![Wir Sind Ohana Video](https://img.youtube.com/vi/xGbT6at7RZ0/0.jpg)](https://www.youtube.com/watch?v=xGbT6at7RZ0) + +### Polish Dreamin '24, Wroclaw, Poland + +[Easy and complete Salesforce CI/CD with open-source only!](https://coffeeforce.pl/dreamin/speaker/nicolas-vuillamy/), with Wojciech Suwiński + +![Polish Dreamin 2024](https://github.com/nvuillam/nvuillam/assets/17500430/e843cc08-bf8a-452d-b7f0-c64a314f1b60) + +### French Touch Dreamin '23, Paris + +[Automate the Monitoring of your Salesforce orgs with open-source tools only!](https://frenchtouchdreamin.com/index.php/schedule/), with Maxime Guenego + +![French Touch Dreamin 2023](https://github.com/nvuillam/nvuillam/assets/17500430/8a2e1bbf-3402-4929-966d-5f99cb13cd29) + +### Dreamforce 2023, San Francisco + +[Easy Salesforce CI/CD with open-source and clicks only thanks to sfdx-hardis!](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl), with Jean-Pierre Rizzi + +[![Dreamforce 2023 Video](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs) + +### Yeur Dreamin' 2023, Brussels + +An easy and complete Salesforce CI/CD release management with open-source only !, with Angélique Picoreau + +[![image](https://github.com/nvuillam/nvuillam/assets/17500430/6470df20-7449-444b-a0a5-7dc22f5f6188)](https://www.linkedin.com/posts/nicolas-vuillamy_cicd-opensource-trailblazercommunity-activity-7076859027321704448-F1g-?utm_source=share&utm_medium=member_desktop) diff --git a/docs/grafana/dashboards/DASH - 00_Home.json b/docs/grafana/dashboards/DASH - 
00_Home.json index 6a52b2530..a1d6d65a3 100644 --- a/docs/grafana/dashboards/DASH - 00_Home.json +++ b/docs/grafana/dashboards/DASH - 00_Home.json @@ -21,13 +21,13 @@ "type": "grafana", "id": "grafana", "name": "Grafana", - "version": "11.1.0-70958" + "version": "12.2.0-17245430286.patch2" }, { "type": "datasource", "id": "loki", "name": "Loki", - "version": "1.0.0" + "version": "12.2.0-17245430286.patch2" }, { "type": "panel", @@ -61,7 +61,6 @@ "editable": true, "fiscalYearStartMonth": 0, "graphTooltip": 0, - "id": null, "links": [], "panels": [ { @@ -100,7 +99,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "red", @@ -123,8 +122,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -132,7 +134,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -188,7 +190,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -216,7 +218,9 @@ "minVizWidth": 75, "orientation": "auto", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "limit": 1, "values": false @@ -225,7 +229,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -315,7 +319,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -343,7 +347,9 @@ "minVizWidth": 75, "orientation": "auto", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "limit": 1, "values": false @@ -352,7 +358,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", 
"targets": [ { "datasource": { @@ -442,7 +448,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -470,7 +476,9 @@ "minVizWidth": 75, "orientation": "auto", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "limit": 1, "values": false @@ -479,7 +487,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -569,7 +577,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -597,7 +605,9 @@ "minVizWidth": 75, "orientation": "auto", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "limit": 1, "values": false @@ -606,7 +616,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -696,7 +706,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -724,7 +734,9 @@ "minVizWidth": 75, "orientation": "auto", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "limit": 1, "values": false @@ -733,7 +745,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -836,7 +848,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 } ] } @@ -855,8 +867,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -864,7 +879,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -919,7 +934,7 @@ "steps": [ { 
"color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -946,8 +961,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -955,7 +973,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1008,7 +1026,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "dark-orange", @@ -1031,8 +1049,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -1040,7 +1061,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1093,7 +1114,7 @@ "steps": [ { "color": "orange", - "value": null + "value": 0 }, { "color": "yellow", @@ -1110,7 +1131,7 @@ }, "gridPos": { "h": 4, - "w": 3, + "w": 4, "x": 12, "y": 6 }, @@ -1119,9 +1140,12 @@ "colorMode": "value", "graphMode": "area", "justifyMode": "center", - "orientation": "auto", + "orientation": "vertical", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -1129,7 +1153,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1173,7 +1197,7 @@ "links": [ { "title": "Show details", - "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=LEGACY_API&${__url_time_range}&var-indicatorLabel=Suspect Setup Actions" + "url": 
"/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=LEGACY_API&${__url_time_range}&var-indicatorLabel=Legacy API calls" } ], "mappings": [], @@ -1182,7 +1206,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "dark-orange", @@ -1196,7 +1220,7 @@ "gridPos": { "h": 4, "w": 4, - "x": 15, + "x": 16, "y": 6 }, "id": 19, @@ -1205,8 +1229,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -1214,7 +1241,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1244,6 +1271,95 @@ ], "type": "stat" }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Unsecured Connected Apps", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=UNSECURED_CONNECTED_APPS&${__url_time_range}&var-indicatorLabel=Unsecured Connected Apps" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + "color": "red", + "value": 1 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 20, + "y": 6 + }, + "id": 37, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": 
"${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "direction": "backward", + "editorMode": "builder", + "expr": "{type=\"UNSECURED_CONNECTED_APPS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, { "collapsed": false, "gridPos": { @@ -1281,7 +1397,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "red", @@ -1304,8 +1420,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -1313,7 +1432,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1371,7 +1490,7 @@ "steps": [ { "color": "dark-red", - "value": null + "value": 0 }, { "color": "orange", @@ -1407,8 +1526,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -1416,7 +1538,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1457,9 +1579,9 @@ "x": 0, "y": 15 }, - "id": 10, + "id": 27, "panels": [], - "title": "Technical Debt", + "title": "Org Info", "type": "row" }, { @@ -1472,11 +1594,11 @@ "color": { "mode": "thresholds" }, - "displayName": "Deactivated Flows & VR", + "displayName": "Release Updates to check", "links": [ { "title": "Show details", - "url": 
"/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=METADATA_STATUS&${__url_time_range}&var-indicatorLabel=Deactivated Flow & VR" + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=RELEASE_UPDATES&${__url_time_range}&var-indicatorLabel=Release Updates to check" } ], "mappings": [], @@ -1485,7 +1607,7 @@ "steps": [ { "color": "green", - "value": null + "value": 0 }, { "color": "#EAB839", @@ -1493,7 +1615,7 @@ }, { "color": "red", - "value": 80 + "value": 10 } ] } @@ -1501,19 +1623,22 @@ "overrides": [] }, "gridPos": { - "h": 4, - "w": 6, + "h": 3, + "w": 5, "x": 0, "y": 16 }, - "id": 6, + "id": 34, "options": { "colorMode": "value", "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -1521,7 +1646,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1529,7 +1654,7 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "editorMode": "builder", - "expr": "{type=\"METADATA_STATUS\", orgIdentifier=\"$org\"} |= ``", + "expr": "{type=\"RELEASE_UPDATES\", orgIdentifier=\"$org\"} |= ``", "maxLines": 1, "queryType": "range", "refId": "A" @@ -1556,61 +1681,51 @@ "type": "loki", "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, + "description": "", "fieldConfig": { "defaults": { "color": { "mode": "thresholds" }, - "displayName": "Attributes without permissions", - "links": [ - { - "title": "Show detailsl", - "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=LINT_ACCESS&${__url_time_range}&var-indicatorLabel=Attributes without permissions" - } - ], "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null - }, - { - "color": "#EAB839", - "value": 1 - }, - { - 
"color": "red", - "value": 80 + "color": "blue", + "value": 0 } ] - } + }, + "unit": "days" }, "overrides": [] }, "gridPos": { - "h": 4, - "w": 6, - "x": 6, + "h": 3, + "w": 4, + "x": 5, "y": 16 }, - "id": 4, + "id": 29, "options": { "colorMode": "value", "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], - "fields": "", + "calcs": [ + "lastNotNull" + ], + "fields": "/^InstanceName$/", "values": false }, "showPercentChange": false, "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1618,12 +1733,13 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "editorMode": "builder", - "expr": "{type=\"LINT_ACCESS\", orgIdentifier=\"$org\"} |= ``", + "expr": "{type=\"ORG_INFO\", orgIdentifier=\"$org\"} |= ``", "maxLines": 1, "queryType": "range", "refId": "A" } ], + "title": "SF Instance", "transformations": [ { "id": "extractFields", @@ -1631,11 +1747,34 @@ "format": "json", "jsonPaths": [ { - "path": "metric" + "path": "_logElements" } ], + "keepTime": false, + "replace": true, "source": "Line" } + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "_logElements" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "InstanceName" + } + ], + "replace": true, + "source": "0" + } } ], "type": "stat" @@ -1645,61 +1784,51 @@ "type": "loki", "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, + "description": "", "fieldConfig": { "defaults": { "color": { "mode": "thresholds" }, - "displayName": "Unused Metadatas", - "links": [ - { - "title": "Show details", - "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=UNUSED_METADATAS&${__url_time_range}&var-indicatorLabel=Unused Metadatas" - } - ], "mappings": [], "thresholds": { "mode": "absolute", 
"steps": [ { - "color": "green", - "value": null - }, - { - "color": "#EAB839", - "value": 1 - }, - { - "color": "red", - "value": 80 + "color": "blue", + "value": 0 } ] - } + }, + "unit": "days" }, "overrides": [] }, "gridPos": { - "h": 4, - "w": 6, - "x": 12, + "h": 3, + "w": 4, + "x": 9, "y": 16 }, - "id": 3, + "id": 33, "options": { "colorMode": "value", "graphMode": "area", - "justifyMode": "center", + "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], - "fields": "", + "calcs": [ + "lastNotNull" + ], + "fields": "/^OrganizationType$/", "values": false }, "showPercentChange": false, "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1707,12 +1836,13 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "editorMode": "builder", - "expr": "{type=\"UNUSED_METADATAS\", orgIdentifier=\"$org\"} |= ``", + "expr": "{type=\"ORG_INFO\", orgIdentifier=\"$org\"} |= ``", "maxLines": 1, "queryType": "range", "refId": "A" } ], + "title": "Org Type", "transformations": [ { "id": "extractFields", @@ -1720,11 +1850,34 @@ "format": "json", "jsonPaths": [ { - "path": "metric" + "path": "_logElements" } ], + "keepTime": false, + "replace": true, "source": "Line" } + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "_logElements" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "OrganizationType" + } + ], + "replace": true, + "source": "0" + } } ], "type": "stat" @@ -1734,57 +1887,51 @@ "type": "loki", "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, + "description": "", "fieldConfig": { "defaults": { "color": { "mode": "thresholds" }, - "displayName": "Fields without description", - "links": [ - { - "title": "Show details", - "url": 
"/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=MISSING_ATTRIBUTES&${__url_time_range}&var-indicatorLabel=Fields without description" - } - ], "mappings": [], "thresholds": { "mode": "absolute", "steps": [ { - "color": "green", - "value": null - }, - { - "color": "#EAB839", - "value": 1 + "color": "blue", + "value": 0 } ] - } + }, + "unit": "days" }, "overrides": [] }, "gridPos": { - "h": 4, - "w": 6, - "x": 18, + "h": 3, + "w": 5, + "x": 13, "y": 16 }, - "id": 5, + "id": 32, "options": { "colorMode": "value", "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], - "fields": "", + "calcs": [ + "lastNotNull" + ], + "fields": "/.*/", "values": false }, "showPercentChange": false, "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1792,12 +1939,13 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "editorMode": "builder", - "expr": "{type=\"MISSING_ATTRIBUTES\", orgIdentifier=\"$org\"} |= ``", + "expr": "{type=\"ORG_INFO\", orgIdentifier=\"$org\"} |= ``", "maxLines": 1, "queryType": "range", "refId": "A" } ], + "title": "Next Version", "transformations": [ { "id": "extractFields", @@ -1805,27 +1953,751 @@ "format": "json", "jsonPaths": [ { - "path": "metric" + "path": "_logElements" } ], + "keepTime": false, + "replace": true, "source": "Line" } - } - ], - "type": "stat" - }, - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 20 - }, - "id": 23, - "panels": [], - "title": "Licenses", - "type": "row" + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "_logElements" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "maintenanceNextUpgrade" + } + ], + "keepTime": false, + "replace": true, + "source": "0" 
+ } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "alias": "Next Version", + "path": "name" + } + ], + "keepTime": false, + "replace": true, + "source": "maintenanceNextUpgrade" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": 0 + } + ] + }, + "unit": "days" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 5, + "x": 18, + "y": 16 + }, + "id": 31, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "/.*/", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"ORG_INFO\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "title": "Next Upgrade", + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "_logElements" + } + ], + "keepTime": false, + "replace": true, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "_logElements" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "maintenanceNextUpgrade" + } + ], + "keepTime": false, + "replace": true, + "source": "0" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "alias": "Upgrade Date", + "path": 
"plannedStartTime" + } + ], + "keepTime": false, + "replace": true, + "source": "maintenanceNextUpgrade" + } + }, + { + "id": "convertFieldType", + "options": { + "conversions": [ + { + "dateFormat": "", + "destinationType": "time", + "targetField": "Upgrade Date" + } + ], + "fields": {} + } + } + ], + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 19 + }, + "id": 10, + "panels": [], + "title": "Technical Debt", + "type": "row" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Deactivated Flows & VR", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=METADATA_STATUS&${__url_time_range}&var-indicatorLabel=Deactivated Flow & VR" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + "color": "#EAB839", + "value": 1 + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 0, + "y": 20 + }, + "id": 6, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"METADATA_STATUS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], 
+ "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Unused Connected Apps", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=CONNECTED_APPS&${__url_time_range}&var-indicatorLabel=Unused Connected Apps" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + "color": "#EAB839", + "value": 1 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 4, + "y": 20 + }, + "id": 36, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"CONNECTED_APPS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Unused Apex Classes", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=UNUSED_APEX_CLASSES&${__url_time_range}&var-indicatorLabel=Unused Apex Classes" + } 
+ ],
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": 0
+ },
+ {
+ "color": "#EAB839",
+ "value": 1
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 8,
+ "y": 20
+ },
+ "id": 35,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "percentChangeColorMode": "standard",
+ "reduceOptions": {
+ "calcs": [
+ "lastNotNull"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "showPercentChange": false,
+ "textMode": "auto",
+ "wideLayout": true
+ },
+ "pluginVersion": "12.2.0-17245430286.patch2",
+ "targets": [
+ {
+ "datasource": {
+ "type": "loki",
+ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}"
+ },
+ "editorMode": "builder",
+ "expr": "{type=\"UNUSED_APEX_CLASSES\", orgIdentifier=\"$org\"} |= ``",
+ "maxLines": 1,
+ "queryType": "range",
+ "refId": "A"
+ }
+ ],
+ "transformations": [
+ {
+ "id": "extractFields",
+ "options": {
+ "format": "json",
+ "jsonPaths": [
+ {
+ "path": "metric"
+ }
+ ],
+ "source": "Line"
+ }
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": {
+ "type": "loki",
+ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "displayName": "Attributes without permissions",
+ "links": [
+ {
+ "title": "Show details",
+ "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=LINT_ACCESS&${__url_time_range}&var-indicatorLabel=Attributes without permissions"
+ }
+ ],
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": 0
+ },
+ {
+ "color": "#EAB839",
+ "value": 1
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 12,
+ "y": 20
+ },
+ "id": 4,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "percentChangeColorMode": 
"standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"LINT_ACCESS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Unused Metadatas", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=UNUSED_METADATAS&${__url_time_range}&var-indicatorLabel=Unused Metadatas" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + "color": "#EAB839", + "value": 1 + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 3, + "x": 16, + "y": 20 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"UNUSED_METADATAS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 
1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Fields without description", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=MISSING_ATTRIBUTES&${__url_time_range}&var-indicatorLabel=Fields without description" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": 0 + }, + { + "color": "#EAB839", + "value": 1 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 19, + "y": 20 + }, + "id": 5, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"MISSING_ATTRIBUTES\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 24 + }, + "id": 23, + "panels": [], + "title": "Licenses & Packages", + "type": "row" }, { "datasource": { @@ -1842,7 +2714,11 @@ 
"cellOptions": { "type": "auto" }, - "inspect": false + "inspect": false, + "tooltip": { + "placement": "auto" + }, + "wrapHeaderText": false }, "mappings": [], "thresholds": { @@ -1850,7 +2726,7 @@ "steps": [ { "color": "blue", - "value": null + "value": 0 } ] } @@ -1891,14 +2767,38 @@ "value": 160 } ] + }, + { + "matcher": { + "id": "byName", + "options": "Total available" + }, + "properties": [ + { + "id": "custom.width", + "value": 220 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Used" + }, + "properties": [ + { + "id": "custom.width", + "value": 181 + } + ] } ] }, "gridPos": { - "h": 13, + "h": 11, "w": 12, "x": 0, - "y": 21 + "y": 25 }, "id": 25, "options": { @@ -1906,7 +2806,9 @@ "footer": { "countRows": false, "fields": "", - "reducer": ["sum"], + "reducer": [ + "sum" + ], "show": false }, "showHeader": true, @@ -1917,7 +2819,7 @@ } ] }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -1957,7 +2859,9 @@ { "id": "reduce", "options": { - "reducers": ["allValues"] + "reducers": [ + "allValues" + ] } }, { @@ -2033,7 +2937,11 @@ "cellOptions": { "type": "auto" }, - "inspect": false + "inspect": false, + "tooltip": { + "placement": "auto" + }, + "wrapHeaderText": false }, "mappings": [], "thresholds": { @@ -2041,7 +2949,7 @@ "steps": [ { "color": "blue", - "value": null + "value": 0 } ] } @@ -2082,14 +2990,38 @@ "value": 160 } ] + }, + { + "matcher": { + "id": "byName", + "options": "Total available" + }, + "properties": [ + { + "id": "custom.width", + "value": 199 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Used" + }, + "properties": [ + { + "id": "custom.width", + "value": 190 + } + ] } ] }, "gridPos": { - "h": 13, + "h": 11, "w": 12, "x": 12, - "y": 21 + "y": 25 }, "id": 24, "options": { @@ -2097,7 +3029,9 @@ "footer": { "countRows": false, "fields": "", - "reducer": ["sum"], + "reducer": [ + "sum" + ], "show": false }, "showHeader": true, @@ -2108,7 
+3042,7 @@ } ] }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "12.2.0-17245430286.patch2", "targets": [ { "datasource": { @@ -2148,7 +3082,9 @@ { "id": "reduce", "options": { - "reducers": ["allValues"] + "reducers": [ + "allValues" + ] } }, { @@ -2208,11 +3144,251 @@ } ], "type": "table" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false, + "tooltip": { + "placement": "auto" + }, + "wrapHeaderText": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": 0 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "MasterLabel" + }, + "properties": [ + { + "id": "custom.width", + "value": 248 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "UsedLicenses" + }, + "properties": [ + { + "id": "custom.width", + "value": 148 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "TotalLicenses" + }, + "properties": [ + { + "id": "custom.width", + "value": 160 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Package name" + }, + "properties": [ + { + "id": "custom.width", + "value": 276 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Namespace" + }, + "properties": [ + { + "id": "custom.width", + "value": 338 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Version Name" + }, + "properties": [ + { + "id": "custom.width", + "value": 284 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Installed package" + }, + "properties": [ + { + "id": "custom.width", + "value": 431 + } + ] + } + ] + }, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 36 + }, + "id": 26, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": 
false + }, + "showHeader": true, + "sortBy": [ + { + "desc": false, + "displayName": "Installed package" + } + ] + }, + "pluginVersion": "12.2.0-17245430286.patch2", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{orgIdentifier=\"$org\", type=\"BACKUP\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "installedPackages" + } + ], + "keepTime": false, + "replace": true, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "keepTime": false, + "replace": true, + "source": "installedPackages" + } + }, + { + "id": "reduce", + "options": { + "labelsToFields": false, + "reducers": [ + "allValues" + ] + } + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "All values" + } + }, + { + "id": "extractFields", + "options": { + "replace": true, + "source": "0" + } + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Name": true, + "SubscriberPackageVersionId": true, + "type": true + }, + "includeByName": {}, + "indexByName": { + "MasterLabel": 0, + "Name": 3, + "TotalLicenses": 2, + "UsedLicenses": 1, + "type": 4 + }, + "renameByName": { + "MasterLabel": "User License", + "SubscriberPackageName": "Installed package", + "SubscriberPackageNamespace": "Namespace", + "SubscriberPackageVersionId": "Version Id", + "SubscriberPackageVersionName": "Version Name", + "SubscriberPackageVersionNumber": "Version Number", + "TotalLicenses": "Total available", + "UsedLicenses": "Used" + } + } + } + ], + "type": "table" } ], "refresh": "", - "schemaVersion": 39, - "tags": ["sfdx-hardis", "salesforce", "monitoring"], + "schemaVersion": 41, + "tags": [ + "sfdx-hardis", + "salesforce", + "monitoring" + ], "templating": { "list": [ { @@ -2222,10 +3398,8 @@ "uid": 
"${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "definition": "", - "hide": 0, "includeAll": false, "label": "Salesforce Org", - "multi": false, "name": "org", "options": [], "query": { @@ -2236,7 +3410,6 @@ }, "refresh": 1, "regex": "^[^.]*$", - "skipUrlSync": false, "sort": 1, "type": "query" } @@ -2246,11 +3419,11 @@ "from": "now-7d", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, "timepicker": {}, "timezone": "browser", "title": "DASH - 00_Home", "uid": "sfdx-hardis-today-summary", - "version": 91, - "weekStart": "" -} + "version": 117, + "weekStart": "", + "id": null +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DASH - Licenses for all orgs.json b/docs/grafana/dashboards/DASH - Licenses for all orgs.json new file mode 100644 index 000000000..c9cdab457 --- /dev/null +++ b/docs/grafana/dashboards/DASH - Licenses for all orgs.json @@ -0,0 +1,498 @@ +{ + "__inputs": [ + { + "name": "DS_GRAFANACLOUD-CLOUDITY-LOGS", + "label": "grafanacloud-cloudity-logs", + "description": "", + "type": "datasource", + "pluginId": "loki", + "pluginName": "Loki" + } + ], + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "11.3.0-75420" + }, + { + "type": "datasource", + "id": "loki", + "name": "Loki", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "table", + "name": "Table", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 2, + "panels": [], + "repeat": "org", + "title": "$org", + "type": "row" + }, + { + "datasource": { + "type": "loki", + "uid": 
"${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "MasterLabel" + }, + "properties": [ + { + "id": "custom.width", + "value": 248 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "UsedLicenses" + }, + "properties": [ + { + "id": "custom.width", + "value": 148 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "TotalLicenses" + }, + "properties": [ + { + "id": "custom.width", + "value": 160 + } + ] + } + ] + }, + "gridPos": { + "h": 14, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 1, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Used" + } + ] + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{orgIdentifier=\"$org\", type=\"LICENSES\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "_logElements" + } + ], + "keepTime": false, + "replace": true, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "keepTime": false, + "replace": true, + "source": "_logElements" + } + }, + { + "id": "reduce", + "options": { + "reducers": [ + "allValues" + ] + } + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "All values" + } + }, + { + "id": "extractFields", + "options": { + "replace": true, + "source": 
"0" + } + }, + { + "id": "filterByValue", + "options": { + "filters": [ + { + "config": { + "id": "equal", + "options": { + "value": "PermissionSetLicense" + } + }, + "fieldName": "type" + } + ], + "match": "any", + "type": "include" + } + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Name": true, + "type": true + }, + "includeByName": {}, + "indexByName": { + "MasterLabel": 0, + "Name": 3, + "TotalLicenses": 2, + "UsedLicenses": 1, + "type": 4 + }, + "renameByName": { + "MasterLabel": "Permission Set License", + "TotalLicenses": "Total available", + "UsedLicenses": "Used" + } + } + } + ], + "type": "table" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "MasterLabel" + }, + "properties": [ + { + "id": "custom.width", + "value": 248 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "UsedLicenses" + }, + "properties": [ + { + "id": "custom.width", + "value": 148 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "TotalLicenses" + }, + "properties": [ + { + "id": "custom.width", + "value": 160 + } + ] + } + ] + }, + "gridPos": { + "h": 14, + "w": 12, + "x": 12, + "y": 1 + }, + "id": 3, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "Used" + } + ] + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{orgIdentifier=\"$org\", 
type=\"LICENSES\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "_logElements" + } + ], + "keepTime": false, + "replace": true, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "keepTime": false, + "replace": true, + "source": "_logElements" + } + }, + { + "id": "reduce", + "options": { + "reducers": [ + "allValues" + ] + } + }, + { + "id": "extractFields", + "options": { + "format": "auto", + "replace": true, + "source": "All values" + } + }, + { + "id": "extractFields", + "options": { + "replace": true, + "source": "0" + } + }, + { + "id": "filterByValue", + "options": { + "filters": [ + { + "config": { + "id": "equal", + "options": { + "value": "UserLicense" + } + }, + "fieldName": "type" + } + ], + "match": "any", + "type": "include" + } + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Name": true, + "type": true + }, + "includeByName": {}, + "indexByName": { + "MasterLabel": 0, + "Name": 3, + "TotalLicenses": 2, + "UsedLicenses": 1, + "type": 4 + }, + "renameByName": { + "MasterLabel": "User License", + "TotalLicenses": "Total available", + "UsedLicenses": "Used" + } + } + } + ], + "type": "table" + } + ], + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "definition": "", + "includeAll": true, + "multi": true, + "name": "org", + "options": [], + "query": { + "label": "orgIdentifier", + "refId": "LokiVariableQueryEditor-VariableQuery", + "stream": "{source=\"sfdx-hardis\"}", + "type": 1 + }, + "refresh": 1, + "regex": "^(?!.*\\..*)(?!.*sandbox.*).*$", + "sort": 1, + "type": "query" + } + ] + }, + "time": { + "from": "now-30d", + "to": "now" + }, + "timepicker": {}, + "timezone": "browser", + "title": "DASH - Licenses for all orgs", + "uid": 
"sfdx-hardis-licenses-all-orgs", + "version": 12, + "weekStart": "" +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DASH - Licenses.json b/docs/grafana/dashboards/DASH - Licenses.json new file mode 100644 index 000000000..95e93acc5 --- /dev/null +++ b/docs/grafana/dashboards/DASH - Licenses.json @@ -0,0 +1,900 @@ +{ + "__inputs": [ + { + "name": "DS_GRAFANACLOUD-CLOUDITY-LOGS", + "label": "grafanacloud-cloudity-logs", + "description": "", + "type": "datasource", + "pluginId": "loki", + "pluginName": "Loki" + } + ], + "__elements": {}, + "__requires": [ + { + "type": "panel", + "id": "gauge", + "name": "Gauge", + "version": "" + }, + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "11.3.0-75420" + }, + { + "type": "datasource", + "id": "loki", + "name": "Loki", + "version": "1.0.0" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 41, + "panels": [], + "title": "All Orgs", + "type": "row" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "$org", + "fieldMinMax": true, + "links": [ + { + "title": "View all licenses for ${__data.fields[\"Salesforce Org Identifier\"]}", + "url": "/d/sfdx-hardis-licenses-all-orgs/dash-licenses-for-all-orgs?var-org=${__data.fields[\"Salesforce Org Identifier\"]}" + } + ], + "mappings": [], + "max": 5000, + "min": 0, + "noValue": "No stat", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "semi-dark-blue", + "value": 
null + }, + { + "color": "light-blue", + "value": 20 + }, + { + "color": "light-green", + "value": 50 + }, + { + "color": "light-yellow", + "value": 100 + }, + { + "color": "light-orange", + "value": 500 + }, + { + "color": "light-red", + "value": 1000 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "MasterLabel" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "UsedLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "TotalLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "usedLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "activeLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Line" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "labels" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Salesforce Org Identifier" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Active Licenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Analytics Cloud Integration User" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Salesforce Licenses" + }, + "properties": [] + } + ] + }, + "gridPos": { + "h": 4, + "w": 24, + "x": 0, + "y": 1 + }, + "id": 1, + "maxPerRow": 6, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.2.0-73451", + "repeat": "org", + "repeatDirection": "h", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"LICENSES\", orgIdentifier=\"$org\"}", + "maxLines": 1, 
+ "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "licenses" + } + ], + "keepTime": false, + "replace": false, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "Salesforce" + } + ], + "replace": true, + "source": "licenses" + } + } + ], + "type": "gauge" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 29 + }, + "id": 80, + "panels": [], + "title": "Total", + "type": "row" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Salesforce Licenses", + "fieldMinMax": true, + "links": [ + { + "title": "View all licenses for ${__data.fields[\"Salesforce Org Identifier\"]}", + "url": "/d/sfdx-hardis-licenses-all-orgs/dash-licenses-for-all-orgs?var-org=${__data.fields[\"Salesforce Org Identifier\"]}" + } + ], + "mappings": [], + "max": 5000, + "min": 0, + "noValue": "No stat", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "light-yellow", + "value": null + }, + { + "color": "semi-dark-yellow", + "value": 20 + }, + { + "color": "super-light-green", + "value": 50 + }, + { + "color": "light-green", + "value": 100 + }, + { + "color": "super-light-purple", + "value": 500 + }, + { + "color": "dark-purple", + "value": 1000 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "MasterLabel" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "UsedLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "TotalLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "usedLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "activeLicenses" + }, + 
"properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Line" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "labels" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Salesforce Org Identifier" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Active Licenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Analytics Cloud Integration User" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Salesforce Licenses" + }, + "properties": [] + } + ] + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 0, + "y": 30 + }, + "id": 40, + "maxPerRow": 6, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "/.*/", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"LICENSES\"}", + "maxLines": 1000, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "licenses" + } + ], + "keepTime": false, + "replace": false, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "Salesforce" + }, + { + "path": "Salesforce Platform" + } + ], + "keepTime": false, + "replace": false, + "source": "licenses" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "orgIdentifier" + } + ], + "keepTime": false, + "replace": false, + "source": "labels" + } + }, + { + "id": "filterByValue", + "options": { + "filters": [ + { + "config": { + "id": "substring", + "options": { + "value": 
"sandbox" + } + }, + "fieldName": "orgIdentifier" + } + ], + "match": "any", + "type": "exclude" + } + }, + { + "id": "groupBy", + "options": { + "fields": { + "Salesforce": { + "aggregations": [ + "lastNotNull" + ], + "operation": "aggregate" + }, + "Salesforce Platform": { + "aggregations": [ + "lastNotNull" + ], + "operation": "aggregate" + }, + "orgIdentifier": { + "aggregations": [], + "operation": "groupby" + } + } + } + }, + { + "id": "calculateField", + "options": { + "mode": "reduceRow", + "reduce": { + "include": [ + "Salesforce (lastNotNull)" + ], + "reducer": "sum" + }, + "replaceFields": false + } + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Salesforce (lastNotNull)": true, + "Salesforce Platform (lastNotNull)": true, + "orgIdentifier": true + }, + "includeByName": {}, + "indexByName": {}, + "renameByName": {} + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Salesforce Platform Licenses", + "fieldMinMax": true, + "links": [ + { + "title": "View all licenses for ${__data.fields[\"Salesforce Org Identifier\"]}", + "url": "/d/sfdx-hardis-licenses-all-orgs/dash-licenses-for-all-orgs?var-org=${__data.fields[\"Salesforce Org Identifier\"]}" + } + ], + "mappings": [], + "max": 5000, + "min": 0, + "noValue": "No stat", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "light-yellow", + "value": null + }, + { + "color": "semi-dark-yellow", + "value": 20 + }, + { + "color": "super-light-green", + "value": 50 + }, + { + "color": "light-green", + "value": 100 + }, + { + "color": "super-light-purple", + "value": 500 + }, + { + "color": "dark-purple", + "value": 1000 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "MasterLabel" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "UsedLicenses" + 
}, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "TotalLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "usedLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "activeLicenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Line" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "labels" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Salesforce Org Identifier" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Active Licenses" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Analytics Cloud Integration User" + }, + "properties": [] + }, + { + "matcher": { + "id": "byName", + "options": "Salesforce Licenses" + }, + "properties": [] + } + ] + }, + "gridPos": { + "h": 4, + "w": 4, + "x": 4, + "y": 30 + }, + "id": 119, + "maxPerRow": 6, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"LICENSES\"}", + "maxLines": 1000, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "orgIdentifier" + } + ], + "keepTime": false, + "replace": false, + "source": "labels" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "licenses" + } + ], + "keepTime": false, + "replace": false, + "source": "Line" + } + }, + { + "id": "extractFields", + "options": { + "format": "json", + 
"jsonPaths": [ + { + "path": "Salesforce" + }, + { + "path": "Salesforce Platform" + } + ], + "keepTime": false, + "replace": false, + "source": "licenses" + } + }, + { + "id": "groupBy", + "options": { + "fields": { + "Salesforce": { + "aggregations": [ + "lastNotNull" + ], + "operation": "aggregate" + }, + "Salesforce Platform": { + "aggregations": [ + "lastNotNull" + ], + "operation": "aggregate" + }, + "orgIdentifier": { + "aggregations": [], + "operation": "groupby" + } + } + } + }, + { + "id": "filterByValue", + "options": { + "filters": [ + { + "config": { + "id": "equal", + "options": { + "value": "Not Found" + } + }, + "fieldName": "Salesforce Platform (lastNotNull)" + }, + { + "config": { + "id": "substring", + "options": { + "value": "sandbox" + } + }, + "fieldName": "orgIdentifier" + } + ], + "match": "any", + "type": "exclude" + } + }, + { + "id": "calculateField", + "options": { + "mode": "reduceRow", + "reduce": { + "include": [ + "Salesforce Platform (lastNotNull)" + ], + "reducer": "sum" + }, + "replaceFields": false + } + }, + { + "id": "organize", + "options": { + "excludeByName": { + "Salesforce (lastNotNull)": true, + "Salesforce Platform (lastNotNull)": true, + "orgIdentifier": false + }, + "includeByName": {}, + "indexByName": {}, + "renameByName": {} + } + } + ], + "type": "gauge" + } + ], + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "definition": "", + "includeAll": true, + "multi": true, + "name": "org", + "options": [], + "query": { + "label": "orgIdentifier", + "refId": "LokiVariableQueryEditor-VariableQuery", + "stream": "", + "type": 1 + }, + "refresh": 1, + "regex": "^(?!.*\\..*)(?!.*sandbox.*).*$", + "type": "query" + } + ] + }, + "time": { + "from": "now-30d", + "to": "now" + }, + "timepicker": {}, + "timezone": "browser", + "title": "DASH - Licenses", + "uid": "ddp6qgcjgk1dsd", + "version": 13, + 
"weekStart": "" +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DASH - Tech Debt for all orgs.json b/docs/grafana/dashboards/DASH - Tech Debt for all orgs.json new file mode 100644 index 000000000..9b07136ab --- /dev/null +++ b/docs/grafana/dashboards/DASH - Tech Debt for all orgs.json @@ -0,0 +1,478 @@ +{ + "__inputs": [ + { + "name": "DS_GRAFANACLOUD-CLOUDITY-LOGS", + "label": "grafanacloud-cloudity-logs", + "description": "", + "type": "datasource", + "pluginId": "loki", + "pluginName": "Loki" + } + ], + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "11.3.0-75420" + }, + { + "type": "datasource", + "id": "loki", + "name": "Loki", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "stat", + "name": "Stat", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 10, + "panels": [], + "repeat": "org", + "title": "$org", + "type": "row" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Deactivated Flows & VR", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=METADATA_STATUS&${__url_time_range}&var-indicatorLabel=Deactivated Flow & VR" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 1 + }, + { + "color": "red", + 
"value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 1 + }, + "id": 6, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"METADATA_STATUS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Attributes without permissions", + "links": [ + { + "title": "Show detailsl", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=LINT_ACCESS&${__url_time_range}&var-indicatorLabel=Attributes without permissions" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 1 + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 1 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + 
"wideLayout": true + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"LINT_ACCESS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Unused Metadatas", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=UNUSED_METADATAS&${__url_time_range}&var-indicatorLabel=Unused Metadatas" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 1 + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 1 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"UNUSED_METADATAS\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + 
], + "source": "Line" + } + } + ], + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "Fields without description", + "links": [ + { + "title": "Show details", + "url": "/d/sfdx-hardis-indicator-details/02-indicator-details?${org:queryparam}&var-type=MISSING_ATTRIBUTES&${__url_time_range}&var-indicatorLabel=Fields without description" + } + ], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 1 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 1 + }, + "id": 5, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "center", + "orientation": "auto", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.0-75420", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "editorMode": "builder", + "expr": "{type=\"MISSING_ATTRIBUTES\", orgIdentifier=\"$org\"} |= ``", + "maxLines": 1, + "queryType": "range", + "refId": "A" + } + ], + "transformations": [ + { + "id": "extractFields", + "options": { + "format": "json", + "jsonPaths": [ + { + "path": "metric" + } + ], + "source": "Line" + } + } + ], + "type": "stat" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": [ + "sfdx-hardis", + "salesforce", + "monitoring" + ], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "loki", + "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" + }, + "definition": "", + "includeAll": true, + "label": "Salesforce Org", + "multi": true, + "name": "org", + "options": [], + "query": { + "label": "orgIdentifier", + 
"refId": "LokiVariableQueryEditor-VariableQuery", + "stream": "{source=\"sfdx-hardis\"}", + "type": 1 + }, + "refresh": 1, + "regex": "^[^.]*$", + "sort": 1, + "type": "query" + } + ] + }, + "time": { + "from": "now-7d", + "to": "now" + }, + "timepicker": {}, + "timezone": "browser", + "title": "DASH - Tech Debt for all orgs", + "uid": "sfdx-hardis-tech-debt-all-orgs", + "version": 7, + "weekStart": "" +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DTL - Active Users.json b/docs/grafana/dashboards/DTL - Active Users.json index fb9e11b00..f2375ef57 100644 --- a/docs/grafana/dashboards/DTL - Active Users.json +++ b/docs/grafana/dashboards/DTL - Active Users.json @@ -29,7 +29,7 @@ "type": "grafana", "id": "grafana", "name": "Grafana", - "version": "11.1.0-70903" + "version": "11.3.0-75420" }, { "type": "datasource", @@ -92,6 +92,7 @@ ], "panels": [ { + "collapsed": false, "gridPos": { "h": 1, "w": 24, @@ -101,7 +102,6 @@ "id": 6, "panels": [], "repeat": "org", - "repeatDirection": "h", "title": "$org", "type": "row" }, @@ -147,8 +147,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -156,7 +159,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -209,6 +212,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 20, "gradientMode": "none", @@ -268,7 +272,7 @@ "sort": "none" } }, - "pluginVersion": "11.1.0-69950", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -340,8 +344,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": 
"/^_dateTime$/", "values": false }, @@ -352,7 +359,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -627,7 +634,9 @@ "footer": { "countRows": false, "fields": "", - "reducer": ["sum"], + "reducer": [ + "sum" + ], "show": false }, "showHeader": true, @@ -638,7 +647,7 @@ } ] }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -681,7 +690,9 @@ "includeTimeField": false, "labelsToFields": false, "mode": "seriesToRows", - "reducers": ["allValues"] + "reducers": [ + "allValues" + ] } }, { @@ -710,7 +721,9 @@ "aggregations": [] }, "Profile.UserLicense.LicenseDefinitionKey": { - "aggregations": ["count"], + "aggregations": [ + "count" + ], "operation": "aggregate" }, "Profile.UserLicense.Name": { @@ -750,7 +763,7 @@ "cellOptions": { "type": "auto" }, - "inspect": false + "inspect": true }, "mappings": [], "thresholds": { @@ -974,7 +987,9 @@ "footer": { "countRows": false, "fields": "", - "reducer": ["sum"], + "reducer": [ + "sum" + ], "show": false }, "showHeader": true, @@ -985,7 +1000,7 @@ } ] }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -1028,7 +1043,9 @@ "includeTimeField": false, "labelsToFields": false, "mode": "seriesToRows", - "reducers": ["allValues"] + "reducers": [ + "allValues" + ] } }, { @@ -1096,7 +1113,6 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "definition": "", - "hide": 0, "includeAll": true, "label": "Salesforce Org", "multi": true, @@ -1110,7 +1126,6 @@ }, "refresh": 1, "regex": "^(?!.*\\..*)(?!.*sandbox.*).*$", - "skipUrlSync": false, "sort": 1, "type": "query" }, @@ -1119,7 +1134,7 @@ "label": "Type", "name": "type", "query": "${VAR_TYPE}", - "skipUrlSync": false, + "skipUrlSync": true, "type": "constant", "current": { "value": "${VAR_TYPE}", @@ -1136,10 +1151,9 @@ }, { "hide": 2, - "label": "", "name": "indicatorLabel", "query": 
"${VAR_INDICATORLABEL}", - "skipUrlSync": false, + "skipUrlSync": true, "type": "constant", "current": { "value": "${VAR_INDICATORLABEL}", @@ -1160,13 +1174,10 @@ "from": "now-7d", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"] - }, + "timepicker": {}, "timezone": "browser", "title": "DTL - Active Users", "uid": "sfdx-hardis-dtl-active-users", - "version": 5, + "version": 7, "weekStart": "" -} +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DTL - Indicator Details.json b/docs/grafana/dashboards/DTL - Indicator Details.json index 4296390cd..e062aa2b0 100644 --- a/docs/grafana/dashboards/DTL - Indicator Details.json +++ b/docs/grafana/dashboards/DTL - Indicator Details.json @@ -15,7 +15,7 @@ "type": "grafana", "id": "grafana", "name": "Grafana", - "version": "11.1.0-70903" + "version": "11.3.0-75420" }, { "type": "datasource", @@ -78,6 +78,7 @@ ], "panels": [ { + "collapsed": false, "gridPos": { "h": 1, "w": 24, @@ -87,7 +88,6 @@ "id": 6, "panels": [], "repeat": "org", - "repeatDirection": "h", "title": "$org", "type": "row" }, @@ -133,8 +133,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -142,7 +145,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -195,6 +198,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 20, "gradientMode": "none", @@ -254,7 +258,7 @@ "sort": "none" } }, - "pluginVersion": "11.1.0-69950", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -326,8 +330,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + 
"percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "/^_dateTime$/", "values": false }, @@ -338,7 +345,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -364,6 +371,9 @@ }, { "path": "_dateTime" + }, + { + "path": "_logElementsTruncated" } ], "keepTime": false, @@ -401,7 +411,7 @@ "cellOptions": { "type": "auto" }, - "inspect": false + "inspect": true }, "mappings": [], "thresholds": { @@ -598,6 +608,66 @@ "value": 125 } ] + }, + { + "matcher": { + "id": "byName", + "options": "StepStage" + }, + "properties": [ + { + "id": "custom.width", + "value": 97 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Status" + }, + "properties": [ + { + "id": "custom.width", + "value": 99 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Category" + }, + "properties": [ + { + "id": "custom.width", + "value": 103 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "DurableId" + }, + "properties": [ + { + "id": "custom.width", + "value": 216 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Title" + }, + "properties": [ + { + "id": "custom.width", + "value": 479 + } + ] } ] }, @@ -613,18 +683,15 @@ "footer": { "countRows": false, "fields": "", - "reducer": ["sum"], + "reducer": [ + "sum" + ], "show": false }, "showHeader": true, - "sortBy": [ - { - "desc": true, - "displayName": "Suspect" - } - ] + "sortBy": [] }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -667,7 +734,9 @@ "includeTimeField": false, "labelsToFields": false, "mode": "seriesToRows", - "reducers": ["allValues"] + "reducers": [ + "allValues" + ] } }, { @@ -735,7 +804,6 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "definition": "", - "hide": 0, "includeAll": true, "label": "Salesforce Org", "multi": true, @@ -749,7 +817,6 @@ }, 
"refresh": 1, "regex": "^[^.]*$", - "skipUrlSync": false, "sort": 1, "type": "query" }, @@ -763,7 +830,6 @@ "hide": 2, "includeAll": false, "label": "Type", - "multi": false, "name": "type", "options": [], "query": { @@ -774,28 +840,23 @@ }, "refresh": 1, "regex": "", - "skipUrlSync": false, - "sort": 0, "type": "query" }, { "current": { - "selected": false, - "text": "Updated metadatas in org", - "value": "Updated metadatas in org" + "text": "Suspect Setup Actions", + "value": "Suspect Setup Actions" }, "hide": 2, - "label": "", "name": "indicatorLabel", "options": [ { "selected": true, - "text": "Indicator", - "value": "Indicator" + "text": "Suspect Setup Actions", + "value": "Suspect Setup Actions" } ], - "query": "Click on another dashboard to visit this dashboard", - "skipUrlSync": false, + "query": "Suspect Setup Actions", "type": "textbox" } ] @@ -804,13 +865,10 @@ "from": "now-7d", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"] - }, + "timepicker": {}, "timezone": "browser", "title": "DTL - Indicator Details", "uid": "sfdx-hardis-indicator-details", - "version": 41, + "version": 44, "weekStart": "" -} +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DTL - Indicator Evolution (Long time).json b/docs/grafana/dashboards/DTL - Indicator Evolution (Long time).json index 2573dca0f..c785b8c6c 100644 --- a/docs/grafana/dashboards/DTL - Indicator Evolution (Long time).json +++ b/docs/grafana/dashboards/DTL - Indicator Evolution (Long time).json @@ -23,7 +23,7 @@ "type": "grafana", "id": "grafana", "name": "Grafana", - "version": "11.1.0-70903" + "version": "11.3.0-75420" }, { "type": "datasource", @@ -90,7 +90,6 @@ "id": 7, "panels": [], "repeat": "org", - "repeatDirection": "h", "title": "$org", "type": "row" }, @@ -218,7 +217,6 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "definition": "", - "hide": 0, "includeAll": true, 
"label": "Salesforce Org", "multi": true, @@ -232,7 +230,6 @@ }, "refresh": 1, "regex": "^[^.]*$", - "skipUrlSync": false, "sort": 1, "type": "query" }, @@ -243,7 +240,6 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-PROM}" }, "definition": "metrics(_metric)", - "hide": 0, "includeAll": true, "label": "Limit Identifier", "multi": true, @@ -256,8 +252,6 @@ }, "refresh": 1, "regex": "", - "skipUrlSync": false, - "sort": 0, "type": "query" } ] @@ -266,13 +260,10 @@ "from": "now-7d", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"] - }, + "timepicker": {}, "timezone": "browser", "title": "DTL - Indicator Evolution (Long time)", "uid": "sfdx-hardis-indicator-evol-prom", "version": 17, "weekStart": "" -} +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DTL - Limits Details.json b/docs/grafana/dashboards/DTL - Limits Details.json index b7f42882a..6e1e587b0 100644 --- a/docs/grafana/dashboards/DTL - Limits Details.json +++ b/docs/grafana/dashboards/DTL - Limits Details.json @@ -15,7 +15,7 @@ "type": "grafana", "id": "grafana", "name": "Grafana", - "version": "11.1.0-70958" + "version": "11.3.0-75420" }, { "type": "datasource", @@ -67,6 +67,7 @@ ], "panels": [ { + "collapsed": false, "gridPos": { "h": 1, "w": 24, @@ -76,7 +77,6 @@ "id": 3, "panels": [], "repeat": "org", - "repeatDirection": "h", "title": "$org", "type": "row" }, @@ -204,7 +204,9 @@ "countRows": false, "enablePagination": false, "fields": "", - "reducer": ["sum"], + "reducer": [ + "sum" + ], "show": false }, "showHeader": true, @@ -215,7 +217,7 @@ } ] }, - "pluginVersion": "11.1.0-70958", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -255,7 +257,9 @@ "id": "reduce", "options": { "labelsToFields": false, - "reducers": ["allValues"] + "reducers": [ + "allValues" + ] } }, { @@ -341,7 +345,6 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "definition": "", - 
"hide": 0, "includeAll": true, "label": "Salesforce Org", "multi": true, @@ -355,7 +358,6 @@ }, "refresh": 1, "regex": "^[^.]*$", - "skipUrlSync": false, "sort": 1, "type": "query" } @@ -365,11 +367,10 @@ "from": "now-30d", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, "timepicker": {}, "timezone": "browser", "title": "DTL - Limits Details", "uid": "sfdx-hardis-limits-details", "version": 17, "weekStart": "" -} +} \ No newline at end of file diff --git a/docs/grafana/dashboards/DTL - Limits Evolution.json b/docs/grafana/dashboards/DTL - Limits Evolution.json index 5a753e444..98dda1baa 100644 --- a/docs/grafana/dashboards/DTL - Limits Evolution.json +++ b/docs/grafana/dashboards/DTL - Limits Evolution.json @@ -21,7 +21,7 @@ "type": "grafana", "id": "grafana", "name": "Grafana", - "version": "11.1.0-70903" + "version": "11.3.0-75420" }, { "type": "datasource", @@ -88,7 +88,6 @@ "id": 7, "panels": [], "repeat": "org", - "repeatDirection": "h", "title": "$org", "type": "row" }, @@ -136,7 +135,9 @@ "minVizWidth": 75, "orientation": "auto", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "", "values": false }, @@ -144,7 +145,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -244,6 +245,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 20, "gradientMode": "none", @@ -304,7 +306,7 @@ "sort": "none" } }, - "pluginVersion": "11.1.0-69950", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -424,8 +426,11 @@ "graphMode": "area", "justifyMode": "center", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { - "calcs": ["lastNotNull"], + "calcs": [ + "lastNotNull" + ], "fields": "/^_dateTime$/", "values": false }, @@ -436,7 +441,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": 
"11.1.0-70903", + "pluginVersion": "11.3.0-75420", "targets": [ { "datasource": { @@ -497,7 +502,6 @@ "uid": "${DS_GRAFANACLOUD-CLOUDITY-LOGS}" }, "definition": "", - "hide": 0, "includeAll": true, "label": "Salesforce Org", "multi": true, @@ -511,7 +515,6 @@ }, "refresh": 1, "regex": "^[^.]*$", - "skipUrlSync": false, "sort": 1, "type": "query" }, @@ -525,7 +528,6 @@ "hide": 2, "includeAll": false, "label": "Type", - "multi": false, "name": "type", "options": [], "query": { @@ -536,28 +538,23 @@ }, "refresh": 1, "regex": "", - "skipUrlSync": false, - "sort": 0, "type": "query" }, { "current": { - "selected": false, "text": "DataStorageMB", "value": "DataStorageMB" }, - "hide": 0, "label": "Limit Identifier", "name": "limitId", "options": [ { "selected": true, - "text": "", - "value": "" + "text": "DataStorageMB", + "value": "DataStorageMB" } ], "query": "DataStorageMB", - "skipUrlSync": false, "type": "textbox" } ] @@ -566,13 +563,10 @@ "from": "now-7d", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"] - }, + "timepicker": {}, "timezone": "browser", "title": "DTL - Limits Evolution", "uid": "sfdx-hardis-limits-evolution", "version": 24, "weekStart": "" -} +} \ No newline at end of file diff --git a/docs/hardis/auth/login.md b/docs/hardis/auth/login.md index e5a4364b8..0b1541bb1 100644 --- a/docs/hardis/auth/login.md +++ b/docs/hardis/auth/login.md @@ -1,27 +1,58 @@ - + # hardis:auth:login ## Description -Login to salesforce org + +## Command Behavior + +**Authenticates to a Salesforce org, primarily designed for CI/CD workflows.** + +This command facilitates secure and automated logins to Salesforce organizations within continuous integration and continuous delivery pipelines. It leverages pre-configured authentication details, ensuring that CI/CD processes can interact with Salesforce without manual intervention. 
+ +Key aspects: + +- **Configuration-Driven:** It relies on authentication variables and files set up by dedicated configuration commands: + - For CI/CD repositories: [Configure Org CI Authentication](https://sfdx-hardis.cloudity.com/hardis/project/configure/auth/) + - For Monitoring repositories: [Configure Org Monitoring](https://sfdx-hardis.cloudity.com/hardis/org/configure/monitoring/) +- **Technical Org Support:** Supports authentication to a 'technical org' (e.g., for calling Agentforce from another org) by utilizing the `SFDX_AUTH_URL_TECHNICAL_ORG` environment variable. If this variable is set, the command authenticates to this org with the alias `TECHNICAL_ORG`. + +To obtain the `SFDX_AUTH_URL_TECHNICAL_ORG` value, you can run `sf org display --verbose --json` and copy the `sfdxAuthUrl` field from the output. + +
+Technical explanations + +The command's technical flow involves: + +- **Flag Parsing:** It parses command-line flags such as `instanceurl`, `devhub`, `scratchorg`, and `debug` to determine the authentication context. +- **Authentication Hook:** It triggers an internal authentication hook (`this.config.runHook('auth', ...`)) which is responsible for executing the actual authentication logic based on the provided flags (e.g., whether it's a Dev Hub or a scratch org). +- **Environment Variable Check:** It checks for the presence of `SFDX_AUTH_URL_TECHNICAL_ORG` or `TECHNICAL_ORG_ALIAS` environment variables. +- **`authOrg` Utility:** If a technical org is configured, it calls the `authOrg` utility function to perform the authentication for that specific org, ensuring it's connected and available for subsequent operations. +- **Salesforce CLI Integration:** It integrates with the Salesforce CLI's authentication mechanisms to establish and manage org connections. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| devhub
-h | boolean | Also connect associated DevHub | | | | -| instanceurl
-r | option | URL of org instance | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| scratchorg
-s | boolean | Scratch org | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| devhub
-h | boolean | Also connect associated DevHub | | | | +| flags-dir | option | undefined | | | | +| instanceurl
-r | option | URL of org instance | | | | +| json | boolean | Format output as json. | | | | +| scratchorg
-s | boolean | Scratch org | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:auth:login +$ sf hardis:auth:login +``` + +```shell +CI=true sf hardis:auth:login ``` diff --git a/docs/hardis/cache/clear.md b/docs/hardis/cache/clear.md index ed21200f8..0f2bd310b 100644 --- a/docs/hardis/cache/clear.md +++ b/docs/hardis/cache/clear.md @@ -1,24 +1,41 @@ - + # hardis:cache:clear ## Description -Clear cache generated by sfdx-hardis + +## Command Behavior + +**Clears the local cache generated by the sfdx-hardis plugin.** + +This command is designed to remove temporary files, stored configurations, and other cached data that sfdx-hardis uses to optimize its operations. Clearing the cache can be beneficial for: + +- **Troubleshooting:** Resolving unexpected behavior or inconsistencies. +- **Disk Space Management:** Freeing up storage on your local machine. +- **Ensuring Fresh Data:** Guaranteeing that the plugin operates with the most current data and configurations. + +## Technical explanations + +The command's technical implementation is straightforward: + +- **Direct Function Call:** It directly invokes the `clearCache()` function, which is imported from uri../../../common/cache/index.jsuri. +- **Cache Management Logic:** The uriclearCache()` function encapsulates the logic for identifying and removing the specific files and directories that constitute the sfdx-hardis cache. + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:cache:clear +$ sf hardis:cache:clear ``` diff --git a/docs/hardis/config/get.md b/docs/hardis/config/get.md index 562bc518e..27dc3fdaf 100644 --- a/docs/hardis/config/get.md +++ b/docs/hardis/config/get.md @@ -1,25 +1,44 @@ - + # hardis:config:get ## Description -Returns sfdx-hardis project config for a given level + +## Command Behavior + +**Retrieves and displays the sfdx-hardis configuration for a specified level.** + +This command allows you to inspect the configuration that is currently in effect for your project, which is useful for debugging and understanding how sfdx-hardis will behave. + +- **Configuration levels:** It can retrieve configuration from three different levels: + - **Project:** The configuration defined in the project's `.sfdx-hardis.yml` file. + - **Branch:** The configuration defined in a branch-specific configuration file (e.g., `.sfdx-hardis.production.yml`). + - **User:** The global user-level configuration. + +## Technical explanations + +The command's logic is straightforward: + +- **`getConfig` function:** It calls the `getConfig` utility function, passing the desired configuration level as an argument. +- **Configuration loading:** The `getConfig` function is responsible for finding the appropriate configuration file, reading its contents, and parsing it as YAML or JSON. +- **Output:** The retrieved configuration is then displayed to the user as a JSON string. 
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| level
-l | option | project,branch or user | project | | project
branch
user | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:---------------------------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| level
-l | option | project,branch or user | project | | project
branch
user | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:deploy:sources:metadata +$ sf hardis:project:deploy:sources:metadata ``` diff --git a/docs/hardis/deploy/quick.md b/docs/hardis/deploy/quick.md new file mode 100644 index 000000000..c90b12f4e --- /dev/null +++ b/docs/hardis/deploy/quick.md @@ -0,0 +1,70 @@ + +# hardis:deploy:quick + +## Description + +sfdx-hardis wrapper for **sf project deploy quick** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_quick_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-----------------------|:-------:|:--------:|:-------:| +| --job-id
-i | option | job-id | | | | +| --use-most-recent
-r | boolean | use-most-recent | | | | +| api-version
-a | option | api-version | | | | +| async | boolean | async | | | | +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| target-org
-o | option | undefined | | | | +| tests | option | tests | | | | +| wait
-w | option | wait | 33 | | | + +## Examples + + diff --git a/docs/hardis/deploy/start.md b/docs/hardis/deploy/start.md new file mode 100644 index 000000000..c8d767d2f --- /dev/null +++ b/docs/hardis/deploy/start.md @@ -0,0 +1,84 @@ + +# hardis:deploy:start + +## Description + +sfdx-hardis wrapper for **sf project deploy start** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_start_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-------------------------|:-------:|:--------:|:-------:| +| api-version
-a | option | api-version | | | | +| async | boolean | async | | | | +| coverage-formatters | option | coverage-formatters | | | | +| debug | boolean | debug | | | | +| dry-run | boolean | dry-run | | | | +| flags-dir | option | undefined | | | | +| ignore-conflicts
-c | boolean | ignore-conflicts | | | | +| ignore-errors
-r | boolean | ignore-errors | | | | +| ignore-warnings
-g | boolean | ignore-warnings | | | | +| json | boolean | Format output as json. | | | | +| junit | boolean | junit | | | | +| manifest
-x | option | manifest | | | | +| metadata
-m | option | metadata | | | | +| metadata-dir | option | metadata-dir | | | | +| post-destructive-changes | option | post-destructive-changes | | | | +| pre-destructive-changes | option | pre-destructive-changes | | | | +| purge-on-delete | boolean | purge-on-delete | | | | +| results-dir | option | results-dir | | | | +| single-package | boolean | single-package | | | | +| source-dir
-d | option | source-dir | | | | +| target-org
-o | option | undefined | | | | +| test-level | option | test-level | | | | +| tests | option | tests | | | | +| wait
-w | option | wait | 33 | | | + +## Examples + + diff --git a/docs/hardis/deploy/validate.md b/docs/hardis/deploy/validate.md new file mode 100644 index 000000000..c6336405e --- /dev/null +++ b/docs/hardis/deploy/validate.md @@ -0,0 +1,84 @@ + +# hardis:deploy:validate + +## Description + +sfdx-hardis wrapper for **sf project deploy validate** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_validate_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-------------------------|:-------:|:--------:|:-------:| +| api-version
-a | option | api-version | | | | +| async | boolean | async | | | | +| coverage-formatters | option | coverage-formatters | | | | +| debug | boolean | debug | | | | +| dry-run | boolean | dry-run | | | | +| flags-dir | option | undefined | | | | +| ignore-conflicts
-c | boolean | ignore-conflicts | | | | +| ignore-errors
-r | boolean | ignore-errors | | | | +| ignore-warnings
-g | boolean | ignore-warnings | | | | +| json | boolean | Format output as json. | | | | +| junit | boolean | junit | | | | +| manifest
-x | option | manifest | | | | +| metadata
-m | option | metadata | | | | +| metadata-dir | option | metadata-dir | | | | +| post-destructive-changes | option | post-destructive-changes | | | | +| pre-destructive-changes | option | pre-destructive-changes | | | | +| purge-on-delete | boolean | purge-on-delete | | | | +| results-dir | option | results-dir | | | | +| single-package | boolean | single-package | | | | +| source-dir
-d | option | source-dir | | | | +| target-org
-o | option | undefined | | | | +| test-level | option | test-level | | | | +| tests | option | tests | | | | +| wait
-w | option | wait | 33 | | | + +## Examples + + diff --git a/docs/hardis/doc/extract/permsetgroups.md b/docs/hardis/doc/extract/permsetgroups.md index a54950d30..ae531cd0a 100644 --- a/docs/hardis/doc/extract/permsetgroups.md +++ b/docs/hardis/doc/extract/permsetgroups.md @@ -1,25 +1,49 @@ - + # hardis:doc:extract:permsetgroups ## Description -Generate markdown files with project documentation + +## Command Behavior + +**Extracts and documents Salesforce Permission Set Groups and their assigned Permission Sets.** + +This command generates two types of output: a CSV file and a Markdown file, providing a clear overview of how Permission Set Groups are structured and what Permission Sets they contain within your Salesforce project. This is particularly useful for: + +- **Documentation:** Creating human-readable documentation of your permission architecture. +- **Auditing:** Understanding the composition of permission sets for security and compliance checks. +- **Analysis:** Gaining insights into how permissions are bundled and assigned in your Salesforce environment. + +The generated CSV file provides a structured, machine-readable format, while the Markdown file offers a more descriptive, human-friendly view, including the group's name, label, description, and a list of its constituent permission sets. + +## Technical explanations + +The command performs the following technical steps: + +- **File Discovery:** It uses `glob` to find all `.permissionsetgroup-meta.xml` files within the current working directory, respecting `.gitignore` patterns. +- **XML Parsing:** For each discovered Permission Set Group XML file, it parses the XML content using `parseXmlFile` to extract relevant information such as the group's name, label, description, and the names of the Permission Sets it contains. +- **Data Structuring:** The extracted data is then structured into a list of objects, making it easy to process. 
+- **CSV Generation:** It constructs a CSV file with two columns: 'Permission set group' and 'Permission sets'. The 'Permission sets' column lists all assigned permission sets for each group, enclosed in quotes and separated by commas. The CSV file is saved to a temporary directory or a user-specified path. +- **Markdown Generation:** It generates a Markdown file (`docs/permission-set-groups.md`) that includes a title, a table of contents, and detailed sections for each Permission Set Group. Each section lists the group's name, label, description, and a bulleted list of its assigned Permission Sets. +- **File System Operations:** It uses `fs-extra` to ensure output directories exist and to write the generated CSV and Markdown files. +- **VS Code Integration:** It uses `WebSocketClient.requestOpenFile` to automatically open the generated CSV and Markdown files in VS Code, enhancing the user experience. + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:doc:extract:permsetgroups +$ sf hardis:doc:extract:permsetgroups ``` diff --git a/docs/hardis/doc/fieldusage.md b/docs/hardis/doc/fieldusage.md new file mode 100644 index 000000000..304c439f5 --- /dev/null +++ b/docs/hardis/doc/fieldusage.md @@ -0,0 +1,54 @@ + +# hardis:doc:fieldusage + +## Description + + +## Command Behavior + +**Retrieves and displays the usage of custom fields within a Salesforce org, based on metadata dependencies.** + +This command helps identify where custom fields are referenced across various metadata components in your Salesforce environment. It's particularly useful for impact analysis before making changes to fields, or for understanding the complexity and interconnectedness of your Salesforce customizations. + +- **Targeted sObjects:** You can specify a comma-separated list of sObjects (e.g., `Account,Contact`) to narrow down the analysis to relevant objects. If no sObjects are specified, it will analyze all customizable sObjects. +- **Usage Details:** For each custom field, the command lists the metadata components (e.g., Apex Classes, Visualforce Pages, Flows, Reports) that reference it, along with their types and names. + +!['Find custom fields usage'](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/doc-fieldusage.png) + +
+Technical explanations + +The command operates by querying Salesforce's Tooling API and Metadata Component Dependency API: + +- **sObject Retrieval:** It first queries `EntityDefinition` to get a list of customizable sObjects, optionally filtered by the user's input. +- **Custom Field Identification:** For each identified sObject, it queries `CustomField` to retrieve all custom fields associated with it. +- **Dependency Lookup:** The core of the command involves querying `MetadataComponentDependency` using the IDs of the custom fields. This API provides information about which other metadata components depend on the specified fields. +- **Data Aggregation & Reporting:** The retrieved data is then processed and formatted into a tabular output, showing the sObject name, field name, field type, dependency type, and dependency name. The results are also generated into various report formats (e.g., CSV, JSON) for further analysis. +- **SOQL Queries:** It uses `soqlQuery` and `soqlQueryTooling` utilities to execute SOQL queries against the Salesforce org. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:-------------------------------------------|:-------:|:--------:|:-------:| +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| sObjects
-s | option | Comma-separated list of sObjects to filter | | | | +| target-org
-o | option | undefined | | | | + +## Examples + +```shell +$ sf hardis:doc:fieldusage +``` + +```shell +$ sf hardis:doc:fieldusage --sObjects Account,Contact,Opportunity +``` + +```shell +$ sf hardis:doc:fieldusage --target-org myOrgAlias --sObjects CustomObject__c +``` + + diff --git a/docs/hardis/doc/flow2markdown.md b/docs/hardis/doc/flow2markdown.md new file mode 100644 index 000000000..d30415dbd --- /dev/null +++ b/docs/hardis/doc/flow2markdown.md @@ -0,0 +1,71 @@ + +# hardis:doc:flow2markdown + +## Description + + +## Command Behavior + +**Generates comprehensive Markdown documentation from a Salesforce Flow metadata file.** + +This command automates the creation of human-readable documentation for Salesforce Flows, making it easier to understand their logic and behavior. It can process a single Flow file or multiple Flow files, generating a Markdown file for each. + +Key features include: + +- **Detailed Flow Description:** Extracts and presents the Flow's structure, elements, and decision logic in a clear, organized Markdown format. +- **AI-Powered Summarization (Optional):** If [AI integration](https://sfdx-hardis.cloudity.com/salesforce-ai-setup/) is configured, the documentation will include an AI-generated summary of the Flow's purpose and functionality. +- **Mermaid Diagram Generation:** Integrates with Mermaid to visualize the Flow's structure, providing a graphical representation alongside the textual description. +- **History Diff (Optional):** Can generate a Markdown file showing the historical differences of the Flow, useful for tracking changes over time. +- **PDF Export (Optional):** Allows for the generation of the documentation in PDF format for easy sharing and archiving. +- **Interactive File Selection:** If no input file is specified, the command interactively prompts the user to select Flow files. + +
+Technical explanations + +The command leverages several internal utilities and external libraries to achieve its functionality: + +- **Flow Metadata Parsing:** Reads and parses the XML content of Salesforce Flow metadata files (.flow-meta.xml). +- **Markdown Generation:** Utilizes `generateFlowMarkdownFile` to transform the parsed Flow data into a structured Markdown format. +- **Mermaid Integration:** Employs `generateMarkdownFileWithMermaid` to embed Mermaid diagrams within the Markdown output, which are then rendered by compatible Markdown viewers. +- **AI Integration:** If enabled, it interacts with an AI service (via `describeWithAi` option) to generate a high-level summary of the Flow. +- **Git History Analysis:** For the --with-history flag, it uses `generateHistoryDiffMarkdown` to analyze Git history and present changes to the Flow. +- **File System Operations:** Uses `fs-extra` for file system operations like reading input files, creating output directories (e.g., `docs/flows/`), and writing Markdown and PDF files. +- **Salesforce CLI Integration:** Uses `@salesforce/sf-plugins-core` for command-line parsing and `setConnectionVariables` for Salesforce organization context. +- **WebSocket Communication:** Interacts with a WebSocket client (`WebSocketClient.requestOpenFile`) to open the generated Markdown file in a VS Code tab, enhancing user experience. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:-------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| inputfile
-x | option | Path to Flow metadata file. If not specified, the command will prompt the user | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output markdown file. Must end with .md | | | | +| pdf | boolean | Also generate the documentation in PDF format | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| with-history | boolean | Generate a markdown file with the history diff of the Flow | | | | + +## Examples + +```shell +$ sf hardis:doc:flow2markdown +``` + +```shell +$ sf hardis:doc:flow2markdown --inputfile force-app/main/default/flows/MyFlow.flow-meta.xml +``` + +```shell +$ sf hardis:doc:flow2markdown --pdf +``` + +```shell +$ sf hardis:doc:flow2markdown --inputfile force-app/main/default/flows/MyFlow.flow-meta.xml --pdf +``` + + diff --git a/docs/hardis/doc/mkdocs-to-cf.md b/docs/hardis/doc/mkdocs-to-cf.md new file mode 100644 index 000000000..bb7b3a061 --- /dev/null +++ b/docs/hardis/doc/mkdocs-to-cf.md @@ -0,0 +1,26 @@ + +# hardis:doc:mkdocs-to-cf + +## Description + +## Command Behavior**Generates MkDocs HTML pages and uploads them to Cloudflare as a static site, secured with Cloudflare Access.**This command automates the deployment of your project's documentation (built with MkDocs) to Cloudflare Pages, making it accessible and secure. It handles the entire process from HTML generation to Cloudflare configuration.Key operations performed:- **MkDocs HTML Generation:** Builds the MkDocs project into static HTML pages. 
It can use a locally installed `mkdocs-material` or a `mkdocs` Docker image. +- **Cloudflare Pages Project Creation/Update:** Creates a new Cloudflare Pages project if one doesn't exist for your documentation, or updates an existing one. +- **Cloudflare Access Policy Assignment:** Assigns a policy to restrict access to the deployed application, ensuring only authorized users can view your documentation. +- **Cloudflare Access Application Setup:** Configures a Cloudflare Access application for the deployed site, integrating it with your Zero Trust policies. +- **HTML Page Upload:** Deploys the generated HTML pages to Cloudflare Pages. +- **Browser Opening (Non-CI):** Opens the newly deployed website in your default browser if the command is not run in a CI/CD environment. + +**Prerequisite:** The documentation must have been previously generated using `sf hardis:doc:project2markdown --with-history`. + +**Customization:** You can override default styles by customizing your `mkdocs.yml` file. + +More information can be found in the [Documentation section](https://sfdx-hardis.cloudity.com/salesforce-project-documentation/). + +**Environment Variables for Cloudflare Configuration:** + +| Variable | Description | Default | +| :---------------------------------------- | :----------------------------------------------------------------------- | :------------------------------------: | +| `CLOUDFLARE_EMAIL` | Cloudflare account email | _Required_ | +| `CLOUDFLARE_API_TOKEN` | Cloudflare API token | _Required_ | +| `CLOUDFLARE_ACCOUNT_ID` | Cloudflare account ID | _Required_ | +| `CLOUDFLARE_PROJECT_NAME` | Project name, also used for the site URL | Built from Git branch name | +| `CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE` | Cloudflare default login method type | `onetimepin` | +| `CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN` | Cloudflare default access email domain | `@cloudity.com` | +| `CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST` | Comma-separated list of additional policy IDs to assign to the application | _Optional_ |
+Technical explanations + +The command orchestrates interactions with MkDocs, Cloudflare APIs, and Git: + +- **MkDocs Integration:** It calls `generateMkDocsHTML()` to execute the MkDocs build process, which converts Markdown files into static HTML. It checks for the presence of `mkdocs.yml` to ensure it's a valid MkDocs project. +- **Cloudflare API Interaction:** It uses the `cloudflare` npm package to interact with the Cloudflare API. This involves: +  - **Authentication:** Initializes the Cloudflare client using `CLOUDFLARE_EMAIL`, `CLOUDFLARE_API_TOKEN`, and `CLOUDFLARE_ACCOUNT_ID` environment variables. +  - **Pages Project Management:** Calls `client.pages.projects.get()` to check for an existing project and `client.pages.projects.create()` to create a new one if needed. +  - **Access Policy Management:** Lists existing access policies (`client.zeroTrust.access.policies.list()`) and creates a new one (`client.zeroTrust.access.policies.create()`) if the required policy doesn't exist. It configures the policy with email domain restrictions and a default login method. +  - **Access Application Management:** Lists existing access applications (`client.zeroTrust.access.applications.list()`) and creates a new one (`client.zeroTrust.access.applications.create()`) for the deployed site. It then updates the application to associate it with the created access policy. +- **Git Integration:** Retrieves the current Git branch name using `getCurrentGitBranch()` to construct the Cloudflare project name and branch for deployment. +- **Wrangler CLI:** Uses the `wrangler` CLI (Cloudflare's developer tool) to deploy the generated HTML pages to Cloudflare Pages via `wrangler pages deploy`. +- **Environment Variable Management:** Reads various environment variables to configure Cloudflare settings and project names. +- **Error Handling:** Includes checks for missing `mkdocs.yml` and Cloudflare environment variables, throwing `SfError` when necessary.
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:doc:mkdocs-to-cf +``` + + diff --git a/docs/hardis/doc/mkdocs-to-salesforce.md b/docs/hardis/doc/mkdocs-to-salesforce.md new file mode 100644 index 000000000..f7b2eaf4e --- /dev/null +++ b/docs/hardis/doc/mkdocs-to-salesforce.md @@ -0,0 +1,66 @@ + +# hardis:doc:mkdocs-to-salesforce + +## Description + + +## Command Behavior +**Generates MkDocs HTML pages and deploys them to a Salesforce org as a static resource, Visualforce page, and Custom Tab.** + +This command provides a convenient way to host your project's documentation directly within Salesforce, making it easily accessible to users. It automates the entire process of converting your MkDocs project into a deployable Salesforce package. + +Key operations performed: + +- **MkDocs HTML Generation:** Builds the MkDocs project into static HTML pages. It can use a locally installed `mkdocs-material` or a `mkdocs` Docker image. +- **Salesforce Metadata Creation:** Creates the necessary Salesforce metadata components: + - A **Static Resource** to store the generated HTML, CSS, and JavaScript files. + - A **Visualforce Page** that embeds the static resource, allowing it to be displayed within Salesforce. + - A **Custom Tab** to provide a user-friendly entry point to the documentation from the Salesforce navigation. + - A **Permission Set** to grant access to the Visualforce page and Custom Tab. +- **Metadata Deployment:** Deploys these newly created metadata components to the specified Salesforce org. +- **Permission Set Assignment:** Assigns the newly created permission set to the current user, ensuring immediate access to the documentation. 
+- **Browser Opening (Non-CI):** Opens the Custom Tab in your default browser if the command is not run in a CI/CD environment. + +**Prerequisite:** The documentation must have been previously generated using `sf hardis:doc:project2markdown --with-history`. + +**Customization:** + +- You can specify the type of documentation to generate (e.g., `CICD` or `Monitoring`) using the `--type` flag. The default is `CICD`. +- You can override default styles by customizing your `mkdocs.yml` file. + +More information can be found in the [Documentation section](${CONSTANTS.DOC_URL_ROOT}/salesforce-project-documentation/). +
+Technical explanations + +The command orchestrates interactions with MkDocs, Salesforce CLI, and file system operations: + +- **MkDocs Integration:** It first modifies the `mkdocs.yml` file to ensure compatibility with Salesforce static resources (e.g., setting `use_directory_urls` to `false`). Then, it calls `generateMkDocsHTML()` to build the static HTML content. +- **Temporary SFDX Project:** It creates a temporary SFDX project using `createTempDir` and `createBlankSfdxProject` to stage the generated Salesforce metadata before deployment. +- **Metadata Generation:** It dynamically creates the XML metadata files for the Static Resource, Visualforce Page, Custom Tab, and Permission Set. The HTML content from the MkDocs build is moved into the static resource folder. +- **Salesforce CLI Deployment:** It constructs and executes a `sf project deploy start` command to deploy the generated metadata to the target Salesforce org. It intelligently adds `--test-level RunLocalTests` for production orgs and `--test-level NoTestRun` for sandboxes. +- **Permission Set Assignment:** After successful deployment, it calls `initPermissionSetAssignments` to assign the newly created permission set to the current user. +- **Browser Launch:** For non-CI environments, it uses `execCommand` to open the deployed Custom Tab in the user's default browser. +- **Error Handling and Cleanup:** It includes error handling for deployment failures (e.g., static resource size limits) and ensures that the `mkdocs.yml` file is restored to its original state after execution. +- **File System Operations:** It extensively uses `fs-extra` for file manipulation, including creating directories, moving files, and writing XML content. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------------------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| type
-t | option | Type of the documentation to generate. Default is "all" | CICD | | CICD
Monitoring | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:doc:mkdocs-to-salesforce +``` + + diff --git a/docs/hardis/doc/override-prompts.md b/docs/hardis/doc/override-prompts.md new file mode 100644 index 000000000..479752506 --- /dev/null +++ b/docs/hardis/doc/override-prompts.md @@ -0,0 +1,63 @@ + +# hardis:doc:override-prompts + +## Description + + +## Command Behavior + +**Creates local override files for AI prompt templates and variables, allowing for customization of sfdx-hardis AI interactions.** + +This command sets up a `config/prompt-templates/` folder within your project. It populates this folder with `.txt` files containing the default AI prompt templates and variables used by sfdx-hardis. This enables you to tailor the AI's behavior and responses to your organization's specific needs, terminology, and coding standards. + +Key functionalities: + +- **Template Customization:** Modify templates used for generating documentation, solving deployment errors, and describing Salesforce metadata. +- **Variable Customization:** Adjust common instruction patterns (e.g., role definitions, formatting requirements, security cautions) that are reused across multiple templates. +- **Persistent Overrides:** Once created, these local files will override the default sfdx-hardis templates and variables, and they will not be overwritten by future sfdx-hardis updates unless explicitly requested with the `--overwrite` flag. + +**Important:** After running this command, you can modify any of the `.txt` files in `config/prompt-templates/` to customize the AI's behavior. 
+ +Available templates: +- PROMPT_SOLVE_DEPLOYMENT_ERROR +- PROMPT_DESCRIBE_FLOW +- PROMPT_DESCRIBE_FLOW_DIFF +- PROMPT_DESCRIBE_OBJECT +- PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD +- PROMPT_DESCRIBE_APEX +- PROMPT_DESCRIBE_PAGE +- PROMPT_DESCRIBE_PACKAGE +- PROMPT_DESCRIBE_PROFILE +- PROMPT_DESCRIBE_PERMISSION_SET +- PROMPT_DESCRIBE_PERMISSION_SET_GROUP +- PROMPT_DESCRIBE_ASSIGNMENT_RULES +- PROMPT_DESCRIBE_APPROVAL_PROCESS +- PROMPT_DESCRIBE_LWC +- PROMPT_DESCRIBE_AUTORESPONSE_RULES +- PROMPT_DESCRIBE_ESCALATION_RULES +- PROMPT_DESCRIBE_ROLES + +Available variables: +- VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC +- VARIABLE_FORMATTING_REQUIREMENTS +- VARIABLE_ADDITIONAL_INSTRUCTIONS + +More info on [AI Prompts documentation](https://sfdx-hardis.cloudity.com/salesforce-ai-prompts/) + +
+Technical explanations + +The command's technical implementation involves: + +- **Directory Creation:** Ensures the `config/prompt-templates/` directory exists using `fs.ensureDirSync()`. +- **File Copying:** Iterates through predefined `PROMPT_TEMPLATES` and `PROMPT_VARIABLES` objects. For each template/variable, it extracts the English text content and writes it to a corresponding `.txt` file in the `config/prompt-templates/` directory. +- **Overwrite Logic:** Checks if a file already exists. If the `--overwrite` flag is provided, it overwrites the existing file; otherwise, it skips the file and logs a message. +- **User Feedback:** Provides detailed logs about created, overwritten, and skipped files, along with instructions on how to use the customized prompts and variables. +- **Dynamic Content:** The description itself dynamically lists available templates and variables by iterating over `PROMPT_TEMPLATES` and `PROMPT_VARIABLES` objects. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| overwrite | boolean | Overwrite existing template files if they already exist | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:doc:override-prompts +``` + +```shell +$ sf hardis:doc:override-prompts --overwrite +``` + + diff --git a/docs/hardis/doc/packagexml2markdown.md b/docs/hardis/doc/packagexml2markdown.md new file mode 100644 index 000000000..176eeb577 --- /dev/null +++ b/docs/hardis/doc/packagexml2markdown.md @@ -0,0 +1,59 @@ + +# hardis:doc:packagexml2markdown + +## Description + + +## Command Behavior + +**Generates a Markdown documentation file from a Salesforce `package.xml` file.** + +This command provides a convenient way to visualize and document the metadata components defined within a `package.xml` file. It's particularly useful for: + +- **Understanding Project Scope:** Quickly grasp what metadata types and components are included in a specific deployment or retrieval. +- **Documentation:** Create human-readable documentation of your project's metadata structure. +- **Collaboration:** Share a clear overview of metadata changes with team members or stakeholders. + +Key features: + +- **Flexible Input:** You can specify the path to a `package.xml` file using the `--inputfile` flag. If not provided, the command will automatically look for `package.xml` files in the `manifest` folder. +- **Customizable Output:** You can force the path and name of the output Markdown file using the `--outputfile` flag. +- **VS Code Integration:** Automatically opens the generated Markdown file in a new VS Code tab for immediate review. + +
+Technical explanations + +The command's technical implementation involves: + +- **XML Parsing:** It reads the content of the specified `package.xml` file and parses its XML structure to extract the metadata types and their members. +- **Markdown Generation:** It utilizes the `DocBuilderPackageXML.generatePackageXmlMarkdown` utility to transform the parsed `package.xml` data into a structured Markdown format. This utility handles the formatting and organization of the metadata information. +- **File System Operations:** It uses `fs-extra` (implicitly through `DocBuilderPackageXML`) to read the input `package.xml` and write the generated Markdown file. +- **WebSocket Communication:** It interacts with a WebSocket client (`WebSocketClient.requestOpenFile`) to open the generated Markdown file in a VS Code tab, enhancing user experience. +- **Salesforce Org Context:** It can optionally use the `target-org` flag to provide context, such as the instance URL, which might be used for generating links or additional information within the Markdown. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:-------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| inputfile
-x | option | Path to package.xml file. If not specified, the command will look in manifest folder | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .md | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:doc:packagexml2markdown +``` + +```shell +$ sf hardis:doc:packagexml2markdown --inputfile manifest/package-all.xml +``` + + diff --git a/docs/hardis/doc/plugin/generate.md b/docs/hardis/doc/plugin/generate.md index ab9bbb410..0e97a5e5f 100644 --- a/docs/hardis/doc/plugin/generate.md +++ b/docs/hardis/doc/plugin/generate.md @@ -1,35 +1,65 @@ - + # hardis:doc:plugin:generate ## Description -Generate Markdown documentation ready for HTML conversion with mkdocs -After the first run, you need to update manually: +## Command Behavior -- mkdocs.yml -- .github/workflows/build-deploy-docs.yml -- docs/javascripts/gtag.js , if you want Google Analytics tracking +**Generates Markdown documentation for an SF CLI plugin, ready for conversion into HTML with MkDocs.** -Then, activate Github pages, with "gh_pages" as target branch +This command automates the creation of comprehensive documentation for your Salesforce CLI plugin. It processes your plugin's commands and their flags to generate structured Markdown files, which can then be used with MkDocs to produce a professional-looking website. -At each merge into master/main branch, the GitHub Action build-deploy-docs will rebuild documentation and publish it in GitHub pages +Key functionalities: + +- **Command Documentation:** Generates a dedicated Markdown file for each command, including its description, parameters (flags), and examples. +- **Index and Commands Pages:** Creates an `index.md` and `commands.md` file that list all available commands, providing an overview and easy navigation. +- **MkDocs Integration:** Sets up the basic MkDocs project structure and updates the `mkdocs.yml` navigation to include the generated command documentation. 
+- **Default File Copying:** Copies essential MkDocs configuration files and GitHub Actions workflows to your project, streamlining the setup for continuous documentation deployment. + +**Post-Generation Steps:** + +After the initial run, you will need to manually update: + +- `mkdocs.yml`: Customize the project title, theme, and other MkDocs settings. +- `.github/workflows/build-deploy-docs.yml`: Configure the GitHub Actions workflow for automatic documentation deployment. +- `docs/javascripts/gtag.js`: If desired, set up Google Analytics tracking. + +Finally, activate GitHub Pages with `gh_pages` as the target branch. This will enable automatic documentation rebuilding and publishing to GitHub Pages upon each merge into your `master`/`main` branch. + +
+Technical explanations + +The command's technical implementation involves: + +- **Plugin Configuration Loading:** It loads the SF CLI plugin's configuration using `@oclif/core`'s `Config.load()`, which provides access to all registered commands and their metadata. +- **Command Iteration:** It iterates through each command defined in the plugin's configuration. +- **Markdown File Generation:** For each command, it constructs a Markdown file (`.md`) containing: + - The command ID as the main heading. + - The command's `description` property. + - A table of parameters (flags), including their name, type, description, default value, required status, and available options. It dynamically extracts this information from the command's `flags` property. + - Code blocks for each example provided in the command's `examples` property. +- **Navigation Structure:** It builds a nested JavaScript object (`commandsNav`) that mirrors the command hierarchy, which is then converted to YAML and inserted into `mkdocs.yml` to create the navigation menu. +- **Index and Commands Page Generation:** It reads the project's `README.md` and extracts relevant sections to create the `index.md` file. It also generates a separate `commands.md` file listing all commands. +- **File System Operations:** It uses `fs-extra` to create directories, copy default MkDocs files (`defaults/mkdocs`), and write the generated Markdown and YAML files. +- **YAML Serialization:** It uses `js-yaml` to serialize the navigation object into YAML format for `mkdocs.yml`. +
## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:doc:plugin:generate +$ sf hardis:doc:plugin:generate ``` diff --git a/docs/hardis/doc/project2markdown.md b/docs/hardis/doc/project2markdown.md new file mode 100644 index 000000000..53c4fd8a0 --- /dev/null +++ b/docs/hardis/doc/project2markdown.md @@ -0,0 +1,122 @@ + +# hardis:doc:project2markdown + +## Description + +Generates a markdown documentation from a SFDX project + +- Objects (with fields, validation rules, relationships and dependencies) +- Automations + - Approval Processes + - Assignment Rules + - AutoResponse Rules + - Escalation Rules + - Flows +- Authorizations + - Profiles + - Permission Set Groups + - Permission Sets +- Code + - Apex + - Lightning Web Components +- Lightning Pages +- Packages +- SFDX-Hardis Config +- Branches & Orgs +- Manifests + +Can work on any sfdx project, no need for it to be a sfdx-hardis flavored one. + +Generates markdown files will be written in **docs** folder (except README.md where a link to doc index is added) + +- You can customize the pages following [mkdocs-material setup documentation](https://squidfunk.github.io/mkdocs-material/setup/) +- You can manually add new markdown files in the "docs" folder to extend this documentation and add references to them in "mkdocs.yml" +- You can also add images in folder "docs/assets" and embed them in markdown files. 
+ +To read Flow documentations if your markdown reader doesn't handle MermaidJS syntax, this command could require @mermaid-js/mermaid-cli + +- Run `npm install @mermaid-js/mermaid-cli --global` if puppeteer works in your environment +- It can also be run as a docker image + +Both modes will be tried by default, but you can also force one of them by defining environment variable `MERMAID_MODES=docker` or `MERMAID_MODES=cli` + +_sfdx-hardis docker image is alpine-based and does not succeed to run mermaid/puppeteer: if you can help, please submit a PR !_ + +If Flow history doc always display a single state, you probably need to update your workflow configuration: + +- on Gitlab: Env variable [`GIT_FETCH_EXTRA_FLAGS: --depth 10000`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.gitlab-ci.yml#L11) +- on GitHub: [`fetch-depth: 0`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.github/workflows/org-monitoring.yml#L58) +- on Azure: [`fetchDepth: "0"`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/azure-pipelines.yml#L39) +- on Bitbucket: [`step: clone: depth: full`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/bitbucket-pipelines.yml#L18) + +![Screenshot flow doc](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-flow-doc.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-project-doc.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-project-doc-2.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-object-diagram.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-project-doc-profile.gif) + +![Screenshot project 
documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-doc-apex.png) + +If it is a sfdx-hardis CI/CD project, a diagram of the branches and orgs strategy will be generated. + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-doc-branches-strategy.jpg) + +If [AI integration](https://sfdx-hardis.cloudity.com/salesforce-ai-setup/) is configured, documentation will contain a summary of the Flow. + +- Use variable PROMPTS_LANGUAGE (ex: PROMPTS_LANGUAGE=fr) to force language for LLM calls (default:en) + +If you have a complex strategy, you might need to input property **mergeTargets** in branch-scoped sfdx-hardis.yml file to have a correct diagram. + +Define DO_NOT_OVERWRITE_INDEX_MD=true to avoid overwriting the index.md file in docs folder, useful if you want to keep your own index.md file. + +## Doc HTML Pages + +To read the documentation as HTML pages, run the following code (you need [**Python**](https://www.python.org/downloads/) on your computer) + +```python +pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || python -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || py -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists +mkdocs serve -v || python -m mkdocs serve -v || py -m mkdocs serve -v +``` + +To just generate HTML pages that you can host anywhere, run `mkdocs build -v || python -m mkdocs build -v || py -m mkdocs build -v` + + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| diff-only | boolean | Generate documentation only for changed files (used for monitoring) | | | | +| flags-dir | option | undefined | | | | +| hide-apex-code | boolean | Hide Apex code in the generated documentation for Apex classes. | | | | +| json | boolean | Format output as json. | | | | +| pdf | boolean | Also generate the documentation in PDF format | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| with-history | boolean | Generate a markdown file with the history diff of the Flow | | | | + +## Examples + +```shell +$ sf hardis:doc:project2markdown +``` + +```shell +$ sf hardis:doc:project2markdown --with-history +``` + +```shell +$ sf hardis:doc:project2markdown --with-history --pdf +``` + +```shell +$ sf hardis:doc:project2markdown --hide-apex-code +``` + + diff --git a/docs/hardis/git/pull-requests/extract.md b/docs/hardis/git/pull-requests/extract.md new file mode 100644 index 000000000..2e3c9a66e --- /dev/null +++ b/docs/hardis/git/pull-requests/extract.md @@ -0,0 +1,58 @@ + +# hardis:git:pull-requests:extract + +## Description + + +## Command Behavior + +**Extracts pull request information from your Git server based on specified filtering criteria.** + +This command provides a powerful way to query and retrieve details about pull requests (or merge requests, depending on your Git provider) in your repository. It's highly useful for reporting, auditing, and analyzing development workflows. + +Key functionalities include: + +- **Target Branch Filtering:** You can filter pull requests by their target branch using the `--target-branch` flag. If not specified, the command will prompt you to select one. +- **Status Filtering:** Filter pull requests by their status: `open`, `merged`, or `abandoned` using the `--status` flag. An interactive prompt is provided if no status is specified. +- **Minimum Date Filtering:** Use the `--min-date` flag to retrieve pull requests created or updated after a specific date. +- **CSV Output:** The extracted pull request data is generated into a CSV file, which can be used for further analysis in spreadsheet software. + +
+Technical explanations + +The command's technical implementation involves interacting with a Git provider's API: + +- **Git Provider Abstraction:** It uses the `GitProvider.getInstance(true)` to abstract away the specifics of different Git platforms (e.g., GitHub, GitLab, Azure DevOps). This ensures the command can work across various environments. +- **API Calls:** The `gitProvider.listPullRequests()` method is called with a `prConstraint` object that encapsulates the filtering criteria (target branch, minimum date, status). +- **Interactive Prompts:** The `prompts` library is used to interactively gather input from the user for the target branch and pull request status if they are not provided as command-line flags. +- **Date Handling:** The `moment` library is used to parse and handle date inputs for the `--min-date` flag. +- **CSV Generation:** The `generateCsvFile` utility is responsible for converting the retrieved pull request data into a CSV format, and `generateReportPath` determines the output file location. +- **Error Handling:** It includes error handling for cases where a Git provider cannot be identified. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:---------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-----------------------------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| min-date
-m | option | Minimum date for PR | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| status
-x | option | Status of the PR | | | open
merged
abandoned | +| target-branch
-t | option | Target branch of PRs | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:git:pull-requests:extract +``` + +```shell +$ sf hardis:git:pull-requests:extract --target-branch main --status merged +``` + + diff --git a/docs/hardis/lint/access.md b/docs/hardis/lint/access.md index c0183e386..2849d307e 100644 --- a/docs/hardis/lint/access.md +++ b/docs/hardis/lint/access.md @@ -1,38 +1,66 @@ - + # hardis:lint:access ## Description -Check if elements(apex class and field) are at least in one permission set + +## Command Behavior + +**Checks if specified Salesforce metadata elements (Apex classes and custom fields) have at least one permission defined in any Permission Set or Profile.** + +This command is crucial for maintaining proper access control and identifying potential security vulnerabilities or misconfigurations in your Salesforce project. It helps ensure that all custom elements are accessible to the intended users through appropriate permission assignments. + +Key functionalities: + +- **Element Validation:** Verifies that Apex classes and custom fields have `enabled` (for Apex classes) or `readable`/`editable` (for custom fields) access in at least one Permission Set or Profile. +- **Configurable Ignores:** Allows you to ignore specific elements or entire types of elements (e.g., all Apex classes, a particular custom field) using the `--elementsignored` flag or project configuration. +- **Permission Set/Profile Filtering:** You can specify Permission Sets or Profiles to ignore during the access check using the `--ignorerights` flag. +- **Reporting:** Generates a CSV report of all missing access elements, which can be used for auditing or further analysis. +- **Notifications:** Integrates with notification providers (Grafana, Slack, MS Teams) to alert about missing access issues, making it suitable for CI/CD monitoring. 
+- **Interactive Fix:** In non-CI environments, it offers an interactive prompt to automatically add missing accesses to selected Permission Sets. + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-missing-access/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **File System Traversal:** Uses `glob` to find all Apex class (`.cls`) and custom field (`.field-meta.xml`) files within the specified root folder. +- **XML Parsing:** Parses the XML content of Permission Set (`.permissionset-meta.xml`) and Profile (`.profile-meta.xml`) files to extract access configurations. +- **Element Filtering:** Filters out elements that are explicitly ignored (via flags or configuration) or are not subject to access checks (e.g., Master-Detail fields, required fields, Custom Metadata Types, Custom Settings). +- **Access Verification Logic:** Iterates through each element and verifies whether it has the necessary access enabled in any of the non-ignored Permission Sets or Profiles. +- **Data Aggregation:** Collects all elements with missing access into a `missingElements` array and `missingElementsMap` for reporting and notification purposes. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-----------------------|:-------:|:--------------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| elementsignored
-e | option | Ignore specific elements separated by commas | | | | -| folder
-f | option | Root folder | force-app | | | -| ignorerights
-i | option | Ignore permission sets or profiles | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-----------------------|:-------:|:------------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| elementsignored
-e | option | Ignore specific elements separated by commas | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| ignorerights
-i | option | Ignore permission sets or profiles | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-x | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:lint:access +$ sf hardis:lint:access ``` ```shell -sfdx hardis:lint:access -e "ApexClass:ClassA, CustomField:Account.CustomField" +$ sf hardis:lint:access -e "ApexClass:ClassA, CustomField:Account.CustomField" ``` ```shell -sfdx hardis:lint:access -i "PermissionSet:permissionSetA, Profile" +$ sf hardis:lint:access -i "PermissionSet:permissionSetA, Profile" ``` diff --git a/docs/hardis/lint/metadatastatus.md b/docs/hardis/lint/metadatastatus.md index a0e6d0996..0deda5e17 100644 --- a/docs/hardis/lint/metadatastatus.md +++ b/docs/hardis/lint/metadatastatus.md @@ -1,27 +1,62 @@ - + # hardis:lint:metadatastatus ## Description -Check if elements(flows) are inactive in the project + +## Command Behavior + +**Checks for inactive metadata elements within your Salesforce DX project, helping to maintain a clean and efficient codebase.** + +This command identifies various types of metadata components that are marked as inactive in your local project files. Keeping metadata active and relevant is crucial for deployment success, performance, and avoiding confusion. This tool helps you pinpoint and address such inactive elements. + +It specifically checks for the inactive status of: + +- **Approval Processes** +- **Assignment Rules** +- **Auto Response Rules** +- **Escalation Rules** +- **Flows** (specifically those in 'Draft' status) +- **Forecasting Types** +- **Record Types** +- **Validation Rules** +- **Workflow Rules** + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/detect-inactive-metadata.gif) + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-inactive-metadata/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses `glob` patterns (e.g., `**/flows/**/*.flow-meta.xml`, `**/objects/**/validationRules/*.validationRule-meta.xml`) to locate relevant metadata files within your project. +- **XML Parsing:** For each identified metadata file, it reads the XML content and parses it to extract the `active` or `status` flag (e.g., `false`, `Draft`). +- **Status Verification:** It checks the value of these flags to determine if the metadata component is inactive. +- **Data Aggregation:** All detected inactive items are collected into a list, including their type, name, and a severity level. +- **Report Generation:** It generates a CSV report (`lint-metadatastatus.csv`) containing details of all inactive metadata elements, which can be used for further analysis or record-keeping. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications (e.g., to Slack, MS Teams, Grafana) about the presence and count of inactive metadata, making it suitable for automated monitoring in CI/CD pipelines. +- **Error Handling:** It includes basic error handling for file operations and ensures that the process continues even if some files cannot be read. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:lint:metadatastatus +$ sf hardis:lint:metadatastatus ``` diff --git a/docs/hardis/lint/missingattributes.md b/docs/hardis/lint/missingattributes.md index 510ade60e..844985fee 100644 --- a/docs/hardis/lint/missingattributes.md +++ b/docs/hardis/lint/missingattributes.md @@ -1,27 +1,52 @@ - + # hardis:lint:missingattributes ## Description -Check if elements(custom fields) aren't description + +## Command Behavior + +**Checks for missing descriptions on custom fields within your Salesforce DX project.** + +This command helps enforce documentation standards by identifying custom fields that lack a descriptive explanation. Comprehensive field descriptions are crucial for: + +- **Maintainability:** Making it easier for developers and administrators to understand the purpose and usage of each field. +- **Data Governance:** Ensuring data quality and consistency. +- **User Adoption:** Providing clear guidance to end-users on how to interact with fields. + +It specifically targets custom fields (ending with `__c`) and excludes standard fields, managed package fields, and fields on Custom Settings or Data Cloud objects. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses `glob` to find all custom field metadata files (`.field-meta.xml`) within your project. +- **Custom Setting Exclusion:** It first filters out fields belonging to Custom Settings by reading the corresponding object metadata files (`.object-meta.xml`) and checking for the `customSettingsType` tag. It also excludes Data Cloud objects (`__dlm`, `__dll`) and managed package fields. +- **XML Parsing:** For each remaining custom field file, it reads the XML content and parses it using `xml2js` to extract the `fullName` and `description` attributes. +- **Description Check:** It verifies if the `description` attribute is present and not empty for each custom field. +- **Data Aggregation:** All custom fields found to be missing a description are collected into a list, along with their object and field names. +- **Report Generation:** It generates a CSV report (`lint-missingattributes.csv`) containing details of all fields with missing descriptions. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications (e.g., to Slack, MS Teams, Grafana) about the presence and count of fields with missing descriptions, making it suitable for automated quality checks in CI/CD pipelines. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:lint:missingattributes +$ sf hardis:lint:missingattributes ``` diff --git a/docs/hardis/lint/unusedmetadatas.md b/docs/hardis/lint/unusedmetadatas.md index 23044e48f..2de0a0725 100644 --- a/docs/hardis/lint/unusedmetadatas.md +++ b/docs/hardis/lint/unusedmetadatas.md @@ -1,27 +1,55 @@ - + # hardis:lint:unusedmetadatas ## Description -Check if elements (custom labels and custom permissions) are used in the project + +## Command Behavior + +**Checks for unused custom labels and custom permissions within your Salesforce DX project.** + +This command helps identify and report on custom labels and custom permissions that are defined in your project but do not appear to be referenced anywhere in your codebase. Identifying unused metadata is crucial for: + +- **Code Cleanliness:** Removing dead code and unnecessary metadata improves project maintainability. +- **Performance:** Reducing the overall size of your metadata, which can positively impact deployment times and org performance. +- **Clarity:** Ensuring that all defined components serve a purpose, making the codebase easier to understand. + +It specifically scans for references to custom labels (e.g., `$Label.MyLabel`) and custom permissions (by their API name or label) across various file types (Apex, JavaScript, HTML, XML, etc.). + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-unused-metadata/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses `glob` to find all relevant project files (Apex classes, triggers, JavaScript, HTML, XML, Aura components, Visualforce pages) and custom label (`CustomLabels.labels-meta.xml`) and custom permission (`.customPermission-meta.xml`) definition files. +- **XML Parsing:** It uses `xml2js` to parse the XML content of `CustomLabels.labels-meta.xml` and custom permission files to extract the full names of labels and permissions. +- **Content Scanning:** For each label and custom permission, it iterates through all other project files and checks if their names or associated labels are present in the file content. It performs case-insensitive checks for labels. +- **Usage Tracking:** It maintains a count of how many times each custom permission is referenced. Labels are checked for any inclusion. +- **Unused Identification:** Elements with no or very few references (for custom permissions, less than 2 to account for their own definition file) are flagged as unused. +- **Data Aggregation:** All identified unused labels and custom permissions are collected into a list. +- **Report Generation:** It generates a CSV report (`lint-unusedmetadatas.csv`) containing details of all unused metadata elements. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications (e.g., to Slack, MS Teams, Grafana) about the presence and count of unused metadata, making it suitable for automated monitoring in CI/CD pipelines. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:lint:unusedmetadatas +$ sf hardis:lint:unusedmetadatas ``` diff --git a/docs/hardis/mdapi/deploy.md b/docs/hardis/mdapi/deploy.md index b57e9b9fc..375955124 100644 --- a/docs/hardis/mdapi/deploy.md +++ b/docs/hardis/mdapi/deploy.md @@ -1,39 +1,62 @@ - + # hardis:mdapi:deploy ## Description -sfdx-hardis wrapper for sfdx force:mdapi:deploy that displays tips to solve deployment errors. + +## Command Behavior + +**A wrapper command for Salesforce CLI's `sf project deploy start` (formerly `sfdx force:mdapi:deploy`), designed to assist with deployment error resolution.** + +This command facilitates the deployment of metadata API source (either from a zip file, a deployment directory, or a validated deploy request ID) to a Salesforce org. Its primary enhancement over the standard Salesforce CLI command is its ability to provide tips and guidance for solving common deployment errors. + +Key features: + +- **Flexible Input:** Supports deploying from a `.zip` file (`--zipfile`), a local directory (`--deploydir`), or by referencing a previously validated deployment (`--validateddeployrequestid`). +- **Test Level Control:** Allows specifying the test level for deployments (`NoTestRun`, `RunSpecifiedTests`, `RunLocalTestsInOrg`, `RunAllTestsInOrg`). +- **Error Handling Assistance:** Displays helpful tips and links to documentation to guide you through resolving deployment failures. + +**Important Note:** The underlying Salesforce CLI command `sfdx force:mdapi:deploy` is being deprecated by Salesforce in November 2024. It is recommended to migrate to `sf project deploy start` for future compatibility. See [Salesforce CLI Migration Guide](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm) for more information. 
+ +For visual assistance with solving deployment errors, refer to this article: [![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) -[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_mdapi.htm#cli_reference_force_mdapi_deploy) +
+Technical explanations + +This command acts as an intelligent wrapper around the Salesforce CLI's metadata deployment functionality: + +- **Command Wrapping:** It uses the `wrapSfdxCoreCommand` utility to execute the `sfdx force:mdapi:deploy` (or its equivalent `sf project deploy start`) command, passing through all relevant flags and arguments. +- **Error Analysis (Implicit):** While the direct code snippet doesn't show explicit error analysis, the description implies that the `wrapSfdxCoreCommand` or a subsequent process intercepts deployment failures and provides contextual help. +- **User Guidance:** It logs messages to the console, including deprecation warnings and pointers to external documentation for troubleshooting. +- **Argument Passthrough:** It directly passes the command-line arguments (`this.argv`) to the underlying Salesforce CLI command, ensuring all standard deployment options are supported. +
## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------------------------|:-------:|:--------------------------------------------------------------------|:---------:|:--------:|:----------------------------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| checkonly
-c | boolean | checkOnly | | | | -| concise | boolean | concise | | | | -| debug | boolean | debug | | | | -| deploydir
-d | option | deployDir | | | | -| ignoreerrors
-o | boolean | ignoreErrors | | | | -| ignorewarnings
-g | boolean | ignoreWarnings | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| purgeondelete | boolean | purgeOnDelete | | | | -| runtests
-r | option | runTests | | | | -| singlepackage
-s | boolean | singlePackage | | | | -| soapdeploy | boolean | soapDeploy | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| testlevel
-l | option | testLevel | NoTestRun | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | -| validateddeployrequestid
-q | option | validatedDeployRequestId | | | | -| verbose | boolean | verbose | | | | -| wait
-w | option | wait | 0 minutes | | | -| websocket | option | websocket | | | | -| zipfile
-f | option | zipFile | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------------------------|:-------:|:-------------------------|:---------:|:--------:|:----------------------------------------------------------------------:| +| checkonly
-c | boolean | checkOnly | | | | +| concise | boolean | concise | | | | +| debug | boolean | debug | | | | +| deploydir
-d | option | deployDir | | | | +| flags-dir | option | undefined | | | | +| ignoreerrors | boolean | ignoreErrors | | | | +| ignorewarnings
-g | boolean | ignoreWarnings | | | | +| json | boolean | Format output as json. | | | | +| purgeondelete | boolean | purgeOnDelete | | | | +| runtests
-r | option | runTests | | | | +| singlepackage
-s | boolean | singlePackage | | | | +| soapdeploy | boolean | soapDeploy | | | | +| target-org
-o | option | undefined | | | | +| testlevel
-l | option | testLevel | NoTestRun | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | +| validateddeployrequestid
-q | option | validatedDeployRequestId | | | | +| verbose | boolean | verbose | | | | +| wait
-w | option | wait | 120 | | | +| websocket | option | websocket | | | | +| zipfile
-f | option | zipFile | | | | ## Examples diff --git a/docs/hardis/misc/custom-label-translations.md b/docs/hardis/misc/custom-label-translations.md new file mode 100644 index 000000000..944dfaef2 --- /dev/null +++ b/docs/hardis/misc/custom-label-translations.md @@ -0,0 +1,68 @@ + +# hardis:misc:custom-label-translations + +## Description + + +## Command Behavior + +**Extracts selected custom labels, or all custom labels used within a given Lightning Web Component (LWC), from all available language translation files in the project.** + +This command streamlines the process of managing and isolating specific custom label translations. It's particularly useful for: + +- **Localization Management:** Focusing on translations for a subset of labels or for labels relevant to a specific UI component. +- **Collaboration:** Sharing only the necessary translation files with translators, reducing complexity. +- **Debugging:** Isolating translation issues for specific labels or components. + +Key functionalities: + +- **Label Selection:** You can specify custom label names directly using the `--label` flag (comma-separated). +- **LWC-based Extraction:** Alternatively, you can provide an LWC developer name using the `--lwc` flag, and the command will automatically identify and extract all custom labels referenced within that LWC's JavaScript files. +- **Interactive Prompts:** If neither `--label` nor `--lwc` is provided, the command will interactively prompt you to choose between selecting specific labels or extracting from an LWC. +- **Output Generation:** For each language found in your project's `translations` folder, it generates a new `.translation-meta.xml` file containing only the extracted custom labels and their translations. These files are placed in a timestamped output directory. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses `glob` to find all `*.translation-meta.xml` files in the `**/translations/` directory and, if an LWC is specified, it searches for the LWC's JavaScript files (`**/lwc/**/*.js`). +- **LWC Label Extraction:** The `extractLabelsFromLwc` function uses regular expressions (`@salesforce/label/c.([a-zA-Z0-9_]+)`) to parse LWC JavaScript files and identify referenced custom labels. +- **XML Parsing and Building:** It uses `xml2js` (`parseStringPromise` and `Builder`) to: + - Read and parse existing `.translation-meta.xml` files. + - Filter the `customLabels` array to include only the requested labels. + - Construct a new XML structure containing only the filtered labels. + - Build a new XML string with proper formatting and write it to a new file. +- **Interactive Prompts:** The `prompts` library is used extensively to guide the user through the selection of extraction methods (labels or LWC) and specific labels/components. +- **File System Operations:** It uses `fs-extra` for creating output directories (`extracted-translations/`) and writing the generated translation files. +- **WebSocket Communication:** It uses `WebSocketClient.requestOpenFile` to open the output directory in VS Code for easy access to the generated files. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| label
-l | option | Developer name(s) of the custom label(s), comma-separated | | | | +| lwc
-c | option | Developer name of the Lightning Web Component | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:misc:custom-label-translations --label CustomLabelName +``` + +```shell +$ sf hardis:misc:custom-label-translations --label Label1,Label2 +``` + +```shell +$ sf hardis:misc:custom-label-translations --lwc MyComponent +``` + + diff --git a/docs/hardis/misc/purge-references.md b/docs/hardis/misc/purge-references.md new file mode 100644 index 000000000..f1553b24e --- /dev/null +++ b/docs/hardis/misc/purge-references.md @@ -0,0 +1,58 @@ + +# hardis:misc:purge-references + +## Description + + +## Command Behavior + +**Purges references to specified strings within your Salesforce metadata files before deployment.** + +This command is a powerful, yet dangerous, tool designed to modify your local Salesforce metadata by removing or altering references to specific strings. It's primarily intended for advanced use cases, such as refactoring a custom field's API name (e.g., changing a Master-Detail relationship to a Lookup) where direct string replacement across many files is necessary. + +**USE WITH EXTREME CAUTION AND CAREFULLY READ ALL MESSAGES!** Incorrect usage can lead to data loss or metadata corruption. + +Key functionalities: + +- **Reference String Input:** You can provide a comma-separated list of strings (e.g., `Affaire__c,MyField__c`) that you want to find and modify within your metadata. +- **Automatic Related Field Inclusion:** If a custom field API name (ending with `__c`) is provided, it automatically includes its relationship name (ending with `__r`) in the list of references to purge, ensuring comprehensive cleanup. +- **Source Synchronization Check:** Prompts you to confirm if your local sources are up-to-date with the target org, offering to retrieve metadata if needed. 
+- **Targeted File Scan:** Scans `.cls`, `.trigger`, and `.xml` files within your SFDX project to identify occurrences of the specified reference strings. +- **Configurable Replacements:** Applies predefined replacement rules based on file type (e.g., Apex classes, XML files) to modify the content where references are found. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Input:** Uses `prompts` to get the list of reference strings from the user if not provided via flags. +- **Metadata Retrieval:** If the user indicates that local sources are not up-to-date, it executes `sf project retrieve start` to fetch the latest metadata from the target org. +- **File System Scan:** It uses `glob` to efficiently find all relevant source files (`.cls`, `.trigger`, `.xml`) within the project's package directories. +- **Content Matching:** Reads the content of each source file and checks for the presence of any of the specified reference strings. + +The core utility function for replacements is called `applyAllReplacementsDefinitions`. It is responsible for iterating through the identified files and applying the defined replacement rules. These rules are structured to target specific patterns (for example, `,{{REF}},` or `{{REF}}[ |=].+` in Apex code) and replace them with a desired string (often an empty string or a modified version). + +- **Regular Expressions:** The replacement rules heavily rely on regular expressions (`regex`) to precisely match and modify the content. +- **User Feedback:** Provides real-time feedback using `ora` for spinners and `uxLog` for logging messages about the progress and results of the operation. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| references
-r | option | Comma-separated list of references to find in metadatas | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:misc:purge-references +``` + + diff --git a/docs/hardis/misc/servicenow-report.md b/docs/hardis/misc/servicenow-report.md new file mode 100644 index 000000000..724289176 --- /dev/null +++ b/docs/hardis/misc/servicenow-report.md @@ -0,0 +1,83 @@ + +# hardis:misc:servicenow-report + +## Description + +This command retrieves user stories from Salesforce and enriches them with data from ServiceNow. + +Define the following environment variables (in CICD variables or locally in a **.env** file): + +- SERVICENOW_URL: The base URL of the ServiceNow API (ex: https://your-instance.service-now.com/) +- SERVICENOW_USERNAME: The username for ServiceNow API authentication. +- SERVICENOW_PASSWORD: The password for ServiceNow API authentication. + +You also need to define JSON configuration file(s) in folder **config/user-stories/** + +Example: + +```json +{ + "userStoriesConfig": { + "fields": [ + "Id", + "Name", + "Ticket_Number__c", + "copado__User_Story_Title__c", + "CreatedBy.Name", + "copado__Release__r.Name", + "copado__Environment__r.Name" + ], + "table": "copado__User_Story__c", + "where": "copado__Environment__r.Name ='UAT'", + "whereChoices": { + "UAT all": "copado__Environment__r.Name ='UAT'", + "UAT postponed": "copado__Environment__r.Name ='UAT' AND copado__Release__r.Name = 'postponed'", + "UAT in progress": "copado__Environment__r.Name ='UAT' AND copado__Release__r.Name != 'postponed' AND copado__Release__r.Name != 'cancelled'" + }, + "orderBy": "Ticket_Number__c ASC", + "ticketField": "Ticket_Number__c", + "reportFields": [ + { "key": "US Name", "path": "Name" }, + { "key": "US SN Identifier", "path": "Ticket_Number__c" }, + { "key": "US Title", "path": "copado__User_Story_Title__c" }, + { "key": "US Created By", "path": "CreatedBy.Name" }, + { "key": "US Environment", "path": 
"copado__Environment__r.Name" }, + { "key": "US Release", "path": "copado__Release__r.Name" }, + { "key": "SN Identifier", "path": "serviceNowInfo.number", "default": "NOT FOUND" }, + { "key": "SN Title", "path": "serviceNowInfo.short_description", "default": "NOT FOUND" }, + { "key": "SN Status", "path": "serviceNowInfo.state", "default": "NOT FOUND" }, + { "key": "SN Created By", "path": "serviceNowInfo.sys_created_by", "default": "NOT FOUND" }, + { "key": "SN URL", "special": "serviceNowTicketUrl" } + ] + }, + "serviceNowConfig": { + "tables": [ + { "tableName": "demand" }, + { "tableName": "incident" } + ] + } +} +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:--------------------|:-------:|:------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| config
-c | option | Path to JSON config file containing user stories and ServiceNow configuration | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| where-choice
-w | option | Where selection for user stories. If not provided, you will be prompted to select one from the config file. | | | | + +## Examples + +```shell +$ sf hardis:misc:servicenow-report +``` + + diff --git a/docs/hardis/misc/toml2csv.md b/docs/hardis/misc/toml2csv.md index 7cd2379cd..4c103c9bf 100644 --- a/docs/hardis/misc/toml2csv.md +++ b/docs/hardis/misc/toml2csv.md @@ -1,43 +1,82 @@ - + # hardis:misc:toml2csv ## Description -Split TOML file into distinct CSV files + +## Command Behavior + +**Splits a TOML (Tom's Obvious, Minimal Language) file into multiple CSV files, applying transformations and filters based on a JSON configuration.** + +This command is designed for data processing workflows where data is initially stored in a TOML-like format and needs to be converted into structured CSV files for import into Salesforce or other systems. It offers powerful capabilities for data manipulation and cleansing during the conversion process. + +Key functionalities: + +- **TOML Parsing:** Reads an input TOML file, identifying sections (e.g., `[COMPTES]`) and processing data lines within each section. +- **Configurable Transformations:** Applies transformations to individual data fields based on a JSON configuration file (`transfoConfig.json`). This can include: + - **Date Formatting:** Reformatting date strings to a desired output format. + - **Enum Transcoding:** Mapping input values to predefined output values using lookup tables (enums). + - **Concatenation:** Combining multiple input fields into a single output field. + - **Record Type ID Resolution:** Dynamically retrieving Salesforce Record Type IDs. +- **Data Filtering:** Filters data lines based on specified criteria (e.g., date ranges, parent ID existence, column values), allowing you to exclude irrelevant data from the output. +- **Duplicate Removal:** Optionally removes duplicate lines from the output CSV files. 
+- **Error Handling and Reporting:** Catches transformation errors, logs them, and can output problematic lines to separate error CSV files for review. +- **CSV Output:** Generates one or more CSV files, with configurable separators and headers, ready for Salesforce Data Loader or other import tools. + +
+Technical explanations + +The command's technical implementation involves: + +- **File I/O:** Uses `fs-extra` for file system operations (reading TOML, writing CSVs, creating directories) and `readline` for efficient line-by-line processing of large TOML files. +- **Configuration Loading:** Reads and parses the `transfoConfig.json` file, which defines the mapping rules, transformations, and filters. It also loads external enum files if specified in the configuration. +- **Data Processing Pipeline:** Iterates through each line of the TOML file: + - Identifies section headers to determine the current data context. + - Parses data lines based on the input separator. + - Applies filters defined in `transfoConfig` to decide whether to process or skip a line. + - Performs data transformations (date formatting, enum lookups, concatenations) as specified in the `transfoConfig`. + - Resolves Salesforce Record Type IDs by querying the target org using `getRecordTypeId`. + - Formats the output CSV cells, handling special characters and separators. + - Writes the transformed data to the appropriate CSV output stream. +- **Error Management:** Catches exceptions during transformation and logs detailed error messages, including the problematic line and the reason for the error. +- **Progress Indication:** Uses `ora` for a command-line spinner to provide visual feedback on the processing progress. +- **Statistics Collection:** Tracks various statistics, such as the number of processed lines, successful lines, error lines, and filtered lines, providing a summary at the end. +- **File Copying:** Optionally copies generated CSV files to other specified locations. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:-------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| filtersections
-l | option | List of sections to process (if not set, all sections will be processed) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputdir
-o | option | Output directory | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| skiptransfo
-s | boolean | Do not apply transformation to input data | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| tomlfile
-f | option | Input TOML file path | | | | -| transfoconfig
-t | option | Path to JSON config file for mapping and transformation | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:-------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| filtersections
-l | option | List of sections to process (if not set, all sections will be processed) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputdir
-z | option | Output directory | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| skiptransfo
-s | boolean | Do not apply transformation to input data | | | | +| target-org
-o | option | undefined | | | | +| tomlfile
-f | option | Input TOML file path | | | | +| transfoconfig
-t | option | Path to JSON config file for mapping and transformation | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:misc:toml2csv --tomlfile 'D:/clients/toto/V1_full.txt' +$ sf hardis:misc:toml2csv --tomlfile 'D:/clients/toto/V1_full.txt' ``` ```shell -sfdx hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' +$ sf hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' ``` ```shell -sfdx hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' --outputdir 'C:/tmp/rrrr' +$ sf hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' --outputdir 'C:/tmp/rrrr' ``` ```shell -NODE_OPTIONS=--max_old_space_size=9096 sfdx hardis:misc:toml2csv --skiptransfo --tomlfile './input/V1.txt' --outputdir './output' --filtersections 'COMPTES,SOUS' +$ NODE_OPTIONS=--max_old_space_size=9096 sf hardis:misc:toml2csv --skiptransfo --tomlfile './input/V1.txt' --outputdir './output' --filtersections 'COMPTES,SOUS' ``` diff --git a/docs/hardis/org/community/update.md b/docs/hardis/org/community/update.md new file mode 100644 index 000000000..672486091 --- /dev/null +++ b/docs/hardis/org/community/update.md @@ -0,0 +1,57 @@ + +# hardis:org:community:update + +## Description + + +## Command Behavior + +**Updates the status of one or more Salesforce Experience Cloud (Community) networks.** + +This command provides a way to programmatically change the status of your Salesforce Communities, allowing you to manage their availability. This is particularly useful for: + +- **Maintenance:** Taking communities offline for planned maintenance (`DownForMaintenance`). +- **Activation/Deactivation:** Bringing communities online or offline (`Live`, `DownForMaintenance`). +- **Automation:** Integrating community status changes into CI/CD pipelines or scheduled jobs. 
+ +Key functionalities: + +- **Network Selection:** You can specify one or more community network names (separated by commas) using the `--name` flag. +- **Status Update:** You can set the new status for the selected communities using the `--status` flag. Supported values are `Live` and `DownForMaintenance`. +- **Confirmation Prompt:** In non-CI environments, it provides a confirmation prompt before executing the update, ensuring intentional changes. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Query:** It first queries the Salesforce `Network` object using SOQL to retrieve the `Id`, `Name`, and `Status` of the specified communities. This ensures that only existing communities are targeted. +- **SObject Update:** It then constructs an array of `Network` sObjects with their `Id` and the new `Status` and performs a DML update operation using `conn.sobject("Network").update()`. The `allOrNone: false` option is used to allow partial success in case some updates fail. +- **Error Handling and Reporting:** It iterates through the update results, logging success or failure for each community. It also provides a summary of successful and erroneous updates. +- **User Interaction:** Uses `prompts` to confirm the update action with the user when not running in a CI environment. +- **Salesforce Connection:** Establishes a connection to the target Salesforce org using the `target-org` flag. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:-----------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| name
-n | option | List of Networks Names that you want to update, separated by comma | | | | +| status
-s | option | New status for the community, available values are: Live, DownForMaintenance | | | | +| target-org
-o | option | undefined | | | | + +## Examples + +```shell +$ sf hardis:org:community:update --name 'MyNetworkName' --status DownForMaintenance +``` + +```shell +$ sf hardis:org:community:update --name 'MyNetworkName,MySecondNetworkName' --status Live +``` + + diff --git a/docs/hardis/org/configure/data.md b/docs/hardis/org/configure/data.md index f7009140d..69598ec3a 100644 --- a/docs/hardis/org/configure/data.md +++ b/docs/hardis/org/configure/data.md @@ -1,29 +1,57 @@ - + # hardis:org:configure:data ## Description -Configure Data Export/Import with a [SFDX Data Loader](https://help.sfdmu.com/) Project -See article: +## Command Behavior + +**Configures a Salesforce Data Migration Utility (SFDMU) project for data export and import operations.** + +This command assists in setting up SFDMU workspaces, which are essential for managing data within your Salesforce environments. It streamlines the creation of `export.json` files and related configurations, enabling efficient data seeding, migration, and synchronization. + +Key functionalities: + +- **Template-Based Configuration:** Allows you to choose from predefined SFDMU templates or start with a blank configuration. Templates can pre-populate `export.json` with common data migration scenarios. +- **Interactive Setup:** Guides you through the process of defining the SFDMU project folder name, label, and description. +- **`export.json` Generation:** Creates the `export.json` file, which is the core configuration file for SFDMU, defining objects to export/import, queries, and operations. +- **Additional File Generation:** Can generate additional configuration files, such as a `badwords.json` file for data filtering scenarios. +- **Scratch Org Integration:** Offers to automatically configure the SFDMU project to be used for data import when initializing a new scratch org, ensuring consistent test data across development environments. 
+ +See this article for a practical example: [![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) +
+Technical explanations + +The command's technical implementation involves: + +- **SFDMU Integration:** It acts as a setup wizard for SFDMU, generating the necessary configuration files that the `sfdmu` plugin consumes. +- **Interactive Prompts:** Uses the `prompts` library to gather user input for various configuration parameters, such as the data path, label, and description. +- **File System Operations:** Employs `fs-extra` to create directories (e.g., `data/your-project-name/`) and write the `export.json` and any additional configuration files. +- **JSON Manipulation:** Constructs the `export.json` content dynamically based on user input and selected templates, including defining objects, queries, and operations. +- **PascalCase Conversion:** Uses `pascalcase` to format the SFDMU folder name consistently. +- **Configuration Persistence:** Updates the project's `sfdx-hardis.yml` file (via `setConfig`) to include the newly configured data package if it's intended for scratch org initialization. +- **WebSocket Communication:** Uses `WebSocketClient.requestOpenFile` to open the generated `export.json` file in VS Code, facilitating immediate configuration. +- **Required Plugin Check:** Explicitly lists `sfdmu` as a required plugin, ensuring the necessary dependency is present. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:configure:data +$ sf hardis:org:configure:data ``` diff --git a/docs/hardis/org/configure/files.md b/docs/hardis/org/configure/files.md index 1174cbc7f..73085e68d 100644 --- a/docs/hardis/org/configure/files.md +++ b/docs/hardis/org/configure/files.md @@ -1,29 +1,57 @@ - + # hardis:org:configure:files ## Description -Configure export of file attachments from a Salesforce org -See article below +## Command Behavior + +**Configures a project for exporting file attachments from a Salesforce org.** + +This command streamlines the setup of configurations for mass downloading files (such as Notes, Attachments, or Salesforce Files) associated with Salesforce records. It's particularly useful for data backups, migrations, or integrating Salesforce files with external systems. + +Key functionalities: + +- **Template-Based Configuration:** Allows you to choose from predefined templates for common file export scenarios or start with a blank configuration. Templates can pre-populate the export settings. +- **Interactive Setup:** Guides you through defining the export project folder name and other export parameters. +- **`export.json` Generation:** Creates an `export.json` file within the designated project folder. This file contains the configuration for the file export operation, including: + - **SOQL Query:** A SOQL query to select the parent records from which files will be exported. + - **File Types:** Specifies which types of files (e.g., `ContentVersion`, `Attachment`) to include. 
+ - **Output Folder/File Naming:** Defines how the exported files and their containing folders will be named based on record fields. + - **Overwrite Options:** Controls whether existing files or parent records should be overwritten during the export. + +See this article for a practical example: [![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) +
+Technical explanations + +The command's technical implementation involves: + +- **Template Selection:** It uses `selectTemplate` to present predefined file export templates or a blank option to the user. +- **Interactive Prompts:** The `promptFilesExportConfiguration` utility is used to gather detailed export settings from the user, such as the SOQL query, file types, and naming conventions. +- **File System Operations:** Employs `fs-extra` to create the project directory (`files/your-project-name/`) and write the `export.json` configuration file. +- **PascalCase Conversion:** Uses `pascalcase` to format the files export path consistently. +- **JSON Serialization:** Serializes the collected export configuration into a JSON string and writes it to `export.json`. +- **WebSocket Communication:** Uses `WebSocketClient.requestOpenFile` to open the generated `export.json` file in VS Code, facilitating immediate configuration. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:configure:files +$ sf hardis:org:configure:files ``` diff --git a/docs/hardis/org/configure/monitoring.md b/docs/hardis/org/configure/monitoring.md index fbf3ade38..35d715c4c 100644 --- a/docs/hardis/org/configure/monitoring.md +++ b/docs/hardis/org/configure/monitoring.md @@ -1,27 +1,59 @@ - + # hardis:org:configure:monitoring ## Description -Configure monitoring of an org + +## Command Behavior + +**Configures the monitoring of a Salesforce org within a dedicated Git repository.** + +This command streamlines the setup of continuous monitoring for a Salesforce organization, ensuring that changes and health metrics are tracked and reported. It is designed to be run within a Git repository specifically dedicated to monitoring configurations. + +Key functionalities include: + +- **Git Repository Validation:** Ensures the current Git repository's name contains "monitoring" to enforce best practices for separating monitoring configurations from deployment sources. +- **Prerequisite Check:** Guides the user to confirm that necessary monitoring prerequisites (CI/CD variables, permissions) are configured on their Git server. +- **Org Selection:** Prompts the user to select or connect to the Salesforce org they wish to monitor. +- **Monitoring Branch Creation:** Creates or checks out a dedicated Git branch (e.g., `monitoring_yourinstanceurl`) for the monitoring configuration. +- **SFDX Project Setup:** Initializes an SFDX project structure within the repository if it doesn't already exist, and copies default monitoring files. 
+- **Configuration File Update:** Updates the local `.sfdx-hardis.yml` file with the target org's username and instance URL. +- **SSL Certificate Generation:** Generates an SSL certificate for secure authentication to the monitored org. +- **Automated Commit and Push:** Offers to automatically commit and push the generated configuration files to the remote Git repository. +- **Scheduling Guidance:** Provides instructions and links for scheduling the monitoring job on the Git server. + +
+Technical explanations + +The command's technical implementation involves a series of Git operations, file system manipulations, and Salesforce CLI interactions: + +- **Git Operations:** Utilizes `ensureGitRepository`, `getGitRepoName`, `execCommand` (for `git add`, `git stash`), `ensureGitBranch`, and `gitAddCommitPush` to manage the Git repository, branches, and commits. +- **Interactive Prompts:** Employs the `prompts` library to interact with the user for confirmations and selections. +- **File System Management:** Uses `fs-extra` for copying default monitoring files (`defaults/monitoring`) and managing the SFDX project structure. +- **Salesforce CLI Integration:** Calls `sf project generate` to create a new SFDX project and uses `promptOrg` for Salesforce org authentication and selection. +- **Configuration Management:** Updates the `.sfdx-hardis.yml` file using `setInConfigFile` to store org-specific monitoring configurations. +- **SSL Certificate Generation:** Leverages `generateSSLCertificate` to create the necessary SSL certificates for JWT-based authentication to the Salesforce org. +- **External Tool Integration:** Requires `openssl` to be installed on the system for SSL certificate generation. +- **WebSocket Communication:** Uses `WebSocketClient.sendRunSfdxHardisCommandMessage` to restart the command in VS Code if the default org changes, and `WebSocketClient.sendRefreshStatusMessage` to update the status. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| orginstanceurl | option | Org instance url (technical param, do not use manually) | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| orginstanceurl | option | Org instance url (technical param, do not use manually) | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:configure:monitoring +$ sf hardis:org:configure:monitoring ``` diff --git a/docs/hardis/org/connect.md b/docs/hardis/org/connect.md index 66d350a04..e98494036 100644 --- a/docs/hardis/org/connect.md +++ b/docs/hardis/org/connect.md @@ -1,25 +1,47 @@ - + # hardis:org:connect ## Description -Connect to an org without setting it as default username, then proposes to open the org in web browser - + +## Command Behavior + +**Connects to a Salesforce org without setting it as the default username, and optionally opens the org in a web browser.** + +This command provides a quick way to establish a connection to a Salesforce organization for one-off tasks or when you don't want to change your default org. It's useful for accessing different environments without disrupting your primary development setup. + +Key functionalities: + +- **Org Selection:** Prompts the user to select an existing Salesforce org or connect to a new one. +- **Non-Default Connection:** Ensures that the selected org is connected but does not set it as the default username for subsequent Salesforce CLI commands. +- **Browser Launch (Optional):** Offers to open the connected org directly in your default web browser, providing immediate access to the Salesforce UI. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Org Prompt:** Uses the `promptOrg` utility to display a list of available Salesforce orgs and allows the user to select one or initiate a new authentication flow. +- **Salesforce CLI Integration:** Internally, it leverages Salesforce CLI commands to establish the connection to the chosen org. It does not use `sf config set target-org` to avoid changing the default org. +- **Browser Launch:** If the user opts to open the org in a browser, it executes the `sf org open` command, passing the selected org's username as the target. +- **Environment Awareness:** Checks the `isCI` flag to determine whether to offer the browser launch option, as it's typically not applicable in continuous integration environments. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:connect +$ sf hardis:org:connect ``` diff --git a/docs/hardis/org/create.md b/docs/hardis/org/create.md index a414f7ef0..588ca2a1c 100644 --- a/docs/hardis/org/create.md +++ b/docs/hardis/org/create.md @@ -1,26 +1,55 @@ - + # hardis:org:create ## Description -Create and initialize sandbox org + +## Command Behavior + +**Creates and initializes a Salesforce sandbox org.** + +This command automates the process of provisioning a new sandbox environment, making it ready for development or testing. It handles various aspects of sandbox creation and initial setup, reducing manual effort and ensuring consistency. + +Key functionalities: + +- **Sandbox Definition:** Uses a `project-sandbox-def.json` file (if present in `config/`) to define sandbox properties like name, description, license type, and source sandbox. If not provided, it uses default values. +- **Dynamic Naming:** Generates a unique sandbox alias based on the current username, Git branch, and a timestamp. +- **Sandbox Creation:** Executes the Salesforce CLI command to create the sandbox, including setting it as the default org and waiting for its completion. +- **User Update:** Updates the main sandbox user's details (e.g., Last Name, First Name) and can fix country values or marketing user permissions if needed. +- **Initialization Scripts:** Runs predefined Apex scripts, assigns permission sets, and imports initial data into the newly created sandbox, based on configurations in your project. +- **Error Handling:** Provides detailed error messages for common sandbox creation issues, including Salesforce-specific errors. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It loads project and user configurations using `getConfig` to retrieve settings like `projectName`, `devHubAlias`, and `userEmail`. +- **Git Integration:** Retrieves the current Git branch name using `getCurrentGitBranch` to inform sandbox naming. +- **File System Operations:** Uses `fs-extra` to manage sandbox definition files (reading `project-sandbox-def.json`, writing a user-specific definition file) and temporary directories. +- **Salesforce CLI Execution:** Executes Salesforce CLI commands (`sf org create sandbox`, `sf data get record`, `sf data update record`, `sf org open`) using `execSfdxJson` for sandbox creation, user updates, and opening the org in a browser. +- **Cache Management:** Clears the Salesforce CLI org list cache (`clearCache('sf org list')`) to ensure the newly created sandbox is immediately recognized. +- **Initialization Utilities:** Calls a suite of utility functions (`initPermissionSetAssignments`, `initApexScripts`, `initOrgData`) to perform post-creation setup tasks. +- **Error Assertions:** Uses `assert` to check the success of Salesforce CLI commands and provides custom error messages for better debugging. +- **WebSocket Communication:** Uses `WebSocketClient.sendRefreshStatusMessage` to notify connected VS Code clients about the new sandbox. +- **Required Plugin Check:** Explicitly lists `sfdmu` as a required plugin, indicating its role in data initialization. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:create +$ sf hardis:org:create ``` diff --git a/docs/hardis/org/data/delete.md b/docs/hardis/org/data/delete.md index 20504e534..8ef046fe1 100644 --- a/docs/hardis/org/data/delete.md +++ b/docs/hardis/org/data/delete.md @@ -1,27 +1,56 @@ - + # hardis:org:data:delete ## Description -Delete data in org using sfdmu + +## Command Behavior + +**Deletes records in multiple Salesforce objects using an SFDMU (Salesforce Data Migration Utility) workspace.** + +This command provides a powerful and controlled way to remove data from your Salesforce orgs based on configurations defined in an SFDMU workspace. It's particularly useful for: + +- **Data Cleanup:** Removing test data, obsolete records, or sensitive information. +- **Environment Reset:** Preparing sandboxes for new development cycles by clearing specific data sets. +- **Compliance:** Deleting data to meet regulatory requirements. + +**Important Considerations for Production Environments:** + +If you intend to run this command in a production environment, you must: + +- Set `runnableInProduction` to `true` in your `export.json` file within the SFDMU workspace. +- Define `sfdmuCanModify: YOUR_INSTANCE_URL` in your branch-specific configuration file (e.g., `config/branches/.sfdx-hardis.YOUR_BRANCH.yml`) to explicitly authorize data modification for that instance. + +
+Technical explanations + +The command's technical implementation relies heavily on the SFDMU plugin: + +- **SFDMU Integration:** It leverages the `sfdmu` plugin to perform the actual data deletion operations. The command acts as a wrapper, providing an assisted interface for SFDMU execution. +- **Workspace Selection:** If the SFDMU workspace path is not provided via the `--path` flag, it interactively prompts the user to select a data workspace using `selectDataWorkspace`. +- **Org Selection:** It ensures that a target Salesforce org is selected (either via the `--target-org` flag or through an interactive prompt using `promptOrgUsernameDefault`) to specify where the data deletion will occur. +- **`deleteData` Utility:** The core logic for executing the SFDMU deletion process is encapsulated within the `deleteData` utility function, which takes the SFDMU workspace path and the target username as arguments. +- **Environment Awareness:** It checks the `isCI` flag to determine whether to run in an interactive mode (prompting for user input) or a non-interactive mode (relying solely on command-line flags). +- **Required Plugin:** It explicitly lists `sfdmu` as a required plugin, ensuring that the necessary dependency is in place before execution. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Path to the sfdmu workspace folder | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Path to the sfdmu workspace folder | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:data:delete +$ sf hardis:org:data:delete ``` diff --git a/docs/hardis/org/data/export.md b/docs/hardis/org/data/export.md index 6ac8f3234..c09369c7e 100644 --- a/docs/hardis/org/data/export.md +++ b/docs/hardis/org/data/export.md @@ -1,32 +1,59 @@ - + # hardis:org:data:export ## Description -Export data from an org using a [SFDX Data Loader](https://help.sfdmu.com/) Project -See article: +## Command Behavior + +**Exports data from a Salesforce org using an SFDMU (Salesforce Data Migration Utility) project.** + +This command facilitates the extraction of data from your Salesforce environments based on configurations defined in an SFDMU workspace. It's a powerful tool for various data-related tasks, including: + +- **Data Backup:** Creating snapshots of your Salesforce data. +- **Data Migration:** Extracting data for transfer to another Salesforce org or external system. +- **Reporting and Analysis:** Exporting specific datasets for detailed analysis outside of Salesforce. +- **Data Seeding:** Preparing data for import into other environments. + +Key functionalities: + +- **SFDMU Workspace Integration:** Leverages an existing SFDMU workspace (defined by an `export.json` file) to determine which objects and records to export, along with any filtering or transformation rules. +- **Interactive Workspace Selection:** If the SFDMU workspace path is not provided via the `--path` flag, it interactively prompts the user to select one. +- **Org Selection:** Ensures that a target Salesforce org is selected (either via the `--target-org` flag or through an interactive prompt) to specify the source of the data export. 
+ +See this article for a practical example: [![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) +
+Technical explanations + +The command's technical implementation relies heavily on the SFDMU plugin: + +- **SFDMU Integration:** It acts as a wrapper around the `sfdmu` plugin, which performs the actual data export operations. The command provides an assisted interface for SFDMU execution. +- **`exportData` Utility:** The core logic for executing the SFDMU export process is encapsulated within the `exportData` utility function, which takes the SFDMU workspace path and the source username as arguments. +- **Interactive Prompts:** Uses `selectDataWorkspace` to allow the user to choose an SFDMU project and `promptOrgUsernameDefault` for selecting the source Salesforce org when not running in a CI environment. +- **Environment Awareness:** Checks the `isCI` flag to determine whether to run in an interactive mode (prompting for user input) or a non-interactive mode (relying solely on command-line flags). +- **Required Plugin:** It explicitly lists `sfdmu` as a required plugin, ensuring that the necessary dependency is in place before execution. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Path to the sfdmu workspace folder | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Path to the sfdmu workspace folder | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:data:export +$ sf hardis:org:data:export ``` diff --git a/docs/hardis/org/data/import.md b/docs/hardis/org/data/import.md index a2a85da7b..aa331b517 100644 --- a/docs/hardis/org/data/import.md +++ b/docs/hardis/org/data/import.md @@ -1,10 +1,15 @@ - + # hardis:org:data:import ## Description Import/Load data in an org using a [SFDX Data Loader](https://help.sfdmu.com/) Project +If you need to run this command in a production org, you need to either: + +- Define **sfdmuCanModify** in your .sfdx-hardis.yml config file. (Example: `sfdmuCanModify: prod-instance.my.salesforce.com`) +- Define an environment variable SFDMU_CAN_MODIFY. (Example: `SFDMU_CAN_MODIFY=prod-instance.my.salesforce.com`) + See article: [![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) @@ -12,21 +17,20 @@ See article: ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Path to the sfdmu workspace folder | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Path to the sfdmu workspace folder | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:data:import +$ sf hardis:org:data:import ``` diff --git a/docs/hardis/org/diagnose/audittrail.md b/docs/hardis/org/diagnose/audittrail.md index 557cc08ed..4286aca26 100644 --- a/docs/hardis/org/diagnose/audittrail.md +++ b/docs/hardis/org/diagnose/audittrail.md @@ -1,10 +1,12 @@ - + # hardis:org:diagnose:audittrail ## Description Export Audit trail into a CSV file with selected criteria, and highlight suspect actions +Also detects updates of Custom Settings values (disable by defining `SKIP_AUDIT_TRAIL_CUSTOM_SETTINGS=true`) + Regular setup actions performed in major orgs are filtered. - "" @@ -17,6 +19,10 @@ Regular setup actions performed in major orgs are filtered. - Custom App Licenses - addeduserpackagelicense - granteduserpackagelicense + - revokeduserpackagelicense +- Customer Portal + - createdcustomersuccessuser + - CSPUserDisabled - Currency - updateddatedexchrate - Data Management @@ -24,12 +30,15 @@ Regular setup actions performed in major orgs are filtered. - Email Administration - dkimRotationPreparationSuccessful - dkimRotationSuccessful +- External Objects + - xdsEncryptedFieldChange - Groups - groupMembership - Holidays - holiday_insert - Inbox mobile and legacy desktop apps - enableSIQUserNonEAC + - siqUserAcceptedTOS - Manage Users - activateduser - createduser @@ -41,16 +50,24 @@ Regular setup actions performed in major orgs are filtered. 
- changedinteractionuseronoff - changedmarketinguseroffon - changedmarketinguseronoff + - changedofflineuseroffon + - changedprofileforuserstdtostd - changedprofileforuser - changedprofileforusercusttostd - changedprofileforuserstdtocust - changedroleforusertonone - changedroleforuser - changedroleforuserfromnone + - changedUserAdminVerifiedStatusVerified - changedUserEmailVerifiedStatusUnverified - changedUserEmailVerifiedStatusVerified + - changedknowledgeuseroffon + - changedsfcontentuseroffon + - changedsupportuseroffon + - changedusername - changedUserPhoneNumber - changedUserPhoneVerifiedStatusUnverified + - changedUserPhoneVerifiedStatusVerified - deactivateduser - deleteAuthenticatorPairing - deleteTwoFactorInfo2 @@ -68,6 +85,8 @@ Regular setup actions performed in major orgs are filtered. - PermSetLicenseUnassign - registeredUserPhoneNumber - resetpassword + - suNetworkAdminLogin + - suNetworkAdminLogout - suOrgAdminLogin - suOrgAdminLogout - unfrozeuser @@ -98,39 +117,48 @@ monitoringAllowedSectionsActions: "Some section": [] // Will ignore all actions from such section "Some other section": ["actionType1","actionType2","actionType3"] // Will ignore only those 3 actions from section "Some other section". Other actions in the same section will be considered as suspect. ``` - + +## Excel output example + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-monitoring-audittrail-excel.jpg) + +## Local output example + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-monitoring-audittrail-local.jpg) + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-suspect-audit-trail/) and can output Grafana, Slack and MsTeams Notifications. 
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| excludeusers
-e | option | Comma-separated list of usernames to exclude | | | | -| json | boolean | format output as json | | | | -| lastndays
-t | option | Number of days to extract from today (included) | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| excludeusers
-e | option | Comma-separated list of usernames to exclude | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| lastndays
-t | option | Number of days to extract from today (included) | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:diagnose:audittrail +$ sf hardis:org:diagnose:audittrail ``` ```shell -sfdx hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com +$ sf hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com ``` ```shell -sfdx hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com,bertrand@titi.com +$ sf hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com,bertrand@titi.com ``` ```shell -sfdx hardis:org:diagnose:audittrail --lastndays 5 +$ sf hardis:org:diagnose:audittrail --lastndays 5 ``` diff --git a/docs/hardis/org/diagnose/instanceupgrade.md b/docs/hardis/org/diagnose/instanceupgrade.md new file mode 100644 index 000000000..14bd85a07 --- /dev/null +++ b/docs/hardis/org/diagnose/instanceupgrade.md @@ -0,0 +1,52 @@ + +# hardis:org:diagnose:instanceupgrade + +## Description + + +## Command Behavior + +**Retrieves and displays the scheduled upgrade date for a Salesforce org's instance.** + +This command provides crucial information about when your Salesforce instance will be upgraded to the next major release (Spring, Summer, or Winter). This is vital for release planning, testing, and ensuring compatibility with upcoming Salesforce features. + +Key functionalities: + +- **Instance Identification:** Determines the Salesforce instance name of your target org. +- **Upgrade Date Retrieval:** Fetches the planned start time of the next major core service upgrade for that instance from the Salesforce Status API. +- **Days Until Upgrade:** Calculates and displays the number of days remaining until the next major upgrade. +- **Severity-Based Logging:** Adjusts the log severity (info, warning) based on the proximity of the upgrade date, providing a visual cue for urgency. 
+- **Notifications:** Sends notifications to configured channels (e.g., Slack, MS Teams, Grafana) with the upgrade information, making it suitable for automated monitoring. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Query:** It first queries the `Organization` object in Salesforce to get the `InstanceName` of the target org. +- **Salesforce Status API Integration:** It makes an HTTP GET request to the Salesforce Status API (`https://api.status.salesforce.com/v1/instances/{instanceName}/status`) to retrieve detailed information about the instance, including scheduled maintenances. +- **Data Parsing:** It parses the JSON response from the Status API to extract the relevant major release upgrade information. +- **Date Calculation:** Uses the `moment` library to calculate the difference in days between the current date and the planned upgrade date. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications, including the instance name, upgrade date, and days remaining, along with relevant metrics for monitoring dashboards. +- **User Feedback:** Provides clear messages to the user about the upgrade status and proximity. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:diagnose:instanceupgrade +``` + + diff --git a/docs/hardis/org/diagnose/legacyapi.md b/docs/hardis/org/diagnose/legacyapi.md index e7abf247e..9250f5ca1 100644 --- a/docs/hardis/org/diagnose/legacyapi.md +++ b/docs/hardis/org/diagnose/legacyapi.md @@ -1,4 +1,4 @@ - + # hardis:org:diagnose:legacyapi ## Description @@ -10,39 +10,39 @@ See article below [![Handle Salesforce API versions Deprecation like a pro](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deprecated-api.jpg)](https://nicolas.vuillamy.fr/handle-salesforce-api-versions-deprecation-like-a-pro-335065f52238) +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-deprecated-api-calls/) and can output Grafana, Slack and MsTeams Notifications. ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| eventtype
-e | option | Type of EventLogFile event to analyze | ApiTotalUsage | | | -| json | boolean | format output as json | | | | -| limit
-l | option | Number of latest EventLogFile events to analyze | 999 | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| eventtype
-e | option | Type of EventLogFile event to analyze | ApiTotalUsage | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| limit
-l | option | Number of latest EventLogFile events to analyze | 999 | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:diagnose:legacyapi +$ sf hardis:org:diagnose:legacyapi ``` ```shell -sfdx hardis:org:diagnose:legacyapi -u hardis@myclient.com +$ sf hardis:org:diagnose:legacyapi -u hardis@myclient.com ``` ```shell -sfdx hardis:org:diagnose:legacyapi --outputfile 'c:/path/to/folder/legacyapi.csv' +$ sf hardis:org:diagnose:legacyapi --outputfile 'c:/path/to/folder/legacyapi.csv' ``` ```shell -sfdx hardis:org:diagnose:legacyapi -u hardis@myclient.com --outputfile ./tmp/legacyapi.csv +$ sf hardis:org:diagnose:legacyapi -u hardis@myclient.com --outputfile ./tmp/legacyapi.csv ``` diff --git a/docs/hardis/org/diagnose/licenses.md b/docs/hardis/org/diagnose/licenses.md index 77759aa63..447ac5617 100644 --- a/docs/hardis/org/diagnose/licenses.md +++ b/docs/hardis/org/diagnose/licenses.md @@ -1,28 +1,57 @@ - + # hardis:org:diagnose:licenses ## Description -Mostly used for monitoring (Grafana) but you can also use it manually :) + +**Lists and analyzes User Licenses and Permission Set Licenses subscribed and used in a Salesforce org.** + +This command provides a comprehensive overview of your Salesforce license consumption. It's particularly useful for: + +- **License Management:** Understanding which licenses are active, how many are available, and how many are being used. +- **Cost Optimization:** Identifying unused or underutilized licenses that could be reallocated or decommissioned. +- **Compliance:** Ensuring that your organization is compliant with Salesforce licensing agreements. +- **Monitoring:** Tracking license usage trends over time. + +Key functionalities: + +- **User License Details:** Retrieves information about standard and custom User Licenses, including `MasterLabel`, `Name`, `TotalLicenses`, and `UsedLicenses`. 
+- **Permission Set License Details:** Retrieves information about Permission Set Licenses, including `MasterLabel`, `PermissionSetLicenseKey`, `TotalLicenses`, and `UsedLicenses`. +- **Used Licenses Filter:** The `--usedonly` flag allows you to filter the report to show only licenses that have at least one `UsedLicenses` count greater than zero. +- **CSV Report Generation:** Generates a CSV file containing all the retrieved license information, suitable for detailed analysis. +- **Notifications:** Sends notifications to configured channels (e.g., Grafana, Slack, MS Teams) with a summary of license usage, including lists of active and used licenses. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Queries:** It executes SOQL queries against the `UserLicense` and `PermissionSetLicense` objects in Salesforce to retrieve license data. +- **Data Transformation:** It processes the query results, reformatting the data to be more readable and consistent for reporting purposes (e.g., removing `Id` and `attributes`, renaming `PermissionSetLicenseKey` to `Name`). +- **Data Aggregation:** It aggregates license information, creating a `licensesByKey` object for quick lookups and a `usedLicenses` array for a concise list of actively used licenses. +- **Report Generation:** It uses `generateCsvFile` to create the CSV report of license data. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **User Feedback:** Provides clear messages to the user about the license extraction process and the used licenses. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| usedonly
-u | boolean | Filter to have only used licenses | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| usedonly
-u | boolean | Filter to have only used licenses | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:diagnose:licenses +$ sf hardis:org:diagnose:licenses ``` diff --git a/docs/hardis/org/diagnose/releaseupdates.md b/docs/hardis/org/diagnose/releaseupdates.md new file mode 100644 index 000000000..fb93b39d1 --- /dev/null +++ b/docs/hardis/org/diagnose/releaseupdates.md @@ -0,0 +1,33 @@ + +# hardis:org:diagnose:releaseupdates + +## Description + +Export Release Updates into a CSV file with selected criteria, and highlight Release Updates that should be checked. + +Before publishing **Breaking Changes** ❌, Salesforce announce them in the setup menu [**Release Updates**](https://help.salesforce.com/s/articleView?id=sf.release_updates.htm&type=5) + +⚠️ Some of them are very important, because if you don't make the related upgrades in time (ex: before Winter 25) , your production org can crash ! + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-release-updates/) and can output Grafana, Slack and MsTeams Notifications. + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:diagnose:releaseupdates +``` + + diff --git a/docs/hardis/org/diagnose/unused-apex-classes.md b/docs/hardis/org/diagnose/unused-apex-classes.md new file mode 100644 index 000000000..6d812acaa --- /dev/null +++ b/docs/hardis/org/diagnose/unused-apex-classes.md @@ -0,0 +1,44 @@ + +# hardis:org:diagnose:unused-apex-classes + +## Description + +List all async Apex classes (Batch,Queueable,Schedulable) that has not been called for more than 365 days. + +The result class list probably can be removed from the project, and that will improve your test classes performances :) + +The number of unused day is overridable using --days option. + +The command uses queries on AsyncApexJob and CronTrigger technical tables to build the result. + +Apex Classes CreatedBy and CreatedOn fields are calculated from MIN(date from git, date from org) + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-unused-apex-classes/) and can output Grafana, Slack and MsTeams Notifications. + +![](https://sfdx-hardis.cloudity.com/assets/images/screenshot-monitoring-unused-apex-grafana.jpg) + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| days
-t | option | Number of days without any execution after which an async Apex class is considered unused. In CI, default is 180 days | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:diagnose:unused-apex-classes +``` + +```shell +$ sf hardis:org:diagnose:unused-apex-classes --days 700 +``` + + diff --git a/docs/hardis/org/diagnose/unused-connected-apps.md b/docs/hardis/org/diagnose/unused-connected-apps.md new file mode 100644 index 000000000..c80f12a1e --- /dev/null +++ b/docs/hardis/org/diagnose/unused-connected-apps.md @@ -0,0 +1,83 @@ + +# hardis:org:diagnose:unused-connected-apps + +## Description + + +## Command Behavior + +**Identifies and reports on potentially unused Connected Apps in a Salesforce org, suggesting candidates for deletion or deactivation.** + +This command helps improve org security and reduce technical debt by pinpointing Connected Apps that are no longer actively used. Connected Apps can pose security risks if left unmonitored, and cleaning them up contributes to a healthier Salesforce environment. + +Key functionalities: + +- **Connected App Data Collection:** Gathers information about all Connected Apps in the org, including creation and last modified dates, and associated users. +- **Usage Analysis:** Analyzes `LoginHistory` and `OAuthToken` records to determine the last usage date of each Connected App. +- **Inactivity Detection:** Flags Connected Apps as potentially unused if they have no recent login history or OAuth token usage. +- **Accessibility Check:** Examines Connected App metadata to identify if they are accessible (e.g., if they require admin approval and have no profiles or permission sets assigned). +- **Ignored Apps:** Automatically ignores a predefined list of common Salesforce Connected Apps (e.g., `Salesforce CLI`, `Salesforce Mobile Dashboards`). You can extend this list by defining the `ALLOWED_INACTIVE_CONNECTED_APPS` environment variable. 
+- **CSV Report Generation:** Generates a CSV file containing details of all analyzed Connected Apps, including their usage status, last usage date, and reasons for being flagged as potentially unused. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of potentially unused Connected Apps. + +**Default Ignored Connected Apps:** + +- Ant Migration Tool +- Chatter Desktop +- Chatter Mobile for BlackBerry +- Force.com IDE +- OIQ_Integration +- Salesforce CLI +- Salesforce Files +- Salesforce Mobile Dashboards +- Salesforce Touch +- Salesforce for Outlook +- SalesforceA +- SalesforceA for Android +- SalesforceA for iOS +- SalesforceDX Namespace Registry +- SalesforceIQ + +You can add more ignored apps by defining a comma-separated list of names in the `ALLOWED_INACTIVE_CONNECTED_APPS` environment variable. + +_Example: +ALLOWED_INACTIVE_CONNECTED_APPS=My App 1,My App 2, My App 3_ + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-unused-connected-apps/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Queries:** It performs SOQL queries against `ConnectedApplication`, `LoginHistory`, and `OAuthToken` objects to gather comprehensive data about Connected Apps and their usage. +- **Temporary SFDX Project:** It creates a temporary SFDX project to retrieve Connected App metadata, allowing for local parsing and analysis of their XML files. +- **Metadata Parsing:** It parses the `connectedApp-meta.xml` files to check for `isAdminApproved` and the presence of `profileName` or `permissionsetName` to determine accessibility. +- **Data Correlation:** It correlates data from various Salesforce objects to build a complete picture of each Connected App's usage and status. +- **Date Calculation:** Uses `moment` to calculate the time since the last OAuth token usage. +- **Report Generation:** It uses `generateCsvFile` to create the CSV report of unused Connected Apps. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **File System Operations:** Uses `fs-extra` for creating and removing temporary directories and files. +- **Environment Variable Reading:** Reads the `ALLOWED_INACTIVE_CONNECTED_APPS` environment variable to customize the list of ignored Connected Apps. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:diagnose:unused-connected-apps +``` + + diff --git a/docs/hardis/org/diagnose/unusedlicenses.md b/docs/hardis/org/diagnose/unusedlicenses.md index e54cdcbab..f75ac153a 100644 --- a/docs/hardis/org/diagnose/unusedlicenses.md +++ b/docs/hardis/org/diagnose/unusedlicenses.md @@ -1,40 +1,65 @@ - + # hardis:org:diagnose:unusedlicenses ## Description -When you assign a Permission Set to a user, and that this Permission Set is related to a Permission Set License, a Permission Set License Assignment is automatically created for the user. - But when you unassign this Permission Set from the user, **the Permission Set License Assignment is not deleted**. +## Command Behavior - This leads that you can be **charged for Permission Set Licenses that are not used** ! +**Detects and suggests the deletion of unused Permission Set License Assignments in a Salesforce org.** - This command detects such useless Permission Set Licenses Assignments and suggests to delete them. +When a Permission Set (PS) linked to a Permission Set License (PSL) is assigned to a user, a Permission Set License Assignment (PSLA) is automatically created. However, when that PS is unassigned from the user, the PSLA is *not* automatically deleted. This can lead to organizations being charged for unused PSLAs, representing a hidden cost and technical debt. + +This command identifies such useless PSLAs and provides options to delete them, helping to optimize license usage and reduce unnecessary expenses. + +Key functionalities: + +- **PSLA Detection:** Queries the Salesforce org to find all active PSLAs. +- **Usage Verification:** Correlates PSLAs with actual Permission Set Assignments and Permission Set Group Assignments to determine if the underlying Permission Sets are still assigned to the user. 
+- **Special Case Handling:** Accounts for specific scenarios where profiles might implicitly assign PSLAs (e.g., `Salesforce API Only` profile assigning `SalesforceAPIIntegrationPsl`) and allows for always excluding certain PSLAs from the unused check. +- **Reporting:** Generates a CSV report of all identified unused PSLAs, including the user and the associated Permission Set License. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of unused PSLAs. +- **Interactive Deletion:** In non-CI environments, it offers an interactive prompt to bulk delete the identified unused PSLAs. + +Many thanks to [Vincent Finet](https://www.linkedin.com/in/vincentfinet/) for the inspiration during his great speaker session at [French Touch Dreamin '23](https://frenchtouchdreamin.com/), and his kind agreement for reusing such inspiration in this command :) + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-unused-licenses/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves extensive querying of Salesforce objects and data correlation: + +- **SOQL Queries (Bulk API):** It uses `bulkQuery` and `bulkQueryChunksIn` to efficiently retrieve large volumes of data from `PermissionSetLicenseAssign`, `PermissionSetLicense`, `PermissionSet`, `PermissionSetGroupComponent`, and `PermissionSetAssignment` objects. +- **Data Correlation:** It meticulously correlates data across these objects to determine if a `PermissionSetLicenseAssign` record has a corresponding active assignment to a Permission Set or Permission Set Group for the same user. +- **Filtering Logic:** It applies complex filtering logic to exclude PSLAs that are genuinely in use or are part of predefined exceptions (e.g., `alwaysExcludeForActiveUsersPermissionSetLicenses`). +- **Bulk Deletion:** If the user opts to delete unused PSLAs, it uses `bulkUpdate` with the `delete` operation to efficiently remove multiple records. +- **Report Generation:** It uses `generateCsvFile` to create the CSV report of unused PSLAs. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **User Interaction:** Uses `prompts` for interactive confirmation before performing deletion operations. +
- Many thanks to [Vincent Finet](https://www.linkedin.com/in/vincentfinet/) for the inspiration during his great speaker session at [French Touch Dreamin '23](https://frenchtouchdreamin.com/), and his kind agreement for reusing such inspiration in this command :) - ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:diagnose:unusedlicenses +$ sf hardis:org:diagnose:unusedlicenses ``` ```shell -sfdx hardis:org:diagnose:unusedlicenses --fix +$ sf hardis:org:diagnose:unusedlicenses --fix ``` diff --git a/docs/hardis/org/diagnose/unusedusers.md b/docs/hardis/org/diagnose/unusedusers.md index 8982ae89d..d9f88a6c7 100644 --- a/docs/hardis/org/diagnose/unusedusers.md +++ b/docs/hardis/org/diagnose/unusedusers.md @@ -1,60 +1,79 @@ - + # hardis:org:diagnose:unusedusers ## Description + +## Command Behavior + +**Detects and reports on inactive or unused Salesforce user accounts, helping to optimize license usage and enhance security.** + Efficient user management is vital in Salesforce to ensure resources are optimized and costs are controlled. However, inactive or unused user accounts can often go unnoticed, leading to wasted licenses and potential security risks. This tool addresses this challenge by enabling administrators to identify users who haven't logged in within a specified period. By analyzing user login activity and last login timestamps, this feature highlights inactive user accounts, allowing administrators to take appropriate action. Whether it's deactivating dormant accounts, freeing up licenses, or ensuring compliance with security policies, this functionality empowers administrators to maintain a lean and secure Salesforce environment. -licensetypes values are the following: +Key functionalities: -- all-crm: SFDC,AUL,AUL1,AULL_IGHT +- **Inactivity Detection:** Identifies users who have not logged in for a specified number of days (`--days` flag, default 180 days in CI, 365 days otherwise). +- **License Type Filtering:** Allows filtering users by license type using `--licensetypes` (e.g., `all-crm`, `all-paying`) or specific license identifiers using `--licenseidentifiers`. 
+ - `all-crm`: Includes `SFDC`, `AUL`, `AUL1`, `AULL_IGHT` licenses. + - `all-paying`: Includes `SFDC`, `AUL`, `AUL1`, `AULL_IGHT`, `PID_Customer_Community`, `PID_Customer_Community_Login`, `PID_Partner_Community`, `PID_Partner_Community_Login` licenses. + - Note: You can see the full list of available license identifiers in [Salesforce Documentation](https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/sfdx_cli_reference/sforce_api_objects_userlicense.htm). +- **Active User Retrieval:** The `--returnactiveusers` flag inverts the command, allowing you to retrieve active users who *have* logged in during the specified period. +- **CSV Report Generation:** Generates a CSV file containing details of all identified users (inactive or active), including their last login date, profile, and license information. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of inactive or active users. -- all-paying: SFDC,AUL,AUL1,AULL_IGHT,PID_Customer_Community,PID_Customer_Community_Login,PID_Partner_Community,PID_Partner_Community_Login +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-inactive-users/) and can output Grafana, Slack and MsTeams Notifications. -Note: You can see the full list of available license identifiers in [Salesforce Documentation](https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/object_reference/sforce_api_objects_userlicense.htm) +
+Technical explanations -Use --returnactiveusers to revert the command and retrieve active users that has logged in during the period. +The command's technical implementation involves: +- **SOQL Query (Bulk API):** It uses `bulkQuery` to efficiently retrieve user records from the Salesforce `User` object. The SOQL query dynamically constructs its WHERE clause based on the `--days`, `--licensetypes`, `--licenseidentifiers`, and `--returnactiveusers` flags. +- **Interactive Prompts:** Uses `prompts` to interactively ask the user for the number of inactive days and license types if not provided via flags. +- **License Mapping:** Internally maps common license type aliases (e.g., `all-crm`) to their corresponding Salesforce `LicenseDefinitionKey` values. +- **Report Generation:** It uses `generateCsvFile` to create the CSV report of users. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **User Feedback:** Provides a summary of the findings in the console, indicating the number of inactive or active users found. +
## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------------------|:-------:|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| days
-t | option | Extracts the users that have been inactive for the amount of days specified. In CI, default is 180 days | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| licenseidentifiers
-i | option | Comma-separated list of license identifiers, in case licensetypes is not used.. Identifiers available at | | | | -| licensetypes
-l | option | Type of licenses to check. If set, do not use licenseidentifiers option. In CI, default is all-crm | | | all
all-crm
all-paying | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| returnactiveusers | boolean | Inverts the command by returning the active users | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------------------|:-------:|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:------------------------------:| +| days
-t | option | Extracts the users that have been inactive for the amount of days specified. In CI, default is 180 days | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| licenseidentifiers
-i | option | Comma-separated list of license identifiers, in case licensetypes is not used. Identifiers available at https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/object_reference/sforce_api_objects_userlicense.htm | | | | +| licensetypes
-l | option | Type of licenses to check. If set, do not use licenseidentifiers option. In CI, default is all-crm | | | all
all-crm
all-paying | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| returnactiveusers | boolean | Inverts the command by returning the active users | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:diagnose:unusedusers +$ sf hardis:org:diagnose:unusedusers ``` ```shell -sfdx hardis:org:diagnose:unusedusers --days 365 +$ sf hardis:org:diagnose:unusedusers --days 365 ``` ```shell -sfdx hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm +$ sf hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm ``` ```shell -sfdx hardis:org:diagnose:unusedusers --days 60 --licenseidentifiers SFDC,AUL,AUL1 +$ sf hardis:org:diagnose:unusedusers --days 60 --licenseidentifiers SFDC,AUL,AUL1 ``` ```shell -sfdx hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm --returnactiveusers +$ sf hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm --returnactiveusers ``` diff --git a/docs/hardis/org/files/export.md b/docs/hardis/org/files/export.md index 39893d547..3ca7dc210 100644 --- a/docs/hardis/org/files/export.md +++ b/docs/hardis/org/files/export.md @@ -1,35 +1,60 @@ - + # hardis:org:files:export ## Description -Export file attachments from a Salesforce org -See article below +## Command Behavior + +**Exports file attachments (ContentVersion, Attachment) from a Salesforce org based on a predefined configuration.** + +This command enables the mass download of files associated with Salesforce records, providing a robust solution for backing up files, migrating them to other systems, or integrating them with external document management solutions. + +Key functionalities: + +- **Configuration-Driven Export:** Relies on an `export.json` file within a designated file export project to define the export criteria, including the SOQL query for parent records, file types to export, and output naming conventions. +- **Interactive Project Selection:** If the file export project path is not provided via the `--path` flag, it interactively prompts the user to select one. 
+- **Configurable Export Options:** Allows overriding default export settings such as `chunksize` (number of records processed in a batch), `polltimeout` (timeout for Bulk API calls), and `startchunknumber` (to resume a failed export). +- **Support for ContentVersion and Attachment:** Handles both modern Salesforce Files (ContentVersion) and older Attachments. + +See this article for a practical example: [![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) +
+Technical explanations + +The command's technical implementation involves: + +- **FilesExporter Class:** The core logic is encapsulated within the `FilesExporter` class, which orchestrates the entire export process. +- **SOQL Queries (Bulk API):** It uses Salesforce Bulk API queries to efficiently retrieve large volumes of parent record IDs and file metadata. +- **File Download:** Downloads the actual file content from Salesforce. +- **File System Operations:** Writes the downloaded files to the local file system, organizing them into folders based on the configured naming conventions. +- **Configuration Loading:** Reads the `export.json` file to get the export configuration. It also allows for interactive overriding of these settings. +- **Interactive Prompts:** Uses `selectFilesWorkspace` to allow the user to choose a file export project and `promptFilesExportConfiguration` for customizing export options. +- **Error Handling:** Includes mechanisms to handle potential errors during the export process, such as network issues or API limits. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:------------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| chunksize
-c | option | Number of records to add in a chunk before it is processed | 1000 | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Path to the file export project | | | | -| polltimeout
-t | option | Timeout in MS for Bulk API calls | 300000 | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| startchunknumber
-s | option | Chunk number to start from | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| chunksize
-c | option | Number of records to add in a chunk before it is processed | 1000 | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Path to the file export project | | | | +| polltimeout
-t | option | Timeout in MS for Bulk API calls | 300000 | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| startchunknumber
-s | option | Chunk number to start from | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:files:export +$ sf hardis:org:files:export ``` diff --git a/docs/hardis/org/files/import.md b/docs/hardis/org/files/import.md new file mode 100644 index 000000000..dba77e92d --- /dev/null +++ b/docs/hardis/org/files/import.md @@ -0,0 +1,53 @@ + +# hardis:org:files:import + +## Description + + +This command facilitates the mass upload of files into Salesforce, allowing you to populate records with associated documents, images, or other file types. It's a crucial tool for data migration, content seeding, or synchronizing external file repositories with Salesforce. + +Key functionalities: + +- **Configuration-Driven Import:** Relies on an `export.json` file within a designated file export project (created using `sf hardis:org:configure:files`) to determine which files to import and how they should be associated with Salesforce records. +- **Interactive Project Selection:** If the file import project path is not provided via the `--path` flag, it interactively prompts the user to select one. +- **Overwrite Option:** The `--overwrite` flag allows you to replace existing files in Salesforce with local versions that have the same name. Be aware that this option doubles the number of API calls used. +- **Support for ContentVersion and Attachment:** Handles both modern Salesforce Files (ContentVersion) and older Attachments. + +See this article for how to export files, which is often a prerequisite for importing: + +[![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) + +
+Technical explanations + +The command's technical implementation involves: + +- **FilesImporter Class:** The core logic is encapsulated within the `FilesImporter` class, which orchestrates the entire import process. +- **File System Scan:** Scans the local file system within the configured project directory to identify files for import. +- **Salesforce API Interaction:** Uses Salesforce APIs (e.g., ContentVersion, Attachment) to upload files and associate them with records. +- **Configuration Loading:** Reads the `export.json` file to get the import configuration, including SOQL queries to identify parent records for file association. +- **Interactive Prompts:** Uses `selectFilesWorkspace` to allow the user to choose a file import project and `prompts` for confirming the overwrite behavior. +- **Error Handling:** Includes mechanisms to handle potential errors during the import process, such as API limits or file upload failures. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| overwrite
-f | boolean | Override existing files (doubles the number of API calls) | | | | +| path
-p | option | Path to the file export project | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:files:import +``` + + diff --git a/docs/hardis/org/fix/listviewmine.md b/docs/hardis/org/fix/listviewmine.md index 9d560a974..ca24955be 100644 --- a/docs/hardis/org/fix/listviewmine.md +++ b/docs/hardis/org/fix/listviewmine.md @@ -1,4 +1,4 @@ - + # hardis:org:fix:listviewmine ## Description @@ -56,26 +56,25 @@ ENV PUPPETEER_EXECUTABLE_PATH="$\{CHROMIUM_PATH}" // remove \ before { ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | |listviews
-l|option|Comma-separated list of listviews following format Object:ListViewName Example: Contact:MyContacts,Contact:MyActiveContacts,Opportunity:MYClosedOpportunities|||| -|loglevel|option|logging level for this command invocation|warn||trace
debug
info
warn
error
fatal| |skipauth|boolean|Skip authentication check when a default username is required|||| -|targetusername
-u|option|username or alias for the target org; overrides default target org|||| +|target-org
-o|option|undefined|||| |websocket|option|Websocket host:port for VsCode SFDX Hardis UI integration|||| ## Examples ```shell -sfdx hardis:org:fix:listviewmine +$ sf hardis:org:fix:listviewmine ``` ```shell -sfdx hardis:org:fix:listviewmine --listviews Opportunity:MySubscriptions,Account:MyActivePartners +$ sf hardis:org:fix:listviewmine --listviews Opportunity:MySubscriptions,Account:MyActivePartners ``` diff --git a/docs/hardis/org/generate/packagexmlfull.md b/docs/hardis/org/generate/packagexmlfull.md index 5303e1277..bcc6cc334 100644 --- a/docs/hardis/org/generate/packagexmlfull.md +++ b/docs/hardis/org/generate/packagexmlfull.md @@ -1,35 +1,60 @@ - + # hardis:org:generate:packagexmlfull ## Description -Generates full org package.xml, including managed items + +## Command Behavior + +**Generates a comprehensive `package.xml` file for a Salesforce org, including all metadata components, even managed ones.** + +This command is essential for various Salesforce development and administration tasks, especially when you need a complete snapshot of an org's metadata. It goes beyond typical source tracking by including managed package components, which is crucial for understanding the full metadata footprint of an org. + +Key functionalities: + +- **Full Org Metadata Retrieval:** Connects to a specified Salesforce org (or prompts for one if not provided) and retrieves a complete list of all metadata types and their members. +- **Managed Package Inclusion:** Unlike standard source retrieval, this command explicitly includes metadata from managed packages, providing a truly comprehensive `package.xml`. +- **Customizable Output:** Allows you to specify the output file path for the generated `package.xml`. +- **Interactive Org Selection:** If no target org is specified, it interactively prompts the user to choose an org. (or use --no-prompt to skip this step) + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce Metadata API Interaction:** It leverages the Salesforce Metadata API to list all available metadata types and then retrieve all components for each type. +- **`buildOrgManifest` Utility:** The core logic for querying the org's metadata and constructing the `package.xml` is encapsulated within the `buildOrgManifest` utility function. +- **XML Generation:** It dynamically builds the XML structure of the `package.xml` file, including the `types` and `members` elements for all retrieved metadata. +- **File System Operations:** It writes the generated `package.xml` file to the specified output path. +- **Interactive Prompts:** Uses `promptOrgUsernameDefault` to guide the user in selecting the target Salesforce org. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile | option | Output package.xml file | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| no-prompt
-n | boolean | Do not prompt for org username, use the default one | | | | +| outputfile | option | Output package.xml file | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:generate:packagexmlfull +$ sf hardis:org:generate:packagexmlfull ``` ```shell -sfdx hardis:org:generate:packagexmlfull --outputfile /tmp/packagexmlfull.xml +$ sf hardis:org:generate:packagexmlfull --outputfile /tmp/packagexmlfull.xml ``` ```shell -sfdx hardis:org:generate:packagexmlfull --targetusername nico@example.com +$ sf hardis:org:generate:packagexmlfull --target-org nico@example.com ``` diff --git a/docs/hardis/org/monitor/all.md b/docs/hardis/org/monitor/all.md index 93bef4a0b..032e42ae9 100644 --- a/docs/hardis/org/monitor/all.md +++ b/docs/hardis/org/monitor/all.md @@ -1,4 +1,4 @@ - + # hardis:org:monitor:all ## Description @@ -29,31 +29,51 @@ Example: ```yaml monitoringCommands: - title: My Custom command - command: sfdx my:custom:command + command: sf my:custom:command - title: My Custom command 2 - command: sfdx my:other:custom:command + command: sf my:other:custom:command ``` You can force the daily run of all commands by defining env var `MONITORING_IGNORE_FREQUENCY=true` +The default list of commands is the following: + +| Key | Description | Command | Frequency | +|:--------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------|:---------:| +| [AUDIT_TRAIL](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail) | Detect suspect setup actions in major org | [sf hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail) | daily | +| [LEGACY_API](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/legacyapi) | Detect calls to deprecated API versions | [sf 
hardis:org:diagnose:legacyapi](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/legacyapi) | daily | +| [ORG_LIMITS](https://sfdx-hardis.cloudity.com/hardis/org/monitor/limits) | Detect if org limits are close to be reached | [sf hardis:org:monitor:limits](https://sfdx-hardis.cloudity.com/hardis/org/monitor/limits) | daily | +| [LICENSES](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/licenses) | Extract licenses information | [sf hardis:org:diagnose:licenses](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/licenses) | weekly | +| [LINT_ACCESS](https://sfdx-hardis.cloudity.com/hardis/lint/access) | Detect custom elements with no access rights defined in permission sets | [sf hardis:lint:access](https://sfdx-hardis.cloudity.com/hardis/lint/access) | weekly | +| [UNUSED_LICENSES](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedlicenses) | Detect permission set licenses that are assigned to users that do not need them | [sf hardis:org:diagnose:unusedlicenses](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedlicenses) | weekly | +| [UNUSED_USERS](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedusers) | Detect active users without recent logins | [sf hardis:org:diagnose:unusedusers](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedusers) | weekly | +| [ACTIVE_USERS](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedusers) | Detect active users with recent logins | [sf hardis:org:diagnose:unusedusers --returnactiveusers](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedusers) | weekly | +| [ORG_INFO](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/instanceupgrade) | Get org info + SF instance info + next major upgrade date | [sf hardis:org:diagnose:instanceupgrade](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/instanceupgrade) | weekly | +| [RELEASE_UPDATES](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/releaseupdates) | Gather warnings about incoming and overdue Release 
Updates | [sf hardis:org:diagnose:releaseupdates](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/releaseupdates) | weekly | +| [UNUSED_METADATAS](https://sfdx-hardis.cloudity.com/hardis/lint/unusedmetadatas) | Detect custom labels and custom permissions that are not in use | [sf hardis:lint:unusedmetadatas](https://sfdx-hardis.cloudity.com/hardis/lint/unusedmetadatas) | weekly | +| [UNUSED_APEX_CLASSES](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-apex-classes) | Detect unused Apex classes in an org | [sf hardis:org:diagnose:unused-apex-classes](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-apex-classes) | weekly | +| [CONNECTED_APPS](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-connected-apps) | Detect unused Connected Apps in an org | [sf hardis:org:diagnose:unused-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-connected-apps) | weekly | +| [UNSECURED_CONNECTED_APPS](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unsecure-connected-apps) | Detect unsecured OAuth Tokens and related Connected Apps | [sf hardis:org:diagnose:unsecure-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unsecure-connected-apps) | weekly | +| [METADATA_STATUS](https://sfdx-hardis.cloudity.com/hardis/lint/metadatastatus) | Detect inactive metadata | [sf hardis:lint:metadatastatus](https://sfdx-hardis.cloudity.com/hardis/lint/metadatastatus) | weekly | +| [MISSING_ATTRIBUTES](https://sfdx-hardis.cloudity.com/hardis/lint/missingattributes) | Detect missing description on custom field | [sf hardis:lint:missingattributes](https://sfdx-hardis.cloudity.com/hardis/lint/missingattributes) | weekly | + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the 
api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:monitor:all +$ sf hardis:org:monitor:all ``` diff --git a/docs/hardis/org/monitor/backup.md b/docs/hardis/org/monitor/backup.md index e0037b258..21ff23e11 100644 --- a/docs/hardis/org/monitor/backup.md +++ b/docs/hardis/org/monitor/backup.md @@ -1,34 +1,105 @@ - + # hardis:org:monitor:backup ## Description Retrieve sfdx sources in the context of a monitoring backup - + +The command exists in 2 modes: filtered(default & recommended) and full. + +## Filtered mode (default, better performances) + +Automatically skips metadatas from installed packages with namespace. + You can remove more metadata types from backup, especially in case you have too many metadatas and that provokes a crash, using: - Manual update of `manifest/package-skip-items.xml` config file (then commit & push in the same branch) + - Works with full wildcard (`*`) , named metadata (`Account.Name`) or partial wildcards names (`pi__*` , `*__dlm` , or `prefix*suffix`) + - Environment variable MONITORING_BACKUP_SKIP_METADATA_TYPES (example: `MONITORING_BACKUP_SKIP_METADATA_TYPES=CustomLabel,StaticResource,Translation`): that will be applied to all monitoring branches. +## Full mode + +Activate it with **--full** parameter, or variable MONITORING_BACKUP_MODE_FULL=true + +Ignores filters (namespaces items & manifest/package-skip-items.xml) to retrieve ALL metadatas, including those you might not care about (reports, translations...) 
+ +As we can retrieve only 10000 files per call, the list of all metadatas will be chunked to make multiple calls (and take more time than filtered mode) + +- if you use `--full-apply-filters`, manifest/package-skip-items.xml and MONITORING_BACKUP_SKIP_METADATA_TYPES filters will be applied anyway +- if you use `--exclude-namespaces`, namespaced items will be ignored + +_With both of these options, it is as if you were not using --full, but with chunked metadata download_ + +## In CI/CD + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-metadata-backup/) and can output Grafana, Slack and MsTeams Notifications. + +## Troubleshooting + +If you have unknown errors (it happens!), you can investigate using the full command with smaller chunks. + +Example: `sf hardis:org:monitor:backup --full --exclude-namespaces --full-apply-filters --max-by-chunk 500` + +It will allow you to identify the responsible metadata and ignore it using package-skip-items.xml or MONITORING_BACKUP_SKIP_METADATA_TYPES env variable. + +## Documentation + +[Doc generation (including visual flows)](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/) is triggered at the end of the command. 
+ +If you want to also upload HTML Documentation on your Salesforce Org as a static resource, use variable **SFDX_HARDIS_DOC_DEPLOY_TO_ORG="true"** + +If you want to also upload HTML Documentation on Cloudflare, use variable **SFDX_HARDIS_DOC_DEPLOY_TO_CLOUDFLARE="true"** + +- If you want to generate the documentation in multiple languages, define variable SFDX_DOC_LANGUAGES (ex: SFDX_DOC_LANGUAGES=en,fr,de) +- You can define one Cloudflare site per language, for example with the following variables: + - CLOUDFLARE_PROJECT_NAME_EN=cloudity-demo-english + - CLOUDFLARE_PROJECT_NAME_FR=cloudity-demo-french + - CLOUDFLARE_PROJECT_NAME_DE=cloudity-demo-german + +If the Flow history doc always displays a single state, you probably need to update your workflow configuration: + +- on Gitlab: Env variable [`GIT_FETCH_EXTRA_FLAGS: --depth 10000`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.gitlab-ci.yml#L11) +- on GitHub: [`fetch-depth: 0`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.github/workflows/org-monitoring.yml#L58) +- on Azure: [`fetchDepth: "0"`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/azure-pipelines.yml#L39) +- on Bitbucket: [`step: clone: depth: full`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/bitbucket-pipelines.yml#L18) + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------------------|:-------:|:-------------------------------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| exclude-namespaces
-e | boolean | If mode --full is activated, exclude namespaced metadatas | | | | +| flags-dir | option | undefined | | | | +| full | boolean | Do not take into account filtering using package-skip-items.xml and MONITORING_BACKUP_SKIP_METADATA_TYPES. Efficient but much much slower! | | | | +| full-apply-filters
-z | boolean | If mode --full is activated, apply filters of manifest/package-skip-items.xml and MONITORING_BACKUP_SKIP_METADATA_TYPES anyway | | | | +| json | boolean | Format output as json. | | | | +| max-by-chunk
-m | option | If mode --full is activated, maximum number of metadatas in a package.xml chunk | 3000 | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skip-doc | boolean | Skip the generation of project documentation at the end of the command | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| start-chunk | option | Use this parameter to troubleshoot a specific chunk. It will be used as the first chunk to retrieve | 1 | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:monitor:backup +$ sf hardis:org:monitor:backup +``` + +```shell +$ sf hardis:org:monitor:backup --full +``` + +```shell +$ sf hardis:org:monitor:backup --full --exclude-namespaces +``` + +```shell +$ sf hardis:org:monitor:backup --full --exclude-namespaces --full-apply-filters ``` diff --git a/docs/hardis/org/monitor/limits.md b/docs/hardis/org/monitor/limits.md index 869a70d2b..ddc32c893 100644 --- a/docs/hardis/org/monitor/limits.md +++ b/docs/hardis/org/monitor/limits.md @@ -1,27 +1,57 @@ - + # hardis:org:monitor:limits ## Description -Check limits of a SF org and send relatednotifications + +## Command Behavior + +**Checks the current usage of various Salesforce org limits and sends notifications if thresholds are exceeded.** + +This command is a critical component of proactive Salesforce org management, helping administrators and developers monitor resource consumption and prevent hitting critical limits that could impact performance or functionality. It provides early warnings when limits are approaching their capacity. + +Key functionalities: + +- **Limit Retrieval:** Fetches a comprehensive list of all Salesforce org limits using the Salesforce CLI. +- **Usage Calculation:** Calculates the percentage of each limit that is currently being used. +- **Threshold-Based Alerting:** Assigns a severity (success, warning, or error) to each limit based on configurable thresholds: + - **Warning:** If usage exceeds 50% (configurable via `LIMIT_THRESHOLD_WARNING` environment variable). + - **Error:** If usage exceeds 75% (configurable via `LIMIT_THRESHOLD_ERROR` environment variable). +- **CSV Report Generation:** Generates a CSV file containing all org limits, their current usage, maximum allowed, and calculated percentage used, along with the assigned severity. 
+- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of limits that have exceeded the warning or error thresholds. + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-org-limits/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce CLI Integration:** It executes the `sf org limits list` command to retrieve the current org limits. It parses the JSON output of this command. +- **Data Processing:** It iterates through the retrieved limits, calculates the `used` and `percentUsed` values, and assigns a `severity` (success, warning, error) based on the configured thresholds. +- **Environment Variable Configuration:** Reads `LIMIT_THRESHOLD_WARNING` and `LIMIT_THRESHOLD_ERROR` environment variables to set the warning and error thresholds for limit usage. +- **Report Generation:** It uses `generateCsvFile` to create the CSV report of org limits. +- **Notification Integration:** It integrates with the `NotifProvider` to send notifications, including attachments of the generated CSV report and detailed metrics for each limit, which can be consumed by monitoring dashboards like Grafana. +- **Exit Code Management:** Sets the process exit code to 1 if any limit is in an 'error' state, indicating a critical issue. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| outputfile
-o | option | Force the path and name of output report file. Must end with .csv | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:monitor:limits +$ sf hardis:org:monitor:limits ``` diff --git a/docs/hardis/org/multi-org-query.md b/docs/hardis/org/multi-org-query.md new file mode 100644 index 000000000..18c94cbaf --- /dev/null +++ b/docs/hardis/org/multi-org-query.md @@ -0,0 +1,68 @@ + +# hardis:org:multi-org-query + +## Description + + +**Executes a SOQL query across multiple Salesforce organizations and consolidates the results into a single report.** + +This command is highly valuable for administrators and developers who need to gather consistent data from various Salesforce environments (e.g., sandboxes, production orgs) for reporting, auditing, or comparison purposes. It streamlines the process of querying multiple orgs, eliminating the need to log into each one individually. + +Key functionalities: + +- **Flexible Query Input:** You can provide a custom SOQL query directly using the `--query` flag, or select from a list of predefined query templates (e.g., `active-users`, `all-users`) using the `--query-template` flag. +- **Multiple Org Targeting:** Specify a list of Salesforce org usernames or aliases using the `--target-orgs` flag. If not provided, an interactive menu will allow you to select multiple authenticated orgs. +- **Consolidated Report:** All query results from the different orgs are combined into a single CSV file, making data analysis and comparison straightforward. +- **Authentication Handling:** For CI/CD jobs, ensure that the target orgs are already authenticated using Salesforce CLI. In interactive mode, it will prompt for authentication if an org is not connected. + +**Visual Demo:** + +[![Use in VsCode SFDX Hardis !](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/multi-org-query-demo.gif)](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) + +
+Technical explanations + +The command's technical implementation involves: + +- **Org Authentication and Connection:** It uses `AuthInfo.create` and `Connection.create` to establish connections to each target Salesforce org. It also leverages `makeSureOrgIsConnected` and `promptOrgList` for interactive org selection and authentication checks. +- **SOQL Query Execution (Bulk API):** It executes the specified SOQL query against each connected org using `bulkQuery` for efficient data retrieval, especially for large datasets. +- **Data Aggregation:** It collects the records from each org's query result and adds metadata about the source org (instance URL, alias, username) to each record, enabling easy identification of data origin in the consolidated report. +- **Report Generation:** It uses `generateCsvFile` to create the final CSV report and `generateReportPath` to determine the output file location. +- **Interactive Prompts:** The `prompts` library is used to guide the user through selecting a query template or entering a custom query, and for selecting target orgs if not provided as command-line arguments. +- **Error Handling:** It logs errors for any orgs where the query fails, ensuring that the overall process continues and provides a clear summary of successes and failures. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:--------------------------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | Force the path and name of output report file. Must end with .csv | | | | +| query
-q | option | SOQL Query to run on multiple orgs | | | | +| query-template
-t | option | Use one of predefined SOQL Query templates | | | active-users
all-users | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-orgs
-x | option | List of org usernames or aliases. | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:multi-org-query +``` + +```shell +$ sf hardis:org:multi-org-query --query "SELECT Id,Username FROM User" +``` + +```shell +$ sf hardis:org:multi-org-query --query "SELECT Id,Username FROM User" --target-orgs nico@cloudity.com nico@cloudity.com.preprod nico@cloudity.com.uat +``` + +```shell +$ sf hardis:org:multi-org-query --query-template active-users --target-orgs nico@cloudity.com nico@cloudity.com.preprod nico@cloudity.com.uat +``` + + diff --git a/docs/hardis/org/purge/apexlog.md b/docs/hardis/org/purge/apexlog.md index 9d69e7a7a..d9a0dc85a 100644 --- a/docs/hardis/org/purge/apexlog.md +++ b/docs/hardis/org/purge/apexlog.md @@ -1,31 +1,57 @@ - + # hardis:org:purge:apexlog ## Description -Purge apex logs in selected org + +**Purges Apex debug logs from a Salesforce org.** + +This command provides a quick and efficient way to clear out accumulated Apex debug logs from your Salesforce environment. This is particularly useful for: + +- **Storage Management:** Freeing up valuable data storage space in your Salesforce org. +- **Performance Optimization:** Reducing the overhead associated with large volumes of debug logs. +- **Troubleshooting:** Ensuring that new debug logs are generated cleanly without interference from old, irrelevant logs. + +Key functionalities: + +- **Log Identification:** Queries the `ApexLog` object to identify all existing debug logs. +- **Confirmation Prompt:** Before deletion, it prompts for user confirmation, displaying the number of Apex logs that will be deleted. +- **Bulk Deletion:** Uses the Salesforce Bulk API to efficiently delete a large number of Apex logs. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Query:** It executes a SOQL query (`SELECT Id FROM ApexLog LIMIT 50000`) to retrieve the IDs of Apex logs to be deleted. The limit is set to 50,000 to handle large volumes of logs. +- **CSV Export:** The retrieved log IDs are temporarily exported to a CSV file (`ApexLogsToDelete_*.csv`) in the `./tmp` directory. +- **User Confirmation:** It uses the `prompts` library to ask for user confirmation before proceeding with the deletion, displaying the count of logs to be purged. +- **Bulk API Deletion:** It then uses the Salesforce CLI's `sf data delete bulk` command, pointing to the generated CSV file, to perform the mass deletion of Apex logs. +- **File System Operations:** It uses `fs-extra` to create the temporary directory and manage the CSV file. +- **Error Handling:** Includes error handling for the query and deletion operations. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| prompt
-z | boolean | Prompt for confirmation (true by default, use --no-prompt to skip) | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:-------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| prompt
-z | boolean | Prompt for confirmation (true by default, use --no-prompt to skip) | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:purge:apexlog +$ sf hardis:org:purge:apexlog ``` ```shell -sfdx hardis:org:purge:apexlog --targetusername nicolas.vuillamy@gmail.com +$ sf hardis:org:purge:apexlog --target-org nicolas.vuillamy@gmail.com ``` diff --git a/docs/hardis/org/purge/flow.md b/docs/hardis/org/purge/flow.md index 0ea5a83d0..0a95be203 100644 --- a/docs/hardis/org/purge/flow.md +++ b/docs/hardis/org/purge/flow.md @@ -1,57 +1,63 @@ - + # hardis:org:purge:flow ## Description -Purge Obsolete flow versions to avoid the 50 max versions limit. Filters on Status and Name + +**Purges old or unwanted Flow versions from a Salesforce org, with an option to delete related Flow Interviews.** + +This command helps maintain a clean and performant Salesforce org by removing obsolete Flow versions. Over time, multiple versions of Flows can accumulate, consuming storage and potentially impacting performance. This tool provides a controlled way to clean up these versions. + +Key functionalities: + +- **Targeted Flow Selection:** Allows you to filter Flow versions to delete by name (`--name`) and status (`--status`, e.g., `Obsolete`, `Draft`, `Inactive`). +- **Flow Interview Deletion:** If a Flow version cannot be deleted due to active Flow Interviews, the `--delete-flow-interviews` flag (or interactive prompt) allows you to delete these interviews first, then retry the Flow version deletion. +- **Confirmation Prompt:** In interactive mode, it prompts for confirmation before proceeding with the deletion of Flow versions and Flow Interviews. +- **Partial Success Handling:** The `--allowpurgefailure` flag (default `true`) allows the command to continue even if some deletions fail, reporting the errors. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Tooling API):** It queries the `Flow` object (using the Tooling API) to list Flow versions based on the provided filters (name, status, manageable state). +- **Bulk Deletion (Tooling API):** It uses `bulkDeleteTooling` to perform mass deletions of Flow versions. If deletion fails due to active interviews, it extracts the interview IDs. +- **Flow Interview Management:** If `delete-flow-interviews` is enabled, it queries `FlowInterview` objects, performs bulk deletion of the identified interviews using `bulkDelete`, and then retries the Flow version deletion. +- **Interactive Prompts:** Uses the `prompts` library to interact with the user for selecting Flows, statuses, and confirming deletion actions. +- **Error Reporting:** Logs detailed error messages for failed deletions, including the specific reasons. +- **Command-Line Execution:** Uses `execSfdxJson` to execute Salesforce CLI commands for querying Flow data. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------------------|:-------:|:------------------------------------------------------------------------------------|:------------------------------:|:--------:|:-----------------------------------------------------:| -| allowpurgefailure
-f | boolean | Allows purges to fail without exiting with 1. Use --no-allowpurgefailure to disable | | | | -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| instanceurl
-r | option | URL of org instance | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| name
-n | option | Filter according to Name criteria | | | | -| prompt
-z | boolean | Prompt for confirmation (true by default, use --no-prompt to skip) | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| status
-s | option | Filter according to Status criteria | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------------------|:-------:|:-------------------------------------------------------------------------------------------------------------------------|:----------------------------:|:--------:|:-------:| +| allowpurgefailure
-f | boolean | Allows purges to fail without exiting with 1. Use --no-allowpurgefailure to disable | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| delete-flow-interviews
-w | boolean | If the presence of Flow interviews prevent to delete flows versions, delete them before retrying to delete flow versions | | | | +| flags-dir | option | undefined | | | | +| instanceurl
-r | option | URL of org instance | https://login.salesforce.com | | | +| json | boolean | Format output as json. | | | | +| name
-n | option | Filter according to Name criteria | | | | +| prompt
-z | boolean | Prompt for confirmation (true by default, use --no-prompt to skip) | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| status
-s | option | Filter according to Status criteria | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:purge:flow --no-prompt +$ sf hardis:org:purge:flow ``` ```shell -$ sfdx hardis:org:purge:flow --targetusername nicolas.vuillamy@gmail.com - Found 1 records: - ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS - 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - Are you sure you want to delete this list of records (y/n)?: y - Successfully deleted record: 30109000000kX7uAAE. - Deleted the following list of records: - ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS - 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - +$ sf hardis:org:purge:flow --target-org nicolas.vuillamy@gmail.com --no-prompt --delete-flow-interviews ``` ```shell -$ sfdx hardis:org:purge:flow --targetusername nicolas.vuillamy@gmail.com --status "Obsolete,Draft,InvalidDraft --name TestFlow" - Found 4 records: - ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS - 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - 30109000000kX8EAAU TestFlow 6 test flowwww InvalidDraft - 30109000000kX8AAAU TestFlow 5 test flowwww InvalidDraft - 30109000000kX89AAE TestFlow 4 test flowwww Draft - Are you sure you want to delete this list of records (y/n)?: n - No record deleted - +$ sf hardis:org:purge:flow --target-org nicolas.vuillamy@gmail.com --status "Obsolete,Draft,InvalidDraft" --name TestFlow ``` diff --git a/docs/hardis/org/refresh/after-refresh.md b/docs/hardis/org/refresh/after-refresh.md new file mode 100644 index 000000000..34ccfa51d --- /dev/null +++ b/docs/hardis/org/refresh/after-refresh.md @@ -0,0 +1,73 @@ + +# hardis:org:refresh:after-refresh + +## Description + + +## Command Behavior + +**Restores all previously backed-up Connected Apps (including Consumer Secrets) to a Salesforce org after a sandbox refresh.** + +This command is the second step in the sandbox refresh process. 
It scans the backup folder created before the refresh, allows selection of which Connected Apps to restore, and automates their deletion and redeployment to the refreshed org, ensuring all credentials and configuration are preserved. + +Key functionalities: + +- **Backup Folder Selection:** Prompts the user to select the correct backup folder for the sandbox instance. +- **Connected App Discovery:** Scans the backup for all Connected App metadata files. +- **User Selection:** Allows interactive or flag-based selection of which Connected Apps to restore. +- **Validation:** Ensures all selected apps exist in the backup and validates user input. +- **Org Cleanup:** Deletes existing Connected Apps from the refreshed org to allow clean redeployment. +- **Deployment:** Deploys the selected Connected Apps (with secrets) to the org. +- **Summary and Reporting:** Provides a summary of restored apps and their status. + +This command is part of [sfdx-hardis Sandbox Refresh](https://sfdx-hardis.cloudity.com/salesforce-sandbox-refresh/) and is designed to be run after a sandbox refresh, using the backup created by the before-refresh command. + +
+Technical explanations
+
+- **Backup Folder Handling:** Prompts for and validates the backup folder under `scripts/sandbox-refresh/`.
+- **Metadata Scanning:** Uses glob patterns to find all `*.connectedApp-meta.xml` files in the backup.
+- **Selection Logic:** Supports `--all`, `--name`, and interactive selection of apps to restore.
+- **Validation:** Checks that all requested apps exist in the backup and provides clear errors if not.
+- **Org Operations:** Deletes existing Connected Apps from the org before redeployment to avoid conflicts.
+- **Deployment:** Uses utility functions to deploy Connected Apps and their secrets to the org.
+- **Error Handling:** Handles and reports errors at each step, including parsing and deployment issues.
+
+
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:----------------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| all
-a | boolean | If set, all Connected Apps from the local repository will be processed. Takes precedence over --name if both are specified. | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| name
-n | option | Connected App name(s) to process (bypasses selection prompt). For multiple apps, separate with commas (e.g., "App1,App2") | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | |
+| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | |
+
+## Examples
+
+```shell
+$ sf hardis:org:refresh:after-refresh
+```
+
+```shell
+$ sf hardis:org:refresh:after-refresh --name "MyConnectedApp" # Process specific app, no selection prompt
+```
+
+```shell
+$ sf hardis:org:refresh:after-refresh --name "App1,App2,App3" # Process multiple apps, no selection prompt
+```
+
+```shell
+$ sf hardis:org:refresh:after-refresh --all # Process all apps, no selection prompt
+```
+
+```shell
+$ sf hardis:org:refresh:after-refresh --target-org myDevOrg
+```
+
+
diff --git a/docs/hardis/org/refresh/before-refresh.md b/docs/hardis/org/refresh/before-refresh.md
new file mode 100644
index 000000000..431cbb029
--- /dev/null
+++ b/docs/hardis/org/refresh/before-refresh.md
@@ -0,0 +1,77 @@
+
+# hardis:org:refresh:before-refresh
+
+## Description
+
+
+## Command Behavior
+
+**Backs up all Connected Apps, their secrets, certificates, and custom settings from a Salesforce org before a sandbox refresh, enabling full restoration after the refresh.**
+
+This command is essential for Salesforce sandbox refresh operations where Connected Apps (and their Consumer Secrets), certificates, and custom settings would otherwise be lost. It automates the extraction, secure storage, and (optionally) deletion of Connected Apps, ensuring that all credentials and configuration can be restored post-refresh.
+
+Key functionalities:
+
+- **Connected App Discovery:** Lists all Connected Apps in the org, with options to filter by name, process all, or interactively select.
+- **User Selection:** Allows interactive or flag-based selection of which Connected Apps to back up.
+- **Metadata Retrieval:** Retrieves Connected App metadata and saves it in a dedicated project folder for the sandbox instance. 
+- **Consumer Secret Extraction:** Attempts to extract Consumer Secrets automatically using browser automation (Puppeteer), or prompts for manual entry if automation fails. +- **Config Persistence:** Stores the list of selected apps in the project config for use during restoration. +- **Optional Deletion:** Can delete the Connected Apps from the org after backup, as required for re-upload after refresh. +- **Certificate Backup:** Retrieves all org certificates and their definitions, saving them for later restoration. +- **Custom Settings Backup:** Lists all custom settings in the org, allows user selection, and exports their data to JSON files for backup. +- **Summary and Reporting:** Provides a summary of actions, including which apps, certificates, and custom settings were saved and whether secrets were captured. + +This command is part of [sfdx-hardis Sandbox Refresh](https://sfdx-hardis.cloudity.com/salesforce-sandbox-refresh/) and is designed to be run before a sandbox refresh. It ensures that all Connected Apps, secrets, certificates, and custom settings are safely stored for later restoration. + +
+Technical explanations + +- **Salesforce CLI Integration:** Uses `sf org list metadata`, `sf project retrieve start`, and other CLI commands to discover and retrieve Connected Apps, certificates, and custom settings. +- **Metadata Handling:** Saves Connected App XML files and certificate files in a dedicated folder under `scripts/sandbox-refresh/`. +- **Consumer Secret Handling:** Uses Puppeteer to automate browser login and extraction of Consumer Secrets, falling back to manual prompts if needed. +- **Custom Settings Handling:** Lists all custom settings, allows user selection, and exports their data using `sf data tree export` to JSON files. +- **Config Management:** Updates `config/.sfdx-hardis.yml` with the list of selected apps for later use. +- **Deletion Logic:** Optionally deletes Connected Apps from the org (required for re-upload after refresh), with user confirmation unless running in CI or with `--delete` flag. +- **Error Handling:** Provides detailed error messages and guidance if retrieval or extraction fails. +- **Reporting:** Sends summary and configuration files to the WebSocket client for reporting and traceability. + +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| all
-a | boolean | If set, all Connected Apps from the org will be processed. Takes precedence over --name if both are specified. | | | | +| delete
-d | boolean | By default, Connected Apps are not deleted from the org after saving. Set this flag to force their deletion so they will be able to be reuploaded again after refreshing the org. | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| name
-n | option | Connected App name(s) to process. For multiple apps, separate with commas (e.g., "App1,App2") | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:org:refresh:before-refresh +``` + +```shell +$ sf hardis:org:refresh:before-refresh --name "MyConnectedApp" +``` + +```shell +$ sf hardis:org:refresh:before-refresh --name "App1,App2,App3" +``` + +```shell +$ sf hardis:org:refresh:before-refresh --all +``` + +```shell +$ sf hardis:org:refresh:before-refresh --delete +``` + + diff --git a/docs/hardis/org/retrieve/packageconfig.md b/docs/hardis/org/retrieve/packageconfig.md index f8e564981..89708c6d4 100644 --- a/docs/hardis/org/retrieve/packageconfig.md +++ b/docs/hardis/org/retrieve/packageconfig.md @@ -1,30 +1,50 @@ - + # hardis:org:retrieve:packageconfig ## Description -Retrieve package configuration from an org + +**Retrieves the installed package configuration from a Salesforce org and optionally updates the local project configuration.** + +This command is useful for maintaining an accurate record of installed packages within your Salesforce project, which is crucial for managing dependencies and ensuring consistent deployments across environments. + +Key functionalities: + +- **Package Listing:** Connects to a specified Salesforce org (or prompts for one if not provided) and retrieves a list of all installed packages. +- **Configuration Update:** Offers the option to update your local project's configuration with the retrieved list of installed packages. This can be beneficial for automating package installations during environment setup or CI/CD processes. + +
+Technical explanations + +The command's technical implementation involves: + +- **Org Connection:** It establishes a connection to the target Salesforce org using the provided or prompted username. +- **Metadata Retrieval:** It utilizes `MetadataUtils.listInstalledPackages` to query the Salesforce org and obtain details about the installed packages. +- **Interactive Prompt:** It uses the `prompts` library to ask the user whether they want to update their local project configuration with the retrieved package list. +- **Configuration Management:** If the user confirms, it calls `managePackageConfig` to update the project's configuration file (likely `.sfdx-hardis.yml`) with the new package information. +- **User Feedback:** Provides clear messages to the user about the success of the package retrieval and configuration update. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | |
+| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | |
 
 ## Examples
 
 ```shell
-sfdx hardis:org:retrieve:packageconfig
+$ sf hardis:org:retrieve:packageconfig
 ```
 
 ```shell
-sfdx hardis:org:retrieve:packageconfig -u myOrg
+$ sf hardis:org:retrieve:packageconfig --target-org myOrg
 ```
 
diff --git a/docs/hardis/org/retrieve/sources/analytics.md b/docs/hardis/org/retrieve/sources/analytics.md
index 0447cb732..22d9d19a0 100644
--- a/docs/hardis/org/retrieve/sources/analytics.md
+++ b/docs/hardis/org/retrieve/sources/analytics.md
@@ -1,26 +1,50 @@
-
+
 # hardis:org:retrieve:sources:analytics
 
 ## Description
 
-Retrieve all CRM Analytics sources from an org, with workarounds for SFDX bugs
+
+## Command Behavior
+
+**Retrieves all CRM Analytics (formerly Tableau CRM or Einstein Analytics) sources from a Salesforce org, including workarounds for known SFDX bugs.**
+
+This command is designed to extract the complete configuration of your CRM Analytics assets, such as dashboards, dataflows, lenses, and recipes. It's essential for version controlling your Analytics development, migrating assets between environments, or backing up your Analytics configurations.
+
+Key functionalities:
+
+- **Comprehensive Retrieval:** Fetches all supported CRM Analytics metadata types.
+- **SFDX Bug Workarounds:** Incorporates internal logic to handle common issues or limitations encountered when retrieving CRM Analytics metadata using standard Salesforce CLI commands.
+- **Target Org Selection:** Allows you to specify the Salesforce org from which to retrieve the Analytics sources. If not provided, it will prompt for selection.
+
+
+Technical explanations + +The command's technical implementation involves: + +- **Full Org Manifest Generation:** It first generates a complete `package.xml` for the target org using `buildOrgManifest`. This ensures that all available metadata, including CRM Analytics components, are identified. +- **Analytics Metadata Filtering:** It then filters this comprehensive `package.xml` to include only the CRM Analytics-related metadata types (e.g., `WaveApplication`, `WaveDashboard`, `WaveDataflow`, `WaveLens`, `WaveRecipe`, `WaveXmd`). +- **Filtered `package.xml` Creation:** A new `package.xml` file containing only the filtered CRM Analytics metadata is created temporarily. +- **Salesforce CLI Retrieval:** It executes the `sf project retrieve start` command, using the newly created Analytics-specific `package.xml` to retrieve the sources to your local project. +- **Temporary File Management:** It uses `createTempDir` to manage temporary files and directories created during the process. +- **Interactive Org Selection:** Uses `promptOrgUsernameDefault` to guide the user in selecting the target Salesforce org if not provided via flags. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:retrieve:sources:analytics +$ sf hardis:org:retrieve:sources:analytics ``` diff --git a/docs/hardis/org/retrieve/sources/dx.md b/docs/hardis/org/retrieve/sources/dx.md index b2b7143b7..ea9622ff4 100644 --- a/docs/hardis/org/retrieve/sources/dx.md +++ b/docs/hardis/org/retrieve/sources/dx.md @@ -1,32 +1,64 @@ - + # hardis:org:retrieve:sources:dx ## Description -Retrieve Salesforce DX project from org + +## Command Behavior + +**Retrieves Salesforce metadata from an org and converts it into Salesforce DX (SFDX) source format.** + +This command provides a flexible way to pull metadata from any Salesforce org into your local SFDX project. It's particularly useful for: + +- **Initial Project Setup:** Populating a new SFDX project with existing org metadata. +- **Environment Synchronization:** Bringing changes from a Salesforce org (e.g., a sandbox) into your local development environment. +- **Selective Retrieval:** Allows you to specify which metadata types to retrieve, or to filter out certain types. +- **Org Shape Creation:** Can optionally create an org shape, which is useful for defining the characteristics of scratch orgs. + +Key functionalities: + +- **Metadata Retrieval:** Connects to a target Salesforce org and retrieves metadata based on specified filters. +- **MDAPI to SFDX Conversion:** Converts the retrieved metadata from Metadata API format to SFDX source format. +- **Org Shape Generation (Optional):** If the `--shape` flag is used, it also captures the org's shape and stores installed package information. +- **Temporary File Management:** Uses temporary folders for intermediate steps, ensuring a clean working directory. + +
+Technical explanations + +The command's technical implementation involves: + +- **Temporary Directory Management:** It creates and manages temporary directories (`./tmp`, `mdapipkg`, `sfdx-project`) to stage the retrieved metadata and the converted SFDX sources. +- **`MetadataUtils.retrieveMetadatas`:** This utility is used to connect to the Salesforce org and retrieve metadata in Metadata API format. It supports filtering by metadata types and excluding certain items. +- **SFDX Project Creation:** It executes `sf project generate` to create a new SFDX project structure within a temporary directory. +- **MDAPI to SFDX Conversion:** It then uses `sf project convert mdapi` to convert the retrieved metadata from the MDAPI format to the SFDX source format. +- **File System Operations:** It uses `fs-extra` to copy the converted SFDX sources to the main project folder, while preserving important project files like `.gitignore` and `sfdx-project.json`. +- **Org Shape Handling:** If `--shape` is enabled, it copies the generated `package.xml` and stores information about installed packages using `setConfig`. +- **Error Handling:** Includes robust error handling for Salesforce CLI commands and file system operations. +- **WebSocket Communication:** Uses `WebSocketClient.sendRefreshCommandsMessage` to notify connected VS Code clients about changes to the project. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------------------|:-------:|:-----------------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| filteredmetadatas
-m | option | Comma separated list of Metadatas keys to remove from PackageXml file | | | | -| folder
-f | option | Folder | . | | | -| instanceurl
-r | option | URL of org instance | | | | -| json | boolean | format output as json | | | | -| keepmetadatatypes
-k | option | Comma separated list of metadatas types that will be the only ones to be retrieved | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| shape
-o | boolean | Updates project-scratch-def.json from org shape | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| tempfolder
-t | option | Temporary folder | ./tmp | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-----------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| filteredmetadatas
-m | option | Comma separated list of Metadatas keys to remove from PackageXml file | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Folder | . | | | +| instanceurl
-r | option | URL of org instance | | | | +| json | boolean | Format output as json. | | | | +| keepmetadatatypes
-k | option | Comma separated list of metadatas types that will be the only ones to be retrieved | | | | +| shape
-s | boolean | Updates project-scratch-def.json from org shape | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| tempfolder
-t | option | Temporary folder | ./tmp | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:retrieve:sources:dx +$ sf hardis:org:retrieve:sources:dx ``` diff --git a/docs/hardis/org/retrieve/sources/dx2.md b/docs/hardis/org/retrieve/sources/dx2.md index 872ffb76e..7630dd6b6 100644 --- a/docs/hardis/org/retrieve/sources/dx2.md +++ b/docs/hardis/org/retrieve/sources/dx2.md @@ -1,28 +1,52 @@ - + # hardis:org:retrieve:sources:dx2 ## Description -Retrieve Salesforce DX project from org + +## Command Behavior + +**Retrieves Salesforce metadata from an org into SFDX source format, offering flexible input options for specifying metadata to retrieve.** + +This command provides an alternative and enhanced way to pull metadata from any Salesforce org into your local SFDX project. It's particularly useful when you need fine-grained control over which metadata components are retrieved, either by providing a custom `package.xml` or by using predefined templates. + +Key functionalities: + +- **`package.xml` Input:** You can specify the path to a `package.xml` file using the `--packagexml` flag, which defines the exact metadata components to retrieve. +- **Template-Based Retrieval:** Use the `--template` flag to leverage predefined `package.xml` templates provided by sfdx-hardis (e.g., `wave` for CRM Analytics metadata), simplifying common retrieval scenarios. +- **Interactive Input:** If neither `--packagexml` nor `--template` is provided, the command will interactively prompt you to select a `package.xml` file or a template. +- **Target Org Selection:** Allows you to specify the Salesforce org from which to retrieve the sources. If not provided, it will prompt for selection. + +
+Technical explanations + +The command's technical implementation involves: + +- **Org Selection:** It uses `promptOrg` to guide the user in selecting the target Salesforce org if not provided via flags. +- **`package.xml` Resolution:** It determines the `package.xml` to use based on the provided flags (`--packagexml` or `--template`). If a template is used, it resolves the path to the corresponding template file within the sfdx-hardis installation. +- **File System Operations:** It checks if the specified `package.xml` file exists. If the file is outside the current project directory, it copies it to a temporary location within the project to ensure proper handling by the Salesforce CLI. +- **Salesforce CLI Retrieval:** It executes the `sf project retrieve start` command, passing the resolved `package.xml` path and the target username to retrieve the sources. +- **User Feedback:** Provides clear messages to the user about the retrieval process and its success. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| packagexml
-x | option | Path to package.xml file | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| template
-t | option | sfdx-hardis package.xml Template name. ex: wave | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| packagexml
-x | option | Path to package.xml file | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| template
-t | option | sfdx-hardis package.xml Template name. ex: wave | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:retrieve:sources:dx2 +$ sf hardis:org:retrieve:sources:dx2 ``` diff --git a/docs/hardis/org/retrieve/sources/metadata.md b/docs/hardis/org/retrieve/sources/metadata.md index 97d1621f0..d80cabb59 100644 --- a/docs/hardis/org/retrieve/sources/metadata.md +++ b/docs/hardis/org/retrieve/sources/metadata.md @@ -1,34 +1,62 @@ - + # hardis:org:retrieve:sources:metadata ## Description -Retrieve Salesforce DX project from org + +## Command Behavior + +**Retrieves Salesforce metadata from an org into a local directory, primarily for backup and monitoring purposes.** + +This command is designed to pull metadata from any Salesforce org, providing a snapshot of its configuration. It's particularly useful in monitoring contexts where you need to track changes in an org's metadata over time. + +Key functionalities: + +- **Metadata Retrieval:** Connects to a target Salesforce org and retrieves metadata based on a specified `package.xml`. +- **Managed Package Filtering:** By default, it filters out metadata from managed packages to reduce the volume of retrieved data. This can be overridden with the `--includemanaged` flag. +- **Monitoring Integration:** Designed to be used within a monitoring CI/CD job, it performs additional post-retrieval actions like running Apex tests and checking for legacy API usage. + +
+Technical explanations + +The command's technical implementation involves: + +- **Git Repository Check:** Ensures the current directory is a Git repository and initializes it if necessary. +- **`MetadataUtils.retrieveMetadatas`:** This utility is the core of the retrieval process. It connects to the Salesforce org, retrieves metadata based on the provided `package.xml` and filtering options (e.g., `filterManagedItems`), and places the retrieved files in a specified folder. +- **File System Operations:** Uses `fs-extra` to manage directories and copy retrieved files to the target folder. +- **Post-Retrieval Actions (for Monitoring Jobs):** If the command detects it's running within a monitoring CI/CD job (`isMonitoringJob()`): + - It updates the `.gitlab-ci.yml` file if `AUTO_UPDATE_GITLAB_CI_YML` is set. + - It converts the retrieved metadata into SFDX format using `sf project convert mdapi`. + - It executes `sf hardis:org:test:apex` to run Apex tests. + - It executes `sf hardis:org:diagnose:legacyapi` to check for legacy API usage. + - It logs warnings if post-actions fail or if the monitoring version is deprecated. +- **Error Handling:** Includes robust error handling for retrieval failures and post-action execution. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Folder | . | | | -| includemanaged | boolean | Include items from managed packages | | | | -| instanceurl
-r | option | URL of org instance | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| packagexml
-p | option | Path to package.xml manifest file | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Folder | . | | | +| includemanaged | boolean | Include items from managed packages | | | | +| instanceurl
-r | option | URL of org instance | | | | +| json | boolean | Format output as json. | | | | +| packagexml
-p | option | Path to package.xml manifest file | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:retrieve:sources:metadata +$ sf hardis:org:retrieve:sources:metadata ``` ```shell -SFDX_RETRIEVE_WAIT_MINUTES=200 sfdx hardis:org:retrieve:sources:metadata +$ SFDX_RETRIEVE_WAIT_MINUTES=200 sf hardis:org:retrieve:sources:metadata ``` diff --git a/docs/hardis/org/retrieve/sources/retrofit.md b/docs/hardis/org/retrieve/sources/retrofit.md index 64a579548..0c3bc0c5d 100644 --- a/docs/hardis/org/retrieve/sources/retrofit.md +++ b/docs/hardis/org/retrieve/sources/retrofit.md @@ -1,4 +1,4 @@ - + # hardis:org:retrieve:sources:retrofit ## Description @@ -9,36 +9,36 @@ Retrieve changes from org link to a ref branch not present in sources Define the following properties in **.sfdx-hardis.yml** -- **productionBranch** : Name of the git branch that is corresponding to production org -- **retrofitBranch** : Name of the git branch that will be used as merge request target + - **productionBranch** : Name of the git branch that is corresponding to production org + - **retrofitBranch** : Name of the git branch that will be used as merge request target List of metadata to retrieve can be set in three way, in order of priority : -- `CI_SOURCES_TO_RETROFIT`: env variable (can be defined in CI context) -- `sourcesToRetrofit` property in `.sfdx-hardis.yml` -- Default list: - - - CompactLayout - - CustomApplication - - CustomField - - CustomLabel - - CustomLabels - - CustomMetadata - - CustomObject - - CustomObjectTranslation - - CustomTab - - DuplicateRule - - EmailTemplate - - FlexiPage - - GlobalValueSet - - Layout - - ListView - - MatchingRules - - PermissionSet - - RecordType - - StandardValueSet - - Translations - - ValidationRule + - `CI_SOURCES_TO_RETROFIT`: env variable (can be defined in CI context) + - `sourcesToRetrofit` property in `.sfdx-hardis.yml` + - Default list: + + - CompactLayout + - CustomApplication + - 
CustomField + - CustomLabel + - CustomLabels + - CustomMetadata + - CustomObject + - CustomObjectTranslation + - CustomTab + - DuplicateRule + - EmailTemplate + - FlexiPage + - GlobalValueSet + - Layout + - ListView + - MatchingRules + - PermissionSet + - RecordType + - StandardValueSet + - Translations + - ValidationRule You can also ignore some files even if they have been updated in production. To do that, define property **retrofitIgnoredFiles** in .sfdx-hardis.yml @@ -56,14 +56,13 @@ Retrieve changes from org link to a ref branch not present in sources ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:-------------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| commit | boolean | If true, a commit will be performed after the retrofit | | | | -| commitmode | option | Defines if we commit all retrieved updates, or all updates including creations | updated | | updated
all | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:-------------------------------------------------------------------------------|:-------:|:--------:|:---------------:| +| commit | boolean | If true, a commit will be performed after the retrofit | | | | +| commitmode | option | Defines if we commit all retrieved updates, or all updates including creations | updated | | updated
all | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | |productionbranch|option|Name of the git branch corresponding to the org we want to perform the retrofit on. Can be defined in productionBranch property in .sfdx-hardis.yml|||| |push|boolean|If true, a push will be performed after the retrofit|||| @@ -71,21 +70,21 @@ Can be defined in productionBranch property in .sfdx-hardis.yml|||| |retrofittargetbranch|option|Name of branch the merge request will have as target Can be defined in retrofitBranch property in .sfdx-hardis.yml|||| |skipauth|boolean|Skip authentication check when a default username is required|||| -|targetusername
-u|option|username or alias for the target org; overrides default target org|||| +|target-org
-o|option|undefined|||| |websocket|option|Websocket host:port for VsCode SFDX Hardis UI integration|||| ## Examples ```shell -sfdx hardis:org:retrieve:sources:retrofit +$ sf hardis:org:retrieve:sources:retrofit ``` ```shell -sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --commit --commitmode updated +sf hardis:org:retrieve:sources:retrofit --productionbranch master --commit --commitmode updated ``` ```shell -sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --retrofitbranch preprod --commit --commitmode updated --push --pushmode mergerequest +sf hardis:org:retrieve:sources:retrofit --productionbranch master --retrofitbranch preprod --commit --commitmode updated --push --pushmode mergerequest ``` diff --git a/docs/hardis/org/select.md b/docs/hardis/org/select.md index 6d279573b..b54a35f3e 100644 --- a/docs/hardis/org/select.md +++ b/docs/hardis/org/select.md @@ -1,26 +1,51 @@ - + # hardis:org:select ## Description -Interactive org selection for user + +## Command Behavior + +**Allows you to select a Salesforce org and set it as your default, optionally filtering by Dev Hub or scratch orgs.** + +This command simplifies switching between different Salesforce environments. It presents an interactive list of your authenticated orgs, enabling you to quickly set a new default org for subsequent Salesforce CLI commands. + +Key functionalities: + +- **Interactive Org Selection:** Displays a list of your authenticated Salesforce orgs, allowing you to choose one. +- **Default Org Setting:** Sets the selected org as the default for your Salesforce CLI environment. +- **Dev Hub Filtering:** The `--devhub` flag filters the list to show only Dev Hub orgs. +- **Scratch Org Filtering:** The `--scratch` flag filters the list to show only scratch orgs related to your default Dev Hub. +- **Connection Verification:** Ensures that the selected org is connected and prompts for re-authentication if necessary. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Org Prompt:** Uses the `promptOrg` utility to display a list of available Salesforce orgs and allows the user to select one. It passes the `devHub` and `scratch` flags to `promptOrg` to filter the displayed list. +- **Default Org Configuration:** The `promptOrg` utility (internally) handles setting the selected org as the default using Salesforce CLI's configuration mechanisms. +- **Connection Check:** It calls `makeSureOrgIsConnected` to verify the connection status of the selected org and guides the user to re-authenticate if the org is not connected. +- **Salesforce CLI Integration:** It leverages Salesforce CLI's underlying commands for org listing and authentication. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:---------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| devhub
-h | boolean | Also connect associated DevHub | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| scratch
-s | boolean | Select scratch org related to default DevHub | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:---------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| devhub
-h | boolean | Also connect associated DevHub | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| scratch
-s | boolean | Select scratch org related to default DevHub | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:select +$ sf hardis:org:select ``` diff --git a/docs/hardis/org/test/apex.md b/docs/hardis/org/test/apex.md index e2df5da00..a0412e4e6 100644 --- a/docs/hardis/org/test/apex.md +++ b/docs/hardis/org/test/apex.md @@ -1,4 +1,4 @@ - + # hardis:org:test:apex ## Description @@ -10,26 +10,27 @@ If following configuration is defined, it will fail if apex coverage target is n - Env `APEX_TESTS_MIN_COVERAGE_ORG_WIDE` or `.sfdx-hardis` property `apexTestsMinCoverageOrgWide` - Env `APEX_TESTS_MIN_COVERAGE_ORG_WIDE` or `.sfdx-hardis` property `apexTestsMinCoverageOrgWide` -You can override env var SFDX_TEST_WAIT_MINUTES to wait more than 60 minutes +You can override env var SFDX_TEST_WAIT_MINUTES to wait more than 60 minutes. + +This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-apex-tests/) and can output Grafana, Slack and MsTeams Notifications. ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------------:|:--------:|:----------------------------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| testlevel
-l | option | Level of tests to apply to validate deployment | RunLocalTests | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------------:|:--------:|:----------------------------------------------------------------------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| testlevel
-l | option | Level of tests to apply to validate deployment | RunLocalTests | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:test:apex +$ sf hardis:org:test:apex ``` diff --git a/docs/hardis/org/user/activateinvalid.md b/docs/hardis/org/user/activateinvalid.md index 258e1f772..377e51ba8 100644 --- a/docs/hardis/org/user/activateinvalid.md +++ b/docs/hardis/org/user/activateinvalid.md @@ -1,4 +1,4 @@ - + # hardis:org:user:activateinvalid ## Description @@ -14,29 +14,28 @@ See article below ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:-------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| profiles
-p | option | Comma-separated list of profiles names that you want to reactive users assigned to and with a .invalid email | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:-------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| profiles
-p | option | Comma-separated list of profile names that you want to reactivate users assigned to and with a .invalid email | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:user:activateinvalid +$ sf hardis:org:user:activateinvalid ``` ```shell -sfdx hardis:org:user:activateinvalid --targetusername myuser@myorg.com +$ sf hardis:org:user:activateinvalid --target-org my-user@myorg.com ``` ```shell -sfdx hardis:org:user:activateinvalid --profiles 'System Administrator,MyCustomProfile' --targetusername myuser@myorg.com +$ sf hardis:org:user:activateinvalid --profiles 'System Administrator,MyCustomProfile' --target-org my-user@myorg.com ``` diff --git a/docs/hardis/org/user/freeze.md b/docs/hardis/org/user/freeze.md index 316ee2914..8d8777411 100644 --- a/docs/hardis/org/user/freeze.md +++ b/docs/hardis/org/user/freeze.md @@ -1,48 +1,69 @@ - + # hardis:org:user:freeze ## Description -Mass freeze users in org before a maintenance or go live -See user guide in the following article +## Command Behavior - +**Freezes Salesforce user logins, temporarily revoking access for selected users.** + +This command allows administrators to freeze Salesforce user logins. It provides a controlled way to temporarily revoke user access without deactivating the user record itself. This is useful for managing user access during leaves, security incidents, or when a user's access needs to be temporarily suspended. + +Key functionalities: + +- **User Selection:** You can select users to freeze based on their assigned profiles. + - `--includeprofiles`: Freeze users belonging to a comma-separated list of specified profiles. + - `--excludeprofiles`: Freeze users belonging to all profiles *except* those specified in a comma-separated list. + - If no profile flags are provided, an interactive menu will allow you to select profiles. +- **Interactive Confirmation:** In non-CI environments, it prompts for confirmation before freezing the selected users. 
+- **Bulk Freezing:** Efficiently freezes multiple user logins using Salesforce's Bulk API. +- **Reporting:** Generates CSV and XLSX reports of the users that are about to be frozen. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Bulk API):** It executes SOQL queries against the `User` and `Profile` objects to identify active users based on the provided profile filters. It then queries the `UserLogin` object to find active login sessions for these users. +- **Interactive Prompts:** Uses the `prompts` library to guide the user through profile selection and to confirm the freezing operation. +- **Bulk Update:** It constructs an array of `UserLogin` records with their `Id` and `IsFrozen` set to `true`, then uses `bulkUpdate` to perform the mass update operation on the Salesforce org. +- **Reporting:** It uses `generateReports` to create CSV and XLSX files containing details of the users to be frozen. +- **Logging:** Provides clear messages about the number of users found and the success of the freezing process. +
-[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| excludeprofiles
-e | option | List of profiles that you want to NOT freeze, separated by commas | | | | -| includeprofiles
-p | option | List of profiles that you want to freeze, separated by commas | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| maxuserdisplay
-m | option | Maximum users to display in logs | 100 | | | -| name
-n | option | Filter according to Name criteria | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-----------------------|:-------:|:------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| excludeprofiles
-e | option | List of profiles that you want to NOT freeze, separated by commas | | | | +| flags-dir | option | undefined | | | | +| includeprofiles
-p | option | List of profiles that you want to freeze, separated by commas | | | | +| json | boolean | Format output as json. | | | | +| maxuserdisplay
-m | option | Maximum users to display in logs | 100 | | | +| name
-n | option | Filter according to Name criteria | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:user:freeze +$ sf hardis:org:user:freeze ``` ```shell -sfdx hardis:org:user:freeze --targetusername myuser@myorg.com +$ sf hardis:org:user:freeze --target-org my-user@myorg.com ``` ```shell -sfdx hardis:org:user:freeze --includeprofiles 'Standard' +$ sf hardis:org:user:freeze --includeprofiles 'Standard' ``` ```shell -sfdx hardis:org:user:freeze --excludeprofiles 'System Administrator,Some Other Profile' +$ sf hardis:org:user:freeze --excludeprofiles 'System Administrator,Some Other Profile' ``` diff --git a/docs/hardis/org/user/unfreeze.md b/docs/hardis/org/user/unfreeze.md index b1e8d31a7..fa406e327 100644 --- a/docs/hardis/org/user/unfreeze.md +++ b/docs/hardis/org/user/unfreeze.md @@ -1,48 +1,69 @@ - + # hardis:org:user:unfreeze ## Description -Mass unfreeze users in org after a maintenance or go live -See user guide in the following article +## Command Behavior - +**Unfreezes Salesforce user logins, restoring access for selected users.** + +This command allows administrators to unfreeze Salesforce user logins, reactivating their access to the Salesforce org. This is the counterpart to the `freeze` command and is used to restore access after a temporary suspension. + +Key functionalities: + +- **User Selection:** You can select users to unfreeze based on their assigned profiles. + - `--includeprofiles`: Unfreeze users belonging to a comma-separated list of specified profiles. + - `--excludeprofiles`: Unfreeze users belonging to all profiles *except* those specified in a comma-separated list. + - If no profile flags are provided, an interactive menu will allow you to select profiles. +- **Interactive Confirmation:** In non-CI environments, it prompts for confirmation before unfreezing the selected users. +- **Bulk Unfreezing:** Efficiently unfreezes multiple user logins using Salesforce's Bulk API. 
+- **Reporting:** Generates CSV and XLSX reports of the users that are about to be unfrozen. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Bulk API):** It executes SOQL queries against the `User` and `Profile` objects to identify active users based on the provided profile filters. It then queries the `UserLogin` object to find frozen login sessions for these users. +- **Interactive Prompts:** Uses the `prompts` library to guide the user through profile selection and to confirm the unfreezing operation. +- **Bulk Update:** It constructs an array of `UserLogin` records with their `Id` and `IsFrozen` set to `false`, then uses `bulkUpdate` to perform the mass update operation on the Salesforce org. +- **Reporting:** It uses `generateReports` to create CSV and XLSX files containing details of the users to be unfrozen. +- **Logging:** Provides clear messages about the number of users found and the success of the unfreezing process. +
-[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| excludeprofiles
-e | option | List of profiles that you want to NOT unfreeze, separated by commas | | | | -| includeprofiles
-p | option | List of profiles that you want to unfreeze, separated by commas | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| maxuserdisplay
-m | option | Maximum users to display in logs | 100 | | | -| name
-n | option | Filter according to Name criteria | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| excludeprofiles
-e | option | List of profiles that you want to NOT unfreeze, separated by commas | | | | +| flags-dir | option | undefined | | | | +| includeprofiles
-p | option | List of profiles that you want to unfreeze, separated by commas | | | | +| json | boolean | Format output as json. | | | | +| maxuserdisplay
-m | option | Maximum users to display in logs | 100 | | | +| name
-n | option | Filter according to Name criteria | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:org:user:unfreeze +$ sf hardis:org:user:unfreeze ``` ```shell -sfdx hardis:org:user:unfreeze --targetusername myuser@myorg.com +$ sf hardis:org:user:unfreeze --target-org my-user@myorg.com ``` ```shell -sfdx hardis:org:user:unfreeze --includeprofiles 'Standard' +$ sf hardis:org:user:unfreeze --includeprofiles 'Standard' ``` ```shell -sfdx hardis:org:user:unfreeze --excludeprofiles 'System Administrator,Some Other Profile' +$ sf hardis:org:user:unfreeze --excludeprofiles 'System Administrator,Some Other Profile' ``` diff --git a/docs/hardis/package/create.md b/docs/hardis/package/create.md index 9aeea5169..00787b58d 100644 --- a/docs/hardis/package/create.md +++ b/docs/hardis/package/create.md @@ -1,26 +1,50 @@ - + # hardis:package:create ## Description -Create a new package + +## Command Behavior + +**Creates a new Salesforce package (either Managed or Unlocked) in your Dev Hub.** + +This command streamlines the process of setting up a new Salesforce package, which is a fundamental step for modularizing your Salesforce metadata and enabling continuous integration and delivery practices. It guides you through defining the package's essential properties. + +Key functionalities: + +- **Interactive Package Definition:** Prompts you for the package name, the path to its source code, and the package type (Managed or Unlocked). +- **Package Type Selection:** + - **Managed Packages:** Ideal for AppExchange solutions, where code is hidden in subscriber orgs. + - **Unlocked Packages:** Suitable for client projects or shared tooling, where code is readable and modifiable in subscriber orgs. +- **Package Creation:** Executes the Salesforce CLI command to create the package in your connected Dev Hub. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Prompts:** Uses the `prompts` library to gather necessary information from the user, such as `packageName`, `packagePath`, and `packageType`. +- **Salesforce CLI Integration:** It constructs and executes the `sf package create` command, passing the user-provided details as arguments. +- **`execSfdxJson`:** This utility is used to execute the Salesforce CLI command and capture its JSON output, which includes the newly created package's ID. +- **User Feedback:** Provides clear messages to the user about the successful creation of the package, including its ID and the associated Dev Hub. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:package:create +$ sf hardis:package:create ``` diff --git a/docs/hardis/package/install.md b/docs/hardis/package/install.md index 5d7706ad2..881a68590 100644 --- a/docs/hardis/package/install.md +++ b/docs/hardis/package/install.md @@ -1,4 +1,4 @@ - + # hardis:package:install ## Description @@ -10,22 +10,21 @@ Assisted menu to propose to update `installedPackages` property in `.sfdx-hardis ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| installationkey
-k | option | installation key for key-protected package (default: null) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| package
-p | option | Package Version Id to install (04t...) | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| installationkey
-k | option | installation key for key-protected package (default: null) | | | | +| json | boolean | Format output as json. | | | | +| package
-p | option | Package Version Id to install (04t...) | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:package:install +$ sf hardis:package:install ``` diff --git a/docs/hardis/package/mergexml.md b/docs/hardis/package/mergexml.md index c0325d662..81682987e 100644 --- a/docs/hardis/package/mergexml.md +++ b/docs/hardis/package/mergexml.md @@ -1,35 +1,63 @@ - + # hardis:package:mergexml ## Description -Select and merge package.xml files + +## Command Behavior + +**Merges multiple Salesforce `package.xml` files into a single, consolidated `package.xml` file.** + +This command is useful for combining metadata definitions from various sources (e.g., different feature branches, separate development efforts) into one comprehensive package.xml, which can then be used for deployments or retrievals. + +Key functionalities: + +- **Flexible Input:** You can specify the `package.xml` files to merge either by: + - Providing a comma-separated list of file paths using the `--packagexmls` flag. + - Specifying a folder and a glob pattern using `--folder` and `--pattern` to automatically discover `package.xml` files. + - If no input is provided, an interactive menu will prompt you to select files from the `manifest` folder. +- **Customizable Output:** You can define the name and path of the resulting merged `package.xml` file using the `--result` flag. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses `glob` to find `package.xml` files based on the provided folder and pattern, or it directly uses the list of files from the `--packagexmls` flag. +- **Interactive Prompts:** If no `package.xml` files are specified, it uses the `prompts` library to allow the user to interactively select files to merge. +- **`appendPackageXmlFilesContent` Utility:** The core merging logic is handled by the `appendPackageXmlFilesContent` utility function. This function reads the content of each input `package.xml` file, combines their metadata types and members, and writes the consolidated content to the specified result file. +- **XML Manipulation:** Internally, `appendPackageXmlFilesContent` parses the XML of each `package.xml`, merges the `<types>` and `<members>` elements, and then rebuilds the XML structure for the output file. +- **File System Operations:** It uses `fs-extra` to ensure the output directory exists and to write the merged `package.xml` file. +- **WebSocket Communication:** It uses `WebSocketClient.requestOpenFile` to open the generated merged `package.xml` file in VS Code for immediate review. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------------|:-------:|:---------------------------------------------------------------------------------------------|:-----------------:|:--------:|:-----------------------------------------------------:| -| folder
-f | option | Root folder | manifest | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| packagexmls
-p | option | Comma separated list of package.xml files to merge. Will be prompted to user if not provided | | | | -| pattern
-x | option | Name criteria to list package.xml files | /**/*package*.xml | | | -| result
-r | option | Result package.xml file name | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------------|:-------:|:---------------------------------------------------------------------------------------------|:-----------------:|:--------:|:-------:| +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | manifest | | | +| json | boolean | Format output as json. | | | | +| packagexmls
-p | option | Comma separated list of package.xml files to merge. Will be prompted to user if not provided | | | | +| pattern
-x | option | Name criteria to list package.xml files | /**/*package*.xml | | | +| result
-r | option | Result package.xml file name | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:package:mergexml +$ sf hardis:package:mergexml ``` ```shell -sfdx hardis:package:mergexml --folder packages --pattern /**/*.xml --result myMergedPackage.xml +$ sf hardis:package:mergexml --folder packages --pattern /**/*.xml --result myMergedPackage.xml ``` ```shell -sfdx hardis:package:mergexml --packagexmls "config/mypackage1.xml,config/mypackage2.xml,config/mypackage3.xml" --result myMergedPackage.xml +$ sf hardis:package:mergexml --packagexmls "config/mypackage1.xml,config/mypackage2.xml,config/mypackage3.xml" --result myMergedPackage.xml ``` diff --git a/docs/hardis/package/version/create.md b/docs/hardis/package/version/create.md index 359e512e3..345d71145 100644 --- a/docs/hardis/package/version/create.md +++ b/docs/hardis/package/version/create.md @@ -1,30 +1,58 @@ - + # hardis:package:version:create ## Description -Create a new version of an unlocked package + +## Command Behavior + +**Creates a new version of a Salesforce package (2GP or Unlocked) in your Dev Hub.** + +This command is a crucial step in the package development lifecycle, allowing you to iterate on your Salesforce functionalities and prepare them for deployment or distribution. It automates the process of creating a new, immutable package version. + +Key functionalities: + +- **Package Selection:** Prompts you to select an existing package from your `sfdx-project.json` file if not specified via the `--package` flag. +- **Installation Key:** Allows you to set an installation key (password) for the package version, protecting it from unauthorized installations. This can be provided via the `--installkey` flag or interactively. +- **Code Coverage:** Automatically includes code coverage checks during package version creation. 
+- **Post-Creation Actions:** + - **Delete After Creation (`--deleteafter`):** Deletes the newly created package version immediately after its creation. This is useful for testing the package creation process without accumulating unnecessary versions. + - **Install After Creation (`--install`):** Installs the newly created package version on your default Salesforce org. This is convenient for immediate testing or validation. + +
+Technical explanations + +The command's technical implementation involves: + +- **Package Directory Identification:** It identifies the package directory from your `sfdx-project.json` based on the selected package name. +- **Interactive Prompts:** Uses the `prompts` library to guide the user through package selection and installation key input if not provided as command-line arguments. +- **Configuration Persistence:** Stores the `defaultPackageInstallationKey` in your project's configuration (`.sfdx-hardis.yml`) for future use. +- **Salesforce CLI Integration:** It constructs and executes the `sf package version create` command, passing the package ID, installation key, and other flags. +- **`execSfdxJson`:** This utility is used to execute the Salesforce CLI command and capture its JSON output, which includes the `SubscriberPackageVersionId` of the newly created version. +- **Post-Creation Command Execution:** If `--deleteafter` or `--install` flags are set, it executes `sf package version delete` or delegates to `MetadataUtils.installPackagesOnOrg` respectively. +- **Error Handling:** Includes checks for missing package arguments and handles errors during package version creation or post-creation actions. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:--------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| deleteafter | boolean | Delete package version after creating it | | | | -| install
-i | boolean | Install package version on default org after generation | | | | -| installkey
-k | option | Package installation key | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| package
-p | option | Package identifier that you want to use to generate a new package version | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| deleteafter | boolean | Delete package version after creating it | | | | +| flags-dir | option | undefined | | | | +| install
-i | boolean | Install package version on default org after generation | | | | +| installkey
-k | option | Package installation key | | | | +| json | boolean | Format output as json. | | | | +| package
-p | option | Package identifier that you want to use to generate a new package version | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:package:version:create +$ sf hardis:package:version:create ``` diff --git a/docs/hardis/package/version/list.md b/docs/hardis/package/version/list.md index 719b45fb7..0ff13d631 100644 --- a/docs/hardis/package/version/list.md +++ b/docs/hardis/package/version/list.md @@ -1,26 +1,46 @@ - + # hardis:package:version:list ## Description -List versions of unlocked package + +## Command Behavior + +**Lists all Salesforce package versions associated with your Dev Hub.** + +This command provides a comprehensive overview of your Salesforce packages and their versions, including details such as package ID, version number, installation key status, and creation date. It's an essential tool for managing your package development lifecycle, tracking releases, and identifying available versions for installation or promotion. + +Key functionalities: + +- **Comprehensive Listing:** Displays all package versions, regardless of their status (e.g., released, beta). +- **Dev Hub Integration:** Retrieves package version information directly from your connected Dev Hub. + +
+Technical explanations + +The command's technical implementation is straightforward: + +- **Salesforce CLI Integration:** It directly executes the `sf package version list` command. +- **`execCommand`:** This utility is used to run the Salesforce CLI command and capture its output. +- **Output Display:** The raw output from the Salesforce CLI command is displayed to the user, providing all the details about the package versions. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:package:version:list +$ sf hardis:package:version:list ``` diff --git a/docs/hardis/package/version/promote.md b/docs/hardis/package/version/promote.md index a63d73fc4..28fc78b42 100644 --- a/docs/hardis/package/version/promote.md +++ b/docs/hardis/package/version/promote.md @@ -1,31 +1,55 @@ - + # hardis:package:version:promote ## Description -Promote package(s) version(s): convert it from beta to released + +## Command Behavior + +**Promotes a Salesforce package version from beta to released status in your Dev Hub.** + +This command is a critical step in the package development lifecycle, marking a package version as stable and ready for production use. Once promoted, a package version can be installed in production organizations. + +Key functionalities: + +- **Package Version Selection:** Allows you to select a specific package version to promote. If the `--auto` flag is used, it automatically identifies package versions that are not yet released and promotes them. +- **Automated Promotion:** When `--auto` is enabled, it queries for all unreleased package versions and promotes them without further user interaction. +- **Dev Hub Integration:** Interacts with your connected Dev Hub to change the status of the package version. + +
+Technical explanations + +The command's technical implementation involves: + +- **Package Alias Retrieval:** It retrieves package aliases from your `sfdx-project.json` to identify available packages. +- **Automated Promotion Logic:** If `--auto` is used, it executes `sf package version list --released` to get a list of already released packages and then filters the available package aliases to find those that are not yet released. +- **Interactive Prompts:** If not in auto mode, it uses the `prompts` library to allow the user to select a package version to promote. +- **Salesforce CLI Integration:** It constructs and executes the `sf package version promote` command, passing the package version ID. +- **`execSfdxJson`:** This utility is used to execute the Salesforce CLI command and capture its JSON output. +- **Error Handling:** It handles cases where a package version might already be promoted or if other errors occur during the promotion process. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| auto
-d | boolean | Auto-detect which versions of which packages need to be promoted | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:-----------------------------------------------------------------|:-------:|:--------:|:-------:| +| auto
-f | boolean | Auto-detect which versions of which packages need to be promoted | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:package:version:promote +$ sf hardis:package:version:promote ``` ```shell -sfdx hardis:package:version:promote --auto +$ sf hardis:package:version:promote --auto ``` diff --git a/docs/hardis/packagexml/append.md b/docs/hardis/packagexml/append.md new file mode 100644 index 000000000..a7db82258 --- /dev/null +++ b/docs/hardis/packagexml/append.md @@ -0,0 +1,49 @@ + +# hardis:packagexml:append + +## Description + + +## Command Behavior + +**Appends the content of one or more Salesforce `package.xml` files into a single target `package.xml` file.** + +This command is useful for consolidating metadata definitions from various sources into a single manifest. For instance, you might have separate `package.xml` files for different features or metadata types, and this command allows you to combine them into one comprehensive file for deployment or retrieval. + +Key functionalities: + +- **Multiple Input Files:** Takes a comma-separated list of `package.xml` file paths as input. +- **Single Output File:** Merges the content of all input files into a specified output `package.xml` file. +- **Metadata Consolidation:** Combines the `<types>` and `<members>` elements from all input files, ensuring that all unique metadata components are included in the resulting file. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Parsing:** It reads and parses the XML content of each input `package.xml` file. +- **Content Merging:** It iterates through the parsed XML structures, merging the `types` and `members` arrays. If a metadata type exists in multiple input files, its members are combined (duplicates are typically handled by the underlying XML utility). +- **XML Building:** After consolidating the metadata, it rebuilds the XML structure for the output `package.xml` file. +- **File Writing:** The newly constructed XML content is then written to the specified output file. +- **`appendPackageXmlFilesContent` Utility:** The core logic for this operation is encapsulated within the `appendPackageXmlFilesContent` utility function, which handles the parsing, merging, and writing of the `package.xml` files. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------|:-------:|:---------------------------------------------|:-------:|:--------:|:-------:| +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | package.xml output file | | | | +| packagexmls
-p | option | package.xml files path (separated by commas) | | | | +| websocket | option | websocket | | | | + +## Examples + +```shell +$ sf hardis packagexml append -p package1.xml,package2.xml -f package3.xml ``` + + diff --git a/docs/hardis/packagexml/remove.md b/docs/hardis/packagexml/remove.md new file mode 100644 index 000000000..904e2f0d4 --- /dev/null +++ b/docs/hardis/packagexml/remove.md @@ -0,0 +1,52 @@ + +# hardis:packagexml:remove + +## Description + + +## Command Behavior + +**Removes metadata components from a `package.xml` file that are also present in another `package.xml` file (e.g., a `destructiveChanges.xml`).** + +This command is useful for refining your `package.xml` manifests by excluding components that are being deleted or are otherwise irrelevant for a specific deployment or retrieval. For example, you can use it to create a `package.xml` that only contains additions and modifications, by removing items listed in a `destructiveChanges.xml`. + +Key functionalities: + +- **Source `package.xml`:** The main `package.xml` file from which components will be removed (specified by `--packagexml`). Defaults to `package.xml`. +- **Filter `package.xml`:** The `package.xml` file containing the components to be removed from the source (specified by `--removepackagexml`). Defaults to `destructiveChanges.xml`. +- **Output File:** The path to the new `package.xml` file that will contain the filtered content (specified by `--outputfile`). +- **Removed Only Output:** The `--removedonly` flag allows you to generate a `package.xml` that contains *only* the items that were removed from the source `package.xml`. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Parsing:** It reads and parses the XML content of both the source `package.xml` and the filter `package.xml`. +- **Content Comparison and Filtering:** It compares the metadata types and members defined in both files. Components found in the filter `package.xml` are excluded from the output. +- **XML Building:** After filtering, it rebuilds the XML structure for the new `package.xml` file. +- **File Writing:** The newly constructed XML content is then written to the specified output file. +- **`removePackageXmlFilesContent` Utility:** The core logic for this operation is encapsulated within the `removePackageXmlFilesContent` utility function, which handles the parsing, filtering, and writing of the `package.xml` files. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------------|:-------:|:----------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| outputfile
-f | option | package.xml output file | | | | +| packagexml
-p | option | package.xml file to reduce | | | | +| removedonly
-z | boolean | Use this flag to generate a package.xml with only removed items | | | | +| removepackagexml
-r | option | package.xml file to use to filter input package.xml | | | | +| websocket | option | websocket | | | | + +## Examples + +```shell +$ sf hardis packagexml:remove -p package.xml -r destructiveChanges.xml -f my-reduced-package.xml ``` + + diff --git a/docs/hardis/project/audit/apiversion.md b/docs/hardis/project/audit/apiversion.md index 34634121d..df851eca9 100644 --- a/docs/hardis/project/audit/apiversion.md +++ b/docs/hardis/project/audit/apiversion.md @@ -1,26 +1,50 @@ - + # hardis:project:audit:apiversion ## Description -Audit API version +This command identifies metadata with an apiVersion lower than the value specified in the --minimumapiversion parameter. + + It can also update the apiVersion to a specific value: + - When --fix parameter is provided (updates to minimumapiversion) + - When --newapiversion is specified (updates to that version) + + Example to handle [ApexClass / Trigger & ApexPage mandatory version upgrade](https://help.salesforce.com/s/articleView?id=sf.admin_locales_update_api.htm&type=5) : + + `sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 --newapiversion 50` + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| failiferror
-f | boolean | Fails (exit code 1) if an error is found | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| minimumapiversion
-m | option | Minimum allowed API version | 20 | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:---------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| failiferror
-f | boolean | Fails (exit code 1) if an error is found | | | | +| fix | boolean | Automatically update API versions in files that are below the minimum version threshold to match the minimum version | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| metadatatype | option | Metadata Types to fix. Comma separated. Supported Metadata types: ApexClass, ApexTrigger, ApexPage | | | | +| minimumapiversion
-m | option | Minimum allowed API version | 20 | | | +| newapiversion
-n | option | Define an API version value to apply when updating files | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:audit:apiversion +$ sf hardis:project:audit:apiversion +``` + +```shell +$ sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 +``` + +```shell +$ sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 --fix +``` + +```shell +$ sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 --newapiversion 50 ``` diff --git a/docs/hardis/project/audit/callincallout.md b/docs/hardis/project/audit/callincallout.md index f05249787..79508d9b2 100644 --- a/docs/hardis/project/audit/callincallout.md +++ b/docs/hardis/project/audit/callincallout.md @@ -1,24 +1,52 @@ - + # hardis:project:audit:callincallout ## Description -Generate list of callIn and callouts from sfdx project + +## Command Behavior + +**Audits Apex classes for inbound (Call-In) and outbound (Call-Out) API calls, providing insights into integration points.** + +This command helps developers and architects understand the integration landscape of their Salesforce project by identifying where Apex code interacts with external systems or exposes functionality for external consumption. It's useful for security reviews, refactoring efforts, and documenting system integrations. + +Key functionalities: + +- **Inbound Call Detection:** Identifies Apex methods exposed as web services (`webservice static`) or REST resources (`@RestResource`). +- **Outbound Call Detection:** Detects HTTP callouts (`new HttpRequest`). +- **Detailed Information:** Extracts relevant details for each detected call, such as endpoint URLs for outbound calls or resource names for inbound calls. 
+- **Test Class Exclusion:** Automatically skips test classes (`@isTest`) to focus on production code. +- **CSV Report Generation:** Generates a CSV report summarizing all detected call-ins and call-outs, including their type, subtype (protocol), file name, namespace, and extracted details. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses `glob` to find all Apex class (`.cls`) and trigger (`.trigger`) files within the project. +- **Content Analysis:** Reads the content of each Apex file and uses regular expressions to identify patterns indicative of inbound or outbound calls. +- **Pattern Matching:** Defines a set of `catchers`, each with a `type` (INBOUND/OUTBOUND), `subType` (SOAP/REST/HTTP), and `regex` to match specific API call patterns. It also includes `detail` regexes to extract additional information. +- **`catchMatches` Utility:** This utility function is used to apply the defined `catchers` to each Apex file and extract all matching occurrences. +- **Data Structuring:** Organizes the extracted information into a structured format, including the file name, namespace, and detailed matches. +- **Reporting:** Uses `generateReports` to create a CSV report and display a table in the console, summarizing the audit findings. +- **Filtering:** Filters out files that start with 'hidden' or contain `@isTest` to focus on relevant code. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:audit:callouts +$ sf hardis:project:audit:callouts ``` diff --git a/docs/hardis/project/audit/duplicatefiles.md b/docs/hardis/project/audit/duplicatefiles.md index d1f50550a..3ad77bbf3 100644 --- a/docs/hardis/project/audit/duplicatefiles.md +++ b/docs/hardis/project/audit/duplicatefiles.md @@ -1,25 +1,49 @@ - + # hardis:project:audit:duplicatefiles ## Description -Find duplicate files in sfdx folder (often from past @salesforce/cli bugs) + +## Command Behavior + +**Identifies and reports on duplicate file names within your Salesforce DX project folder.** + +This command helps detect instances where files with the same name exist in different directories within your SFDX project. While some duplicates are expected (e.g., metadata files for different components of the same object), others can be a result of past Salesforce CLI bugs or improper source control practices, leading to confusion and potential deployment issues. + +Key functionalities: + +- **File Scan:** Recursively scans a specified root path (defaults to the current working directory) for all files. +- **Duplicate Detection:** Identifies files that share the same name but reside in different locations. +- **Intelligent Filtering:** Accounts for known patterns where duplicate file names are legitimate (e.g., `field-meta.xml`, `listView-meta.xml`, `recordType-meta.xml`, `webLink-meta.xml` files within object subdirectories). +- **Reporting:** Outputs a JSON object detailing the detected duplicates, including the file name and the full paths of its occurrences. + +
+Technical explanations + +The command's technical implementation involves: + +- **File System Traversal:** Uses `fs-readdir-recursive` to list all files within the specified directory, excluding `node_modules`. +- **Duplicate Logic:** Iterates through the list of all files and compares their base names. If two files have the same base name but different full paths, they are considered potential duplicates. +- **Exclusion Logic:** The `checkDoublingAllowed` function contains regular expressions to identify specific file path patterns where duplicate names are acceptable (e.g., `objects/Account/fields/MyField__c.field-meta.xml` and `objects/Contact/fields/MyField__c.field-meta.xml`). This prevents false positives. +- **Data Structuring:** Organizes the results into a JavaScript object where keys are duplicate file names and values are arrays of their full paths. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-----------------------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Root path to check | C:\git\pro\sfdx-hardis2 | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-----------------------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Root path to check | C:\git\pro\sfdx-hardis2 | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:audit:duplicatefiles +$ sf hardis:project:audit:duplicatefiles ``` diff --git a/docs/hardis/project/audit/remotesites.md b/docs/hardis/project/audit/remotesites.md index 4b128e215..eedb34bc6 100644 --- a/docs/hardis/project/audit/remotesites.md +++ b/docs/hardis/project/audit/remotesites.md @@ -1,24 +1,50 @@ - + # hardis:project:audit:remotesites ## Description -Generate list of remote sites + +## Command Behavior + +**Audits Salesforce Remote Site Settings in your project, providing a comprehensive overview of external endpoints accessed by your Salesforce org.** + +This command is crucial for security reviews, compliance checks, and understanding the external integrations of your Salesforce environment. It helps identify all configured remote sites, their URLs, activity status, and associated protocols. + +Key functionalities: + +- **Remote Site Discovery:** Scans your project for RemoteSiteSetting metadata files (.remoteSite-meta.xml or .remoteSite). +- **URL Extraction:** Extracts the URL, active status, and description for each remote site. +- **Protocol and Domain Identification:** Determines the protocol (HTTP/HTTPS) and extracts the domain from each URL, providing a clearer picture of the external systems being accessed. +- **Reporting:** Generates a CSV report summarizing all detected remote sites, including their protocol, domain, name, URL, active status, and description. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses `glob` to find all RemoteSiteSetting metadata files within the project. +- **Content Analysis:** Reads the content of each XML file and uses regular expressions (/(.*?)<\/url>/gim, /(.*?)<\/isActive>/gim, /(.*?)<\/description>/gim) to extract relevant details. +- **`catchMatches` Utility:** This utility function is used to apply the defined regular expressions to each file and extract all matching occurrences. +- **URL Parsing:** Uses Node.js's `url` module to parse the extracted URLs and `psl` (Public Suffix List) to extract the domain name from the hostname. +- **Data Structuring:** Organizes the extracted information into a structured format, including the remote site's name, file name, namespace, URL, active status, description, protocol, and domain. +- **Reporting:** Uses `generateReports` to create a CSV report and display a table in the console, summarizing the audit findings. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:audit:remotesites +$ sf hardis:project:audit:remotesites ``` diff --git a/docs/hardis/project/clean/emptyitems.md b/docs/hardis/project/clean/emptyitems.md index 2a02eb365..06f3b4b27 100644 --- a/docs/hardis/project/clean/emptyitems.md +++ b/docs/hardis/project/clean/emptyitems.md @@ -1,25 +1,51 @@ - + # hardis:project:clean:emptyitems ## Description -Remove unwanted empty items within sfdx project sources + +## Command Behavior + +**Removes empty or irrelevant metadata items from your Salesforce DX project sources.** + +This command helps maintain a clean and efficient Salesforce codebase by deleting metadata files that are essentially empty or contain no meaningful configuration. These files can sometimes be generated during retrieval processes or remain after refactoring, contributing to unnecessary clutter in your project. + +Key functionalities: + +- **Targeted Cleaning:** Specifically targets and removes empty instances of: + - Global Value Set Translations (`.globalValueSetTranslation-meta.xml`) + - Standard Value Sets (`.standardValueSet-meta.xml`) + - Sharing Rules (`.sharingRules-meta.xml`) +- **Content-Based Deletion:** It checks the XML content of these files for the presence of specific tags (e.g., `valueTranslation` for Global Value Set Translations) to determine if they are truly empty or lack relevant data. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses `glob` to find files matching predefined patterns for Global Value Set Translations, Standard Value Sets, and Sharing Rules within the specified root folder (defaults to `force-app`). +- **XML Parsing:** For each matching file, it reads and parses the XML content using `parseXmlFile`. +- **Content Validation:** It then checks the parsed XML object for the existence of specific nested properties (e.g., `xmlContent.GlobalValueSetTranslation.valueTranslation`). If these properties are missing or empty, the file is considered empty. +- **File Deletion:** If a file is determined to be empty, it is removed from the file system using `fs.remove`. +- **Logging:** Provides clear messages about which files are being removed and a summary of the total number of items cleaned. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:emptyitems +$ sf hardis:project:clean:emptyitems ``` diff --git a/docs/hardis/project/clean/filter-xml-content.md b/docs/hardis/project/clean/filter-xml-content.md new file mode 100644 index 000000000..ef4361b7c --- /dev/null +++ b/docs/hardis/project/clean/filter-xml-content.md @@ -0,0 +1,61 @@ + +# hardis:project:clean:filter-xml-content + +## Description + + +## Command Behavior + +**Filters the content of Salesforce metadata XML files to remove specific elements, enabling more granular deployments.** + +This command addresses a common challenge in Salesforce development: deploying only a subset of metadata from an XML file when the target org might not support all elements or when certain elements are not desired. It allows you to define rules in a JSON configuration file to remove unwanted XML nodes. + +Key functionalities: + +- **Configurable Filtering:** Uses a JSON configuration file (e.g., `filter-config.json`) to define which XML elements to remove. This configuration specifies the XML tags to target and the values within those tags that should trigger removal. +- **Targeted File Processing:** Processes XML files within a specified input folder (defaults to current directory) and writes the filtered content to an output folder. +- **Example Use Cases:** Useful for scenarios like: + - Removing references to features not enabled in the target org. + - Stripping out specific profile permissions or field-level security settings. + - Cleaning up metadata that is not relevant to a particular deployment. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** Reads the `filter-config.json` file, which contains an array of `filters`. Each filter defines a `name`, `description`, `folders` (where to apply the filter), `file_extensions`, and an `exclude_list`. +- **File System Operations:** Copies the input folder to an output folder (if different) to avoid modifying original files directly. It then iterates through the files in the output folder that match the specified file extensions. +- **XML Parsing and Manipulation:** For each matching XML file: + - It uses `xml2js.Parser` to parse the XML content into a JavaScript object. + - It recursively traverses the JavaScript object, applying the `filterElement` function. + - The `filterElement` function checks for `type_tag` and `identifier_tag` defined in the `exclude_list`. If a match is found and the value is in the `excludeDef.values`, the element is removed from the XML structure. + - After filtering, it uses `writeXmlFile` to write the modified JavaScript object back to the XML file. +- **Logging:** Provides detailed logs about the filtering process, including which files are being processed and which elements are being filtered. +- **Summary Reporting:** Tracks and reports on the files that have been updated due to filtering. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:--------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| configfile
-c | option | Config JSON file path | | | | +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| inputfolder
-i | option | Input folder (default: "." ) | | | | +| json | boolean | Format output as json. | | | | +| outputfolder
-f | option | Output folder (default: parentFolder + _xml_content_filtered) | | | | +| websocket | option | websocket | | | | + +## Examples + +```shell +sf hardis:project:clean:filter-xml-content -i "./mdapi_output" +``` + +```shell +sf hardis:project:clean:filter-xml-content -i "retrieveUnpackaged" +``` + + diff --git a/docs/hardis/project/clean/flowpositions.md b/docs/hardis/project/clean/flowpositions.md index 1747bf060..4045e94dd 100644 --- a/docs/hardis/project/clean/flowpositions.md +++ b/docs/hardis/project/clean/flowpositions.md @@ -1,4 +1,4 @@ - + # hardis:project:clean:flowpositions ## Description @@ -34,19 +34,19 @@ autoCleanTypes: ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:flowpositions +$ sf hardis:project:clean:flowpositions ``` diff --git a/docs/hardis/project/clean/hiddenitems.md b/docs/hardis/project/clean/hiddenitems.md index 594463b97..e47909550 100644 --- a/docs/hardis/project/clean/hiddenitems.md +++ b/docs/hardis/project/clean/hiddenitems.md @@ -1,25 +1,51 @@ - + # hardis:project:clean:hiddenitems ## Description -Remove unwanted hidden items within sfdx project sources + +## Command Behavior + +**Removes hidden or temporary metadata items from your Salesforce DX project sources.** + +This command helps clean up your local Salesforce project by deleting files that are marked as hidden or are temporary artifacts. These files can sometimes be generated by Salesforce CLI or other tools and are not intended to be part of your version-controlled source. + +Key functionalities: + +- **Targeted File Scan:** Scans for files with specific extensions (`.app`, `.cmp`, `.evt`, `.tokens`, `.html`, `.css`, `.js`, `.xml`) within the specified root folder (defaults to `force-app`). +- **Hidden Content Detection:** Identifies files whose content starts with (hidden). This is a convention used by some Salesforce tools to mark temporary or internal files. +- **Component Folder Removal:** If a hidden file is part of a Lightning Web Component (LWC) or Aura component folder, the entire component folder is removed to ensure a complete cleanup. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses `glob` to find files matching the specified patterns within the `folder`. +- **Content Reading:** Reads the content of each file. +- **Hidden Marker Check:** Checks if the file content starts with the literal string (hidden). +- **Folder or File Removal:** If a file is identified as hidden: + - If it's within an lwc or aura component folder, the entire component folder is removed using `fs.remove`. + - Otherwise, only the individual file is removed. +- **Logging:** Provides clear messages about which items are being removed and a summary of the total number of hidden items cleaned. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:hiddenitems +$ sf hardis:project:clean:hiddenitems ``` diff --git a/docs/hardis/project/clean/listviews.md b/docs/hardis/project/clean/listviews.md index 6f3f68452..d3f2db988 100644 --- a/docs/hardis/project/clean/listviews.md +++ b/docs/hardis/project/clean/listviews.md @@ -1,4 +1,4 @@ - + # hardis:project:clean:listviews ## Description @@ -7,19 +7,19 @@ Replace Mine by Everything in ListView, and log the replacements in sfdx-hardis. ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:listviews +$ sf hardis:project:clean:listviews ``` diff --git a/docs/hardis/project/clean/manageditems.md b/docs/hardis/project/clean/manageditems.md index 68be00cff..33fef2d17 100644 --- a/docs/hardis/project/clean/manageditems.md +++ b/docs/hardis/project/clean/manageditems.md @@ -1,26 +1,51 @@ - + # hardis:project:clean:manageditems ## Description -Remove unwanted managed items within sfdx project sources + +## Command Behavior + +**Removes unwanted managed package items from your Salesforce DX project sources.** + +This command helps clean up your local Salesforce project by deleting metadata files that belong to a specific managed package namespace. This is particularly useful when you retrieve metadata from an org that contains managed packages, and you only want to keep the unmanaged or custom metadata in your local repository. + +Key functionalities: + +- **Namespace-Based Filtering:** Requires a `--namespace` flag to specify which managed package namespace's files should be removed. +- **Targeted File Deletion:** Scans for files and folders that start with the specified namespace prefix (e.g., `yourNamespace__*`). +- **Intelligent Folder Handling:** Prevents the deletion of managed folders if they contain local custom items. This ensures that if you have custom metadata within a managed package's folder structure, only the managed components are removed, preserving your local customizations. +- **Object Metadata Preservation:** Specifically, it will not remove .object-meta.xml files if there are local custom items defined within that object's folder. + +
+Technical explanations + +The command's technical implementation involves: + +- **Namespace Validation:** Ensures that a namespace is provided, throwing an `SfError` if it's missing. +- **File Discovery:** Uses `glob` to find all files and directories within the specified `folder` (defaults to `force-app`) that match the managed package namespace pattern (`**/${this.namespace}__*`). +- **Folder Content Check:** For identified managed folders, the `folderContainsLocalItems` function is called. This function uses `glob` again to check for the presence of any files within that folder that *do not* start with the managed package namespace, indicating local customizations. +- **Conditional Deletion:** Based on the `folderContainsLocalItems` check, it conditionally removes files and folders using `fs.remove`. If a managed folder contains local items, it is skipped to prevent accidental deletion of custom work. +- **Logging:** Provides clear messages about which managed items are being removed. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-----------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| namespace
-n | option | Namespace to remove | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-----------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| namespace
-n | option | Namespace to remove | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:manageditems --namespace crta +$ sf hardis:project:clean:manageditems --namespace crta ``` diff --git a/docs/hardis/project/clean/minimizeprofiles.md b/docs/hardis/project/clean/minimizeprofiles.md index c0f6c4be6..1635df3bc 100644 --- a/docs/hardis/project/clean/minimizeprofiles.md +++ b/docs/hardis/project/clean/minimizeprofiles.md @@ -1,15 +1,18 @@ - + # hardis:project:clean:minimizeprofiles ## Description -Remove all profile attributes that exist on Permission Sets + +## Command Behavior + +**Removes all profile attributes that exist on Permission Sets** It is a bad practice to define on Profiles elements that can be defined on Permission Sets. Salesforce will deprecate such capability in Spring 26. -Don't wait for that, and use minimizeProfiles cleaning to automatically remove from Profiles any permission that exists on a Permission Set ! +Don't wait for that, and use minimizeProfiles cleaning to automatically remove from Profiles any permission that exists on a Permission Set! The following XML tags are removed automatically: @@ -21,14 +24,14 @@ The following XML tags are removed automatically: - pageAccesses - userPermissions (except on Admin Profile) -You can override this list by defining a property minimizeProfilesNodesToRemove in your .sfdx-hardis.yml config file. +You can override this list by defining a property `minimizeProfilesNodesToRemove` in your `.sfdx-hardis.yml` config file. -You can also skip profiles using property skipMinimizeProfiles +You can also skip profiles using property `skipMinimizeProfiles`. 
Example: ```yaml -skipMinimizeProfiles +skipMinimizeProfiles: - MyClient Customer Community Login User - MyClientPortail Profile ``` @@ -36,19 +39,19 @@ skipMinimizeProfiles ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:minimizeprofiles +$ sf hardis:project:clean:minimizeprofiles ``` diff --git a/docs/hardis/project/clean/orgmissingitems.md b/docs/hardis/project/clean/orgmissingitems.md index 1cf2cc5c5..6353ffb06 100644 --- a/docs/hardis/project/clean/orgmissingitems.md +++ b/docs/hardis/project/clean/orgmissingitems.md @@ -1,22 +1,52 @@ - + # hardis:project:clean:orgmissingitems ## Description -Clean SFDX sources from items present neither in target org nor local package.xml + +## Command Behavior + +**Cleans Salesforce DX project sources by removing metadata components that are not present in a target Salesforce org or the local `package.xml` file.** + +This command helps maintain a lean and accurate codebase by identifying and removing metadata that is either obsolete in the target org or not explicitly included in your project's `package.xml`. This is particularly useful for: + +- **Reducing Deployment Size:** Eliminating unnecessary metadata reduces the size of deployments, leading to faster deployments and fewer conflicts. +- **Ensuring Consistency:** Synchronizing your local codebase with the actual state of a Salesforce org. +- **Cleaning Up Orphaned Metadata:** Removing components that might have been deleted from the org but still exist in your local project. + +Key features: + +- **Target Org Integration:** Connects to a specified Salesforce org (or prompts for one) to retrieve its metadata manifest. +- **`package.xml` Comparison:** Compares your local project's metadata with the target org's metadata and your local `package.xml` to identify missing items. 
+- **Report Type Cleaning:** Specifically targets and cleans `reportType-meta.xml` files by removing references to fields or objects that are not present in the target org or your `package.xml`. + +
+Technical explanations + +The command's technical implementation involves several steps: + +- **Org Manifest Generation:** If not provided, it generates a full `package.xml` from the target Salesforce org using `buildOrgManifest`. +- **XML Parsing and Merging:** It parses the generated org manifest and merges it with the local `package.xml` and `destructiveChanges.xml` files to create a comprehensive list of existing and deleted metadata. +- **Metadata Analysis:** It iterates through specific metadata types (currently `reportType-meta.xml` files) within the configured source folder. +- **Field and Object Validation:** For each `reportType-meta.xml` file, it examines the columns and filters out references to custom fields or objects that are not found in the merged `package.xml` content or are marked for destruction. +- **XML Modification:** If changes are detected, it updates the `reportType-meta.xml` file by writing the modified XML content back to the file using `writeXmlFile`. +- **File System Operations:** It uses `fs-extra` for file system operations and `glob` for pattern matching to find relevant metadata files. +- **SOQL Queries:** The `buildOrgManifest` utility (used internally) performs SOQL queries to retrieve metadata information from the Salesforce org. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | |packagexmlfull
-p|option|Path to packagexml used for cleaning. Must contain also standard CustomObject and CustomField elements. If not provided, it will be generated from a remote org|||| -|packagexmltargetorg
-t|option|Target org username or alias to build package.xml (sfdx must be authenticated). +|packagexmltargetorg
-t|option|Target org username or alias to build package.xml (SF CLI must be authenticated). If not provided, will be prompted to the user.|||| |skipauth|boolean|Skip authentication check when a default username is required|||| |websocket|option|Websocket host:port for VsCode SFDX Hardis UI integration|||| @@ -24,7 +54,7 @@ If not provided, will be prompted to the user.|||| ## Examples ```shell -sfdx hardis:project:clean:orgmissingitems +$ sf hardis:project:clean:orgmissingitems ``` diff --git a/docs/hardis/project/clean/references.md b/docs/hardis/project/clean/references.md index 90036650e..948343b78 100644 --- a/docs/hardis/project/clean/references.md +++ b/docs/hardis/project/clean/references.md @@ -1,38 +1,64 @@ - + # hardis:project:clean:references ## Description -Remove unwanted references within sfdx project sources + +## Command Behavior + +**Removes unwanted references and cleans up metadata within your Salesforce DX project sources.** + +This command provides a powerful way to maintain a clean and efficient Salesforce codebase by eliminating unnecessary or problematic metadata. It supports various cleaning types, from removing hardcoded user references in dashboards to minimizing profile attributes. + +Key functionalities include: + +- **Configurable Cleaning Types:** You can specify a particular cleaning type (e.g., +- **JSON/XML Configuration:** Cleaning operations can be driven by a JSON configuration file or a +- **Interactive Selection:** If no cleaning type is specified, the command interactively prompts you to select which references to clean. +- **Persistent Configuration:** You can choose to save your cleaning selections in your project's configuration (`.sfdx-hardis.yml`) so they are automatically applied during future Work Save operations. +- **File Deletion:** Beyond just cleaning XML content, it can also delete related files (e.g., custom field files and their translations when a custom field is marked for deletion). + +
+Technical explanations + +The command's technical implementation involves several steps: + +- **Configuration Loading:** It reads the project's configuration to determine default cleaning types and user preferences. +- **Cleaning Type Processing:** For each selected cleaning type, it either executes a dedicated sub-command (e.g., +- **XML Filtering:** For template-based cleanings, it constructs a temporary JSON configuration file based on predefined templates or user-provided +- **Package.xml Cleanup:** It iterates through +- **Object Property Removal:** The +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------------------------------------------------------------------------------------------------------------------------------:| -| config
-c | option | Path to a JSON config file or a destructiveChanges.xml file | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| type
-t | option | Cleaning type | | | all
caseentitlement
dashboards
datadotcom
destructivechanges
localfields
productrequest
entitlement | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:| +| config
-c | option | Path to a JSON config file or a destructiveChanges.xml file | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| type
-t | option | Cleaning type | | | all
caseentitlement
dashboards
datadotcom
destructivechanges
localfields
productrequest
entitlement
flowPositions
sensitiveMetadatas
minimizeProfiles | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:references +$ sf hardis:project:clean:references ``` ```shell -sfdx hardis:project:clean:references --type all +$ sf hardis:project:clean:references --type all ``` ```shell -sfdx hardis:project:clean:references --config ./cleaning/myconfig.json +$ sf hardis:project:clean:references --config ./cleaning/myconfig.json ``` ```shell -sfdx hardis:project:clean:references --config ./somefolder/myDestructivePackage.xml +$ sf hardis:project:clean:references --config ./somefolder/myDestructivePackage.xml ``` diff --git a/docs/hardis/project/clean/retrievefolders.md b/docs/hardis/project/clean/retrievefolders.md index c69e96fd6..c1b09958f 100644 --- a/docs/hardis/project/clean/retrievefolders.md +++ b/docs/hardis/project/clean/retrievefolders.md @@ -1,26 +1,46 @@ - + # hardis:project:clean:retrievefolders ## Description -Retrieve dashboards, documents and report folders in DX sources. Use -u ORGALIAS + +## Command Behavior + +**Retrieves specific folders of Dashboards, Documents, Email Templates, and Reports from a Salesforce org into your DX project sources.** + +This command is designed to help developers and administrators synchronize their local Salesforce DX project with the latest versions of these folder-based metadata types. It's particularly useful for: + +- **Selective Retrieval:** Instead of retrieving all dashboards or reports, it allows you to retrieve specific folders, which can be more efficient for targeted development or backup. +- **Maintaining Folder Structure:** Ensures that the folder structure of these metadata types is preserved in your local project. + +
+Technical explanations + +The command's technical implementation involves: + +- **Folder Iteration:** It defines a list of folder-based metadata types (`dashboards`, `documents`, `email`, `reports`). +- **File System Check:** For each type, it checks if the corresponding folder exists in `force-app/main/default/`. +- **Recursive Retrieval:** It iterates through subfolders within these main folders. For each subfolder, it constructs and executes a `sf project retrieve start` command. +- **Salesforce CLI Integration:** It uses `sf project retrieve start -m :` to retrieve the content of individual folders. This ensures that only the specified folder and its contents are retrieved. +- **Error Handling:** It includes basic error handling for the `execCommand` calls. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:retrievefolders +$ sf hardis:project:clean:retrievefolders ``` diff --git a/docs/hardis/project/clean/sensitive-metadatas.md b/docs/hardis/project/clean/sensitive-metadatas.md new file mode 100644 index 000000000..ed53c4f4a --- /dev/null +++ b/docs/hardis/project/clean/sensitive-metadatas.md @@ -0,0 +1,38 @@ + +# hardis:project:clean:sensitive-metadatas + +## Description + +Sensitive data like credentials and certificates are not supposed to be stored in Git, to avoid security breaches. + +This command detects the related metadata and replaces their sensitive content by "HIDDEN_BY_SFDX_HARDIS" + +Can be automated at each **hardis:work:save** if **sensitiveMetadatas** is added in .sfdx-hardis.yml **autoCleanTypes** property + +Example in config/.sfdx-hardis.yml: + +```yaml +autoCleanTypes: + - destructivechanges + - sensitiveMetadatas +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:project:clean:sensitive-metadatas +``` + + diff --git a/docs/hardis/project/clean/standarditems.md b/docs/hardis/project/clean/standarditems.md index cb8c16473..0e65002d7 100644 --- a/docs/hardis/project/clean/standarditems.md +++ b/docs/hardis/project/clean/standarditems.md @@ -1,24 +1,50 @@ - + # hardis:project:clean:standarditems ## Description -Remove unwanted standard items within sfdx project sources + +## Command Behavior + +**Removes unwanted standard Salesforce items from your Salesforce DX project sources.** + +This command helps maintain a clean and focused Salesforce codebase by deleting metadata files that represent standard Salesforce objects or fields, especially when they are retrieved but not intended to be managed in your version control system. This is useful for reducing repository size and avoiding conflicts with standard Salesforce metadata. + +Key functionalities: + +- **Standard Object Cleaning:** Scans for standard objects (those without a `__c` suffix) within your `force-app/main/default/objects` folder. +- **Conditional Folder Deletion:** If a standard object folder contains no custom fields (fields with a `__c` suffix), the entire folder and its associated sharing rules (`.sharingRules-meta.xml`) are removed. +- **Standard Field Deletion:** If a standard object folder *does* contain custom fields, only the standard fields within that object are removed, preserving your custom metadata. + +
+Technical explanations + +The command's technical implementation involves: + +- **File System Traversal:** It starts by listing the contents of the `force-app/main/default/objects` directory. +- **Standard Object Identification:** It iterates through each directory within `objects` and identifies standard objects by checking if their name does not contain `__` (the custom object suffix). +- **Custom Field Detection:** For each standard object, it uses `glob` to search for custom fields (`*__*.field-meta.xml`) within its `fields` subdirectory. +- **Conditional Removal:** + - If no custom fields are found, it removes the entire object directory and any corresponding sharing rules file using `fs.remove`. + - If custom fields are found, it then uses `glob` again to find all standard fields (`*.field-meta.xml` without `__`) within the object's `fields` directory and removes only those standard field files. +- **Logging:** Provides clear messages about which folders and files are being removed or kept. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:standarditems +$ sf hardis:project:clean:standarditems ``` diff --git a/docs/hardis/project/clean/systemdebug.md b/docs/hardis/project/clean/systemdebug.md index c8fbd3a2c..ac8a63b77 100644 --- a/docs/hardis/project/clean/systemdebug.md +++ b/docs/hardis/project/clean/systemdebug.md @@ -1,25 +1,54 @@ - + # hardis:project:clean:systemdebug ## Description -Clean System.debug() lines in APEX Code (classes and triggers) + +## Command Behavior + +**Removes or comments out `System.debug()` statements from Apex classes and triggers in your Salesforce DX project.** + +This command helps maintain clean and optimized Apex code by eliminating debug statements that are often left in production code. While `System.debug()` is invaluable during development, it can impact performance and expose sensitive information if left in deployed code. + +Key functionalities: + +- **Targeted File Scan:** Scans all Apex class (.cls) and trigger (.trigger) files within the specified root folder (defaults to `force-app`). +- **Conditional Action:** + - **Comment Out (default):** By default, it comments out `System.debug()` lines by prepending // to them. + - **Delete (`--delete` flag):** If the `--delete` flag is used, it completely removes the lines containing `System.debug()`. +- **Exclusion:** Lines containing `NOPMD` are ignored, allowing developers to intentionally keep specific debug statements. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses `glob` to find all Apex class and trigger files. +- **Content Reading:** Reads the content of each Apex file line by line. +- **Pattern Matching:** Checks each line for the presence of `System.debug` (case-insensitive). +- **Line Modification:** + - If `System.debug` is found and the `--delete` flag is not used, it modifies the line to comment out the debug statement. + - If `System.debug` is found and the `--delete` flag is used, it removes the line entirely. +- **File Writing:** If any changes are made to a file, the modified content is written back to the file using `fs.writeFile`. +- **Logging:** Provides a summary of how many files were cleaned. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-----------------------------------------------------:| -| delete
-d | boolean | Delete lines with System.debug | | | | -| folder
-f | option | Root folder | force-app | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:---------:|:--------:|:-------:| +| delete
-d | boolean | Delete lines with System.debug | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:clean:systemdebug +$ sf hardis:project:clean:systemdebug ``` diff --git a/docs/hardis/project/clean/xml.md b/docs/hardis/project/clean/xml.md index cc79dc73a..98aaff859 100644 --- a/docs/hardis/project/clean/xml.md +++ b/docs/hardis/project/clean/xml.md @@ -1,4 +1,4 @@ - + # hardis:project:clean:xml ## Description @@ -19,26 +19,26 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------------|:-------:|:--------------------------------------------------------------------------------------------------------------------|:-----------------------------------------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| folder
-f | option | Root folder | force-app | | | -| globpattern
-p | option | Glob pattern to find files to clean. Ex: /**/*.flexipage-meta.xml | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| namespace
-n | option | XML Namespace to use | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | -| xpath
-x | option | XPath to use to detect the elements to remove. Ex: //ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')] | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------------|:-------:|:--------------------------------------------------------------------------------------------------------------------|:---------------------------------------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| folder
-f | option | Root folder | force-app | | | +| globpattern
-p | option | Glob pattern to find files to clean. Ex: /**/*.flexipage-meta.xml | | | | +| json | boolean | Format output as json. | | | | +| namespace
-n | option | XML Namespace to use | http://soap.sforce.com/2006/04/metadata | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| xpath
-x | option | XPath to use to detect the elements to remove. Ex: //ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')] | | | | ## Examples ```shell -sfdx hardis:project:clean:xml +$ sf hardis:project:clean:xml ``` ```shell -sfdx hardis:project:clean:xml --globpattern "/**/*.flexipage-meta.xml" --xpath "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]" +$ sf hardis:project:clean:xml --globpattern "/**/*.flexipage-meta.xml" --xpath "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]" ``` diff --git a/docs/hardis/project/configure/auth.md b/docs/hardis/project/configure/auth.md index 149a8b220..a6c066184 100644 --- a/docs/hardis/project/configure/auth.md +++ b/docs/hardis/project/configure/auth.md @@ -1,28 +1,61 @@ - + # hardis:project:configure:auth ## Description -Configure authentication from git branch to target org + +## Command Behavior + +**Configures authentication between a Git branch and a target Salesforce org for CI/CD deployments.** + +This command facilitates the setup of automated CI/CD pipelines, enabling seamless deployments from specific Git branches to designated Salesforce orgs. It supports both standard Salesforce orgs and Dev Hub configurations, catering to various enterprise deployment workflows. + +Key functionalities include: + +- **Org Selection/Login:** Guides the user to select an existing Salesforce org or log in to a new one. +- **Git Branch Association:** Allows associating a specific Git branch with the chosen Salesforce org. +- **Merge Target Definition:** Enables defining target Git branches into which the configured branch can merge, ensuring controlled deployment flows. +- **Salesforce Username Configuration:** Prompts for the Salesforce username to be used by the CI server for deployments. +- **SSL Certificate Generation:** Automatically generates an SSL certificate for secure authentication. + +
+Technical explanations + +The command's implementation involves several key technical aspects: + +- **SF CLI Integration:** Utilizes +@salesforce/sf-plugins-core + for command structure and flag parsing. +- **Interactive Prompts:** Employs the +prompts + library for interactive user input, guiding the configuration process. +- **Git Integration:** Interacts with Git to retrieve branch information using +`git().branch(["--list", "-r"])` +. +- **Configuration Management:** Leverages internal utilities (`checkConfig`, `getConfig`, `setConfig`, `setInConfigFile`) to read from and write to project-specific configuration files (e.g., `.sfdx-hardis.<branchName>.yml`). +- **Salesforce CLI Execution:** Executes Salesforce CLI commands programmatically via `execSfdxJson` for org interactions. +- **SSL Certificate Generation:** Calls `generateSSLCertificate` to create necessary SSL certificates for JWT-based authentication. +- **WebSocket Communication:** Uses `WebSocketClient` for potential communication with external tools or processes, such as restarting the command in VS Code. +- **Dependency Check:** Ensures the presence of `openssl` on the system, which is required for SSL certificate generation. + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug<br/>
-d | boolean | Activate debug mode (more logs) | | | | -| devhub
-b | boolean | Configure project DevHub | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| devhub
-b | boolean | Configure project DevHub | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:configure:auth +$ sf hardis:project:configure:auth ``` diff --git a/docs/hardis/project/convert/profilestopermsets.md b/docs/hardis/project/convert/profilestopermsets.md index 90b6e9d6c..d59728fd9 100644 --- a/docs/hardis/project/convert/profilestopermsets.md +++ b/docs/hardis/project/convert/profilestopermsets.md @@ -1,25 +1,48 @@ - + # hardis:project:convert:profilestopermsets ## Description -Creates permission sets from existing profiles, with id PS_PROFILENAME + +## Command Behavior + +**Converts existing Salesforce Profiles into Permission Sets, facilitating a more granular and recommended security model.** + +This command helps in migrating permissions from Profiles to Permission Sets, which is a best practice for managing user access in Salesforce. It creates a new Permission Set for each specified Profile, adopting a naming convention of `PS_PROFILENAME`. + +Key functionalities: + +- **Profile to Permission Set Conversion:** Automatically extracts permissions from a Profile and creates a corresponding Permission Set. +- **Naming Convention:** New Permission Sets are named with a `PS_` prefix followed by the Profile name (e.g., `PS_Standard_User`). +- **Exclusion Filter:** Allows you to exclude specific Profiles from the conversion process using the `--except` flag. + +
+Technical explanations + +The command's technical implementation involves: + +- **External Plugin Integration:** It relies on the `shane-sfdx-plugins` (specifically the `sf shane:profile:convert` command) to perform the actual conversion. +- **File System Scan:** It reads the contents of the `force-app/main/default/profiles` directory to identify all available Profile metadata files. +- **Command Execution:** For each identified Profile (that is not excluded), it constructs and executes the `sf shane:profile:convert` command with the appropriate Profile name and desired Permission Set name. +- **Error Handling:** Includes basic error handling for the external command execution. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| except
-e | option | List of filters | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| except
-e | option | List of filters | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:convert:profilestopermsets +$ sf hardis:project:convert:profilestopermsets ``` diff --git a/docs/hardis/project/create.md b/docs/hardis/project/create.md index 5c5d048e2..f4ae0b605 100644 --- a/docs/hardis/project/create.md +++ b/docs/hardis/project/create.md @@ -1,4 +1,4 @@ - + # hardis:project:create ## Description @@ -7,18 +7,18 @@ Create a new SFDX Project ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:create +$ sf hardis:project:create ``` diff --git a/docs/hardis/project/deploy/notify.md b/docs/hardis/project/deploy/notify.md new file mode 100644 index 000000000..ed9b7da85 --- /dev/null +++ b/docs/hardis/project/deploy/notify.md @@ -0,0 +1,103 @@ + +# hardis:project:deploy:notify + +## Description + +Post notifications related to: + +- **Deployment simulation** _(use with --check-only)_ + +- **Deployment process** _(to call only if your deployment is successful)_ + +### Integrations + +According to the [integrations you configured](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integrations-home/), notifications can contain deployment information and [Flow Visual Git Diff](https://sfdx-hardis.cloudity.com/salesforce-deployment-assistant-home/#flow-visual-git-diff) + + - GitHub, Gitlab, Azure DevOps, Bitbucket comments on Pull Requests (including Flows Visual Git Diff) + + - Slack, Microsoft Teams, Email deployment summary after a successful deployment + + - JIRA tags and comments on tickets that just has been deployed + +![](https://sfdx-hardis.cloudity.com/assets/images/screenshot-jira-gitlab.jpg) + +![](https://sfdx-hardis.cloudity.com/assets/images/screenshot-jira-slack.jpg) + +### Flows Visual Git Diff + +- Visually show you the differences on a diagram + +- Display the update details without having to open any XML ! 
+ +🟩 = added + +🟥 = removed + +🟧 = updated + +![](https://sfdx-hardis.cloudity.com/assets/images/flow-visual-git-diff.jpg) + +![](https://sfdx-hardis.cloudity.com/assets/images/flow-visual-git-diff-2.jpg) + +### In custom CI/CD workflow + +Example of usage in a custom CI/CD pipeline: + +```bash +# Disable exit-on-error temporarily +set +e + +# Run the deploy command +sf project deploy start [....] +RET_CODE=$? + +# Re-enable exit-on-error +set -e + +# Determine MYSTATUS based on return code +if [ $RET_CODE -eq 0 ]; then + MYSTATUS="valid" +else + MYSTATUS="invalid" +fi + +# Run the notify command with MYSTATUS +sf hardis:project:deploy:notify --check-only --deploy-status "$MYSTATUS" +``` + +### Other usages + +This command is for custom SF Cli pipelines, if you are a sfdx-hardis user, it is already embedded in sf hardis:deploy:smart. + +You can also use [sfdx-hardis wrapper commands of SF deployment commands](https://sfdx-hardis.cloudity.com/salesforce-deployment-assistant-setup/#using-custom-cicd-pipeline) + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:---------------------|:-------:|:-----------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------:| +| check-only
-c | boolean | Use this option to send notifications from a Deployment simulation job | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| deploy-status
-s | option | Send valid, invalid or unknown (default) to indicate if the deployment or deployment simulation is in success or not | unknown | | valid
invalid
unknown | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| message
-m | option | Custom message that you want to be added in notifications (string or markdown format) | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:project:deploy:notify --check-only --deploy-status valid --message "This deployment check is valid\n\nYahooo !!" +``` + +```shell +$ sf hardis:project:deploy:notify --check-only --deploy-status invalid --message "This deployment check has failed !\n\nOh no !!" +``` + +```shell +$ sf hardis:project:deploy:notify --deploy-status valid --message "This deployment has been processed !\n\nYahooo !!" +``` + + diff --git a/docs/hardis/project/deploy/quick.md b/docs/hardis/project/deploy/quick.md new file mode 100644 index 000000000..83eebd85b --- /dev/null +++ b/docs/hardis/project/deploy/quick.md @@ -0,0 +1,70 @@ + +# hardis:project:deploy:quick + +## Description + +sfdx-hardis wrapper for **sf project deploy quick** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_quick_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. 
Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-----------------------|:-------:|:--------:|:-------:| +| --job-id
-i | option | job-id | | | | +| --use-most-recent
-r | boolean | use-most-recent | | | | +| api-version
-a | option | api-version | | | | +| async | boolean | async | | | | +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| target-org
-o | option | undefined | | | | +| tests | option | tests | | | | +| wait
-w | option | wait | 33 | | | + +## Examples + + diff --git a/docs/hardis/project/deploy/simulate.md b/docs/hardis/project/deploy/simulate.md new file mode 100644 index 000000000..7d1cc94fe --- /dev/null +++ b/docs/hardis/project/deploy/simulate.md @@ -0,0 +1,55 @@ + +# hardis:project:deploy:simulate + +## Description + + +## Command Behavior + +**Simulates the deployment of Salesforce metadata to a target org, primarily used by the VS Code Extension for quick validation.** + +This command allows developers to perform a dry run of a metadata deployment without actually committing changes to the Salesforce org. This is incredibly useful for: + +- **Pre-Deployment Validation:** Identifying potential errors, warnings, or conflicts before a full deployment. +- **Troubleshooting:** Quickly testing metadata changes and debugging issues in a safe environment. +- **Local Development:** Validating changes to individual metadata components (e.g., a Permission Set) without needing to run a full CI/CD pipeline. + +Key functionalities: + +- **Source Specification:** Takes a source file or directory (`--source-dir`) containing the metadata to be simulated. +- **Target Org Selection:** Prompts the user to select a Salesforce org for the simulation. This allows for flexible testing across different environments. +- **Dry Run Execution:** Executes the Salesforce CLI's `sf project deploy start --dry-run` command, which performs all validation steps but does not save any changes to the org. + +This command is primarily used by the VS Code Extension to provide immediate feedback to developers. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Org Prompt:** Uses `promptOrgUsernameDefault` to allow the user to select the target Salesforce org for the deployment simulation. +- **Salesforce CLI Integration:** It constructs and executes the `sf project deploy start` command with the `--dry-run` and `--ignore-conflicts` flags. The `--source-dir` and `--target-org` flags are dynamically populated based on user input. +- **`wrapSfdxCoreCommand`:** This utility is used to execute the Salesforce CLI command and capture its output. +- **Connection Variables:** Ensures Salesforce connection variables are set using `setConnectionVariables`. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| source-dir
-f | option | Source file or directory to simulate the deployment | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:project:deploy:simulate --source-dir force-app/defaut/main/permissionset/PS_Admin.permissionset-meta.xml +``` + + diff --git a/docs/hardis/project/deploy/smart.md b/docs/hardis/project/deploy/smart.md new file mode 100644 index 000000000..089d87acf --- /dev/null +++ b/docs/hardis/project/deploy/smart.md @@ -0,0 +1,284 @@ + +# hardis:project:deploy:smart + +## Description + +Smart deploy of SFDX sources to target org, with many useful options. + +In case of errors, [tips to fix them](https://sfdx-hardis.cloudity.com/deployTips/) will be included within the error messages. + +### Quick Deploy + +In case Pull Request comments are configured on the project, Quick Deploy will try to be used (equivalent to button Quick Deploy) + +If you do not want to use QuickDeploy, define variable `SFDX_HARDIS_QUICK_DEPLOY=false` + +- [GitHub Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/) +- [Gitlab Merge requests notes config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-gitlab/) +- [Azure Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/) + +### Delta deployments + +To activate delta deployments, define property `useDeltaDeployment: true` in `config/.sfdx-hardis.yml`. + +This will activate delta deployments only between minor and major branches (major to major remains full deployment mode) + +If you want to force the delta deployment into major orgs (ex: preprod to prod), this is not recommended but you can use env variable ALWAYS_ENABLE_DELTA_DEPLOYMENT=true + +Delta deployment can optionally include some related metadata types even if they were not changed, but deploying them can improve the deployment itself or make validation more robust. 
For example, it would add CustomObjectTranslation to the delta package if you have changed a Layout. Set `useDeltaDeploymentWithDependencies: true` or use the environment variable `USE_DELTA_DEPLOYMENT_WITH_DEPENDENCIES=true` to activate this feature. + +### Smart Deployments Tests + +Not all metadata updates can break test classes, use Smart Deployment Tests to skip running test classes if ALL the following conditions are met: + +- Delta deployment is activated and applicable to the source and target branches +- Delta deployed metadatas are all matching the list of **NOT_IMPACTING_METADATA_TYPES** (see below) +- Target org is not a production org + +Activate Smart Deployment tests with: + +- env variable `USE_SMART_DEPLOYMENT_TESTS=true` +- .sfdx-hardis.yml config property `useSmartDeploymentTests: true` + +Default list for **NOT_IMPACTING_METADATA_TYPES** (can be overridden with comma-separated list on env var NOT_IMPACTING_METADATA_TYPES) + +- Audience +- AuraDefinitionBundle +- Bot +- BotVersion +- ContentAsset +- CustomObjectTranslation +- CustomSite +- CustomTab +- Dashboard +- ExperienceBundle +- Flexipage +- GlobalValueSetTranslation +- Layout +- LightningComponentBundle +- NavigationMenu +- ReportType +- Report +- SiteDotCom +- StandardValueSetTranslation +- StaticResource +- Translations + +Note: if you want to disable Smart test classes for a PR, add **nosmart** in the text of the latest commit. + +### Dynamic deployment items / Overwrite management + +If necessary, you can define the following files (that support wildcards *): + +- `manifest/package-no-overwrite.xml`: Every element defined in this file will be deployed only if it is not existing yet in the target org (can be useful with ListView for example, if the client wants to update them directly in production org). 
+ - Can be overridden for a branch using .sfdx-hardis.yml property **packageNoOverwritePath** or environment variable PACKAGE_NO_OVERWRITE_PATH (for example, define: `packageNoOverwritePath: manifest/package-no-overwrite-main.xml` in config file `config/.sfdx-hardis.main.yml`) +- `manifest/packageXmlOnChange.xml`: Every element defined in this file will not be deployed if it already has a similar definition in target org (can be useful for SharingRules for example) + +See [Overwrite management documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-overwrite/) + +### Packages installation + +You can define a list of package to install during deployments using property `installedPackages` + +- If `INSTALL_PACKAGES_DURING_CHECK_DEPLOY` is defined as `true` (or `installPackagesDuringCheckDeploy: true` in `.sfdx-hardis.yml`), packages will be installed even if the command is called with `--check` mode +- You can automatically update this property by listing all packages installed on an org using command `sf hardis:org:retrieve:packageconfig` + +Example: + +```yaml +installedPackages: + - Id: 0A35r0000009EtECAU + SubscriberPackageId: 033i0000000LVMYAA4 + SubscriberPackageName: Marketing Cloud + SubscriberPackageNamespace: et4ae5 + SubscriberPackageVersionId: 04t6S000000l11iQAA + SubscriberPackageVersionName: Marketing Cloud + SubscriberPackageVersionNumber: 236.0.0.2 + installOnScratchOrgs: true // true or false depending you want to install this package when creating a new scratch org + installDuringDeployments: true // set as true to install package during a deployment using sf hardis:project:deploy:smart + installationkey: xxxxxxxxxxxxxxxxxxxx // if the package has a password, write it in this property + - Id: 0A35r0000009F9CCAU + SubscriberPackageId: 033b0000000Pf2AAAS + SubscriberPackageName: Declarative Lookup Rollup Summaries Tool + SubscriberPackageNamespace: dlrs + SubscriberPackageVersionId: 04t5p000001BmLvAAK + SubscriberPackageVersionName: 
Release + SubscriberPackageVersionNumber: 2.15.0.9 + installOnScratchOrgs: true + installDuringDeployments: true +``` + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run 
only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + +### Pull Requests Custom Behaviors + +If some words are found **in the Pull Request description**, special behaviors will be applied + +| Word | Behavior | +|:-------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| NO_DELTA | Even if delta deployments are activated, a deployment in mode **full** will be performed for this Pull Request | +| PURGE_FLOW_VERSIONS | After deployment, inactive and obsolete Flow Versions will be deleted (equivalent to command sf hardis:org:purge:flow)
**Caution: This will also purge active Flow Interviews !** | +| DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT | If a file manifest/destructiveChanges.xml is found, it will be executed in a separate step, after the deployment of the main package | + +> For example, define `PURGE_FLOW_VERSIONS` and `DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT` in your Pull Request comments if you want to delete fields that are used in an active flow. + +Note: it is also possible to define these behaviors as ENV variables: + +- For all deployments (example: `PURGE_FLOW_VERSIONS=true`) +- For a specific branch, by appending the target branch name (example: `PURGE_FLOW_VERSIONS_UAT=true`) + +### Deployment plan (deprecated) + +If you need to deploy in multiple steps, you can define a property `deploymentPlan` in `.sfdx-hardis.yml`. + +- If a file `manifest/package.xml` is found, it will be placed with order 0 in the deployment plan + +- If a file `manifest/destructiveChanges.xml` is found, it will be executed as --postdestructivechanges + +- If env var `SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES` is defined as `false` , split of package.xml will be applied + +Example: + +```yaml +deploymentPlan: + packages: + - label: Deploy Flow-Workflow + packageXmlFile: manifest/splits/packageXmlFlowWorkflow.xml + order: 6 + - label: Deploy SharingRules - Case + packageXmlFile: manifest/splits/packageXmlSharingRulesCase.xml + order: 30 + waitAfter: 30 +``` + +### Automated fixes post deployments + +#### List view with scope Mine + +If you defined a property **listViewsToSetToMine** in your .sfdx-hardis.yml, related ListViews will be set to Mine ( see command ) + +Example: + +```yaml +listViewsToSetToMine: + - "Operation__c:MyCurrentOperations" + - "Operation__c:MyFinalizedOperations" + - "Opportunity:Default_Opportunity_Pipeline" + - "Opportunity:MyCurrentSubscriptions" + - "Opportunity:MySubscriptions" + - "Account:MyActivePartners" +``` + +Troubleshooting: if you need to fix ListViews with mine from an alpine-linux 
based docker image, use this workaround in your dockerfile: + +```dockerfile +# Do not use puppeteer embedded chromium +RUN apk add --update --no-cache chromium +ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" +ENV CHROMIUM_PATH="/usr/bin/chromium-browser" +ENV PUPPETEER_EXECUTABLE_PATH="$\{CHROMIUM_PATH}" // remove \ before { +``` + +If you need to increase the deployment waiting time (sf project deploy start --wait arg), you can define env variable SFDX_DEPLOY_WAIT_MINUTES (default: 120) + +If you need notifications to be sent using the current Pull Request and not the one just merged ([see use case](https://github.com/hardisgroupcom/sfdx-hardis/issues/637#issuecomment-2230798904)), define env variable SFDX_HARDIS_DEPLOY_BEFORE_MERGE=true + +If you want to disable the calculation and display of Flow Visual Git Diff in Pull Request comments, define variable **SFDX_DISABLE_FLOW_DIFF=true** + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| check
-c | boolean | Only checks the deployment, there is no impact on target org | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| delta | boolean | Applies sfdx-git-delta to package.xml before other deployment processes | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| packagexml
-p | option | Path to package.xml containing what you want to deploy in target org | | | | +|runtests
-r|option|If testlevel=RunSpecifiedTests, please provide a list of classes. +If testlevel=RunRepositoryTests, can contain a regular expression to keep only class names matching it. If not set, will run all test classes found in the repo.|||| +|skipauth|boolean|Skip authentication check when a default username is required|||| +|target-org
-o|option|undefined|||| +|testlevel
-l|option|Level of tests to validate deployment. RunRepositoryTests auto-detect and run all repository test classes|||NoTestRun
RunSpecifiedTests
RunRepositoryTests
RunRepositoryTestsExceptSeeAllData
RunLocalTests
RunAllTestsInOrg| +|websocket|option|Websocket host:port for VsCode SFDX Hardis UI integration|||| + +## Examples + +```shell +$ sf hardis:project:deploy:smart +``` + +```shell +$ sf hardis:project:deploy:smart --check +``` + +```shell +$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTests +``` + +```shell +$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTests --runtests '^(?!FLI|MyPrefix).*' +``` + +```shell +$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTestsExceptSeeAllData +``` + +```shell +$ sf hardis:project:deploy:smart +``` + +```shell +$ FORCE_TARGET_BRANCH=preprod NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org nicolas.vuillamy@myclient.com.preprod +``` + +```shell +$ SYSTEM_ACCESSTOKEN=xxxxxx SYSTEM_COLLECTIONURI=https://dev.azure.com/xxxxxxx/ SYSTEM_TEAMPROJECT="xxxxxxx" BUILD_REPOSITORY_ID=xxxxx SYSTEM_PULLREQUEST_PULLREQUESTID=1418 FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my.salesforce@org.com +``` + +```shell +$ CI_SFDX_HARDIS_BITBUCKET_TOKEN=xxxxxx BITBUCKET_WORKSPACE=sfdxhardis-demo BITBUCKET_REPO_SLUG=test BITBUCKET_BUILD_NUMBER=1 BITBUCKET_BRANCH=uat BITBUCKET_PR_ID=2 FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my-salesforce-org@client.com +``` + +```shell +$ GITHUB_TOKEN=xxxx GITHUB_REPOSITORY=my-user/my-repo FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my-salesforce-org@client.com +``` + + diff --git a/docs/hardis/project/deploy/sources/dx.md b/docs/hardis/project/deploy/sources/dx.md index e6e36596e..f6b1018d9 100644 --- a/docs/hardis/project/deploy/sources/dx.md +++ b/docs/hardis/project/deploy/sources/dx.md @@ -1,9 +1,9 @@ - + # 
hardis:project:deploy:sources:dx ## Description -Deploy SFDX source to org, following deploymentPlan in .sfdx-hardis.yml +Smart deploy of SFDX sources to target org, with many useful options. In case of errors, [tips to fix them](https://sfdx-hardis.cloudity.com/deployTips/) will be included within the error messages. @@ -25,45 +25,61 @@ This will activate delta deployments only between minor and major branches (majo If you want to force the delta deployment into major orgs (ex: preprod to prod), this is not recommended but you can use env variable ALWAYS_ENABLE_DELTA_DEPLOYMENT=true +### Smart Deployments Tests + +Not all metadata updates can break test classes, use Smart Deployment Tests to skip running test classes if ALL the following conditions are met: + +- Delta deployment is activated and applicable to the source and target branches +- Delta deployed metadatas are all matching the list of **NOT_IMPACTING_METADATA_TYPES** (see below) +- Target org is not a production org + +Activate Smart Deployment tests with: + +- env variable `USE_SMART_DEPLOYMENT_TESTS=true` +- .sfdx-hardis.yml config property `useSmartDeploymentTests: true` + +Defaut list for **NOT_IMPACTING_METADATA_TYPES** (can be overridden with comma-separated list on env var NOT_IMPACTING_METADATA_TYPES) + +- Audience +- AuraDefinitionBundle +- Bot +- BotVersion +- ContentAsset +- CustomObjectTranslation +- CustomSite +- CustomTab +- Dashboard +- ExperienceBundle +- Flexipage +- GlobalValueSetTranslation +- Layout +- LightningComponentBundle +- NavigationMenu +- ReportType +- Report +- SiteDotCom +- StandardValueSetTranslation +- StaticResource +- Translations + +Note: if you want to disable Smart test classes for a PR, add **nosmart** in the text of the latest commit. 
+ ### Dynamic deployment items / Overwrite management If necessary,you can define the following files (that supports wildcards *): -- `manifest/package-no-overwrite.xml`: Every element defined in this file will be deployed only if it is not existing yet in the target org (can be useful with ListView for example, if the client wants to update them directly in production org) +- `manifest/package-no-overwrite.xml`: Every element defined in this file will be deployed only if it is not existing yet in the target org (can be useful with ListView for example, if the client wants to update them directly in production org). + - Can be overridden for a branch using .sfdx-hardis.yml property **packageNoOverwritePath** or environment variable PACKAGE_NO_OVERWRITE_PATH (for example, define: `packageNoOverwritePath: manifest/package-no-overwrite-main.xml` in config file `config/.sfdx-hardis.main.yml`) - `manifest/packageXmlOnChange.xml`: Every element defined in this file will not be deployed if it already has a similar definition in target org (can be useful for SharingRules for example) See [Overwrite management documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-overwrite/) -### Deployment plan - -If you need to deploy in multiple steps, you can define a property `deploymentPlan` in `.sfdx-hardis.yml`. 
- -- If a file `manifest/package.xml` is found, it will be placed with order 0 in the deployment plan - -- If a file `manifest/destructiveChanges.xml` is found, it will be executed as --postdestructivechanges - -- If env var `SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES` is defined as `false` , split of package.xml will be applied - -Example: - -```yaml -deploymentPlan: - packages: - - label: Deploy Flow-Workflow - packageXmlFile: manifest/splits/packageXmlFlowWorkflow.xml - order: 6 - - label: Deploy SharingRules - Case - packageXmlFile: manifest/splits/packageXmlSharingRulesCase.xml - order: 30 - waitAfter: 30 -``` - ### Packages installation You can define a list of package to install during deployments using property `installedPackages` - If `INSTALL_PACKAGES_DURING_CHECK_DEPLOY` is defined as `true` (or `installPackagesDuringCheckDeploy: true` in `.sfdx-hardis.yml`), packages will be installed even if the command is called with `--check` mode -- You can automatically update this property by listing all packages installed on an org using command `sfdx hardis:org:retrieve:packageconfig` +- You can automatically update this property by listing all packages installed on an org using command `sf hardis:org:retrieve:packageconfig` Example: @@ -77,7 +93,7 @@ installedPackages: SubscriberPackageVersionName: Marketing Cloud SubscriberPackageVersionNumber: 236.0.0.2 installOnScratchOrgs: true // true or false depending you want to install this package when creating a new scratch org - installDuringDeployments: true // set as true to install package during a deployment using sfdx hardis:project:deploy:sources:dx + installDuringDeployments: true // set as true to install package during a deployment using sf hardis:project:deploy:smart installationkey: xxxxxxxxxxxxxxxxxxxx // if the package has a password, write it in this property - Id: 0A35r0000009F9CCAU SubscriberPackageId: 033b0000000Pf2AAAS @@ -92,7 +108,13 @@ installedPackages: ### Deployment pre or post commands -You 
can define command lines to run before or after a deployment +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** @@ -106,6 +128,7 @@ commandsPreDeploy: - id: knowledgeAssign label: Assign Knowledge user to the deployment user command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + commandsPostDeploy: - id: knowledgeUnassign label: Remove KnowledgeUser right to the user who has it @@ -113,6 +136,53 @@ commandsPostDeploy: - id: knowledgeAssign label: Assign Knowledge user to desired username command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... 
+ skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + +### Pull Requests Custom Behaviors + +If some words are found **in the Pull Request description**, special behaviors will be applied + +| Word | Behavior | +|:-------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| NO_DELTA | Even if delta deployments are activated, a deployment in mode **full** will be performed for this Pull Request | +| PURGE_FLOW_VERSIONS | After deployment, inactive and obsolete Flow Versions will be deleted (equivalent to command sf hardis:org:purge:flow)
**Caution: This will also purge active Flow Interviews !** | +| DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT | If a file manifest/destructiveChanges.xml is found, it will be executed in a separate step, after the deployment of the main package | + +> For example, define `PURGE_FLOW_VERSIONS` and `DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT` in your Pull Request comments if you want to delete fields that are used in an active flow. + +Note: it is also possible to define these behaviors as ENV variables: + +- For all deployments (example: `PURGE_FLOW_VERSIONS=true`) +- For a specific branch, by appending the target branch name (example: `PURGE_FLOW_VERSIONS_UAT=true`) + +### Deployment plan (deprecated) + +If you need to deploy in multiple steps, you can define a property `deploymentPlan` in `.sfdx-hardis.yml`. + +- If a file `manifest/package.xml` is found, it will be placed with order 0 in the deployment plan + +- If a file `manifest/destructiveChanges.xml` is found, it will be executed as --postdestructivechanges + +- If env var `SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES` is defined as `false` , split of package.xml will be applied + +Example: + +```yaml +deploymentPlan: + packages: + - label: Deploy Flow-Workflow + packageXmlFile: manifest/splits/packageXmlFlowWorkflow.xml + order: 6 + - label: Deploy SharingRules - Case + packageXmlFile: manifest/splits/packageXmlSharingRulesCase.xml + order: 30 + waitAfter: 30 ``` ### Automated fixes post deployments @@ -143,34 +213,70 @@ ENV CHROMIUM_PATH="/usr/bin/chromium-browser" ENV PUPPETEER_EXECUTABLE_PATH="$\{CHROMIUM_PATH}" // remove \ before { ``` -If you need to increase the deployment waiting time (force:source:deploy --wait arg), you can define env var SFDX_DEPLOY_WAIT_MINUTES - +If you need to increase the deployment waiting time (sf project deploy start --wait arg), you can define env variable SFDX_DEPLOY_WAIT_MINUTES (default: 120) + +If you need notifications to be sent using the current Pull Request and not the one just 
merged ([see use case](https://github.com/hardisgroupcom/sfdx-hardis/issues/637#issuecomment-2230798904)), define env variable SFDX_HARDIS_DEPLOY_BEFORE_MERGE=true + +If you want to disable the calculation and display of Flow Visual Git Diff in Pull Request comments, define variable **SFDX_DISABLE_FLOW_DIFF=true** + ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:----------------------------------------------------------------------------------------------------------|:-------------:|:--------:|:---------------------------------------------------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| check
-c | boolean | Only checks the deployment, there is no impact on target org | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| delta | boolean | Applies sfdx-git-delta to package.xml before other deployment processes | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| packagexml
-p | option | Path to package.xml containing what you want to deploy in target org | | | | -| runtests
-r | option | Apex test classes to run if --testlevel is RunSpecifiedTests | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| testlevel
-l | option | Level of tests to validate deployment. RunRepositoryTests auto-detect and run all repository test classes | RunLocalTests | | NoTestRun
RunSpecifiedTests
RunRepositoryTests
RunLocalTests
RunAllTestsInOrg | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| check
-c | boolean | Only checks the deployment, there is no impact on target org | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| delta | boolean | Applies sfdx-git-delta to package.xml before other deployment processes | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| packagexml
-p | option | Path to package.xml containing what you want to deploy in target org | | | | +|runtests
-r|option|If testlevel=RunSpecifiedTests, please provide a list of classes. +If testlevel=RunRepositoryTests, it can contain a regular expression to keep only class names matching it. If not set, it will run all test classes found in the repo.|||| +|skipauth|boolean|Skip authentication check when a default username is required|||| +|target-org
-o|option|undefined|||| +|testlevel
-l|option|Level of tests to validate deployment. RunRepositoryTests auto-detect and run all repository test classes|||NoTestRun
RunSpecifiedTests
RunRepositoryTests
RunRepositoryTestsExceptSeeAllData
RunLocalTests
RunAllTestsInOrg| +|websocket|option|Websocket host:port for VsCode SFDX Hardis UI integration|||| ## Examples ```shell -sfdx hardis:project:deploy:sources:dx +$ sf hardis:project:deploy:smart +``` + +```shell +$ sf hardis:project:deploy:smart --check +``` + +```shell +$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTests +``` + +```shell +$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTests --runtests '^(?!FLI|MyPrefix).*' +``` + +```shell +$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTestsExceptSeeAllData +``` + +```shell +$ sf hardis:project:deploy:smart +``` + +```shell +$ FORCE_TARGET_BRANCH=preprod NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org nicolas.vuillamy@myclient.com.preprod +``` + +```shell +$ SYSTEM_ACCESSTOKEN=xxxxxx SYSTEM_COLLECTIONURI=https://dev.azure.com/xxxxxxx/ SYSTEM_TEAMPROJECT="xxxxxxx" BUILD_REPOSITORY_ID=xxxxx SYSTEM_PULLREQUEST_PULLREQUESTID=1418 FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my.salesforce@org.com +``` + +```shell +$ CI_SFDX_HARDIS_BITBUCKET_TOKEN=xxxxxx BITBUCKET_WORKSPACE=sfdxhardis-demo BITBUCKET_REPO_SLUG=test BITBUCKET_BUILD_NUMBER=1 BITBUCKET_BRANCH=uat BITBUCKET_PR_ID=2 FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my-salesforce-org@client.com ``` ```shell -sfdx hardis:project:deploy:sources:dx --check +$ GITHUB_TOKEN=xxxx GITHUB_REPOSITORY=my-user/my-repo FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my-salesforce-org@client.com ``` diff --git a/docs/hardis/project/deploy/sources/metadata.md b/docs/hardis/project/deploy/sources/metadata.md index e4c3d91cd..da9f79562 100644 --- 
a/docs/hardis/project/deploy/sources/metadata.md +++ b/docs/hardis/project/deploy/sources/metadata.md @@ -1,4 +1,4 @@ - + # hardis:project:deploy:sources:metadata ## Description @@ -7,26 +7,25 @@ Deploy metadatas to source org ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-----------------------------|:-------:|:--------------------------------------------------------------------|:-------------:|:--------:|:----------------------------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| check
-c | boolean | Only checks the deployment, there is no impact on target org | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| deploydir
-x | option | Deploy directory | . | | | -| destructivepackagexml
-k | option | Path to destructiveChanges.xml file to deploy | | | | -| filter
-f | boolean | Filter metadatas before deploying | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| packagexml
-p | option | Path to package.xml file to deploy | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| testlevel
-l | option | Level of tests to apply to validate deployment | RunLocalTests | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-----------------------------|:-------:|:--------------------------------------------------------------|:-------------:|:--------:|:----------------------------------------------------------------------:| +| check
-c | boolean | Only checks the deployment, there is no impact on target org | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| deploydir
-x | option | Deploy directory | . | | | +| destructivepackagexml
-k | option | Path to destructiveChanges.xml file to deploy | | | | +| filter
-f | boolean | Filter metadatas before deploying | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| packagexml
-p | option | Path to package.xml file to deploy | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| testlevel
-l | option | Level of tests to apply to validate deployment | RunLocalTests | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:deploy:sources:metadata +$ sf hardis:project:deploy:sources:metadata ``` diff --git a/docs/hardis/project/deploy/start.md b/docs/hardis/project/deploy/start.md new file mode 100644 index 000000000..4f64dc891 --- /dev/null +++ b/docs/hardis/project/deploy/start.md @@ -0,0 +1,84 @@ + +# hardis:project:deploy:start + +## Description + +sfdx-hardis wrapper for **sf project deploy start** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_start_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to exist in the target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the desired username + command: sf data update record --sobject User --where "Username='admin-user@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: An action to run only if deployment is a success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-------------------------|:-------:|:--------:|:-------:| +| api-version
-a | option | api-version | | | | +| async | boolean | async | | | | +| coverage-formatters | option | coverage-formatters | | | | +| debug | boolean | debug | | | | +| dry-run | boolean | dry-run | | | | +| flags-dir | option | undefined | | | | +| ignore-conflicts
-c | boolean | ignore-conflicts | | | | +| ignore-errors
-r | boolean | ignore-errors | | | | +| ignore-warnings
-g | boolean | ignore-warnings | | | | +| json | boolean | Format output as json. | | | | +| junit | boolean | junit | | | | +| manifest
-x | option | manifest | | | | +| metadata
-m | option | metadata | | | | +| metadata-dir | option | metadata-dir | | | | +| post-destructive-changes | option | post-destructive-changes | | | | +| pre-destructive-changes | option | pre-destructive-changes | | | | +| purge-on-delete | boolean | purge-on-delete | | | | +| results-dir | option | results-dir | | | | +| single-package | boolean | single-package | | | | +| source-dir
-d | option | source-dir | | | | +| target-org
-o | option | undefined | | | | +| test-level | option | test-level | | | | +| tests | option | tests | | | | +| wait
-w | option | wait | 33 | | | + +## Examples + + diff --git a/docs/hardis/project/deploy/validate.md b/docs/hardis/project/deploy/validate.md new file mode 100644 index 000000000..c31dec8c0 --- /dev/null +++ b/docs/hardis/project/deploy/validate.md @@ -0,0 +1,84 @@ + +# hardis:project:deploy:validate + +## Description + +sfdx-hardis wrapper for **sf project deploy validate** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_validate_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to exist in the target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +```yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the desired username + command: sf data update record --sobject User --where "Username='admin-user@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: An action to run only if deployment is a success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +``` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-------------------------|:-------:|:-------------------------|:-------:|:--------:|:-------:| +| api-version
-a | option | api-version | | | | +| async | boolean | async | | | | +| coverage-formatters | option | coverage-formatters | | | | +| debug | boolean | debug | | | | +| dry-run | boolean | dry-run | | | | +| flags-dir | option | undefined | | | | +| ignore-conflicts
-c | boolean | ignore-conflicts | | | | +| ignore-errors
-r | boolean | ignore-errors | | | | +| ignore-warnings
-g | boolean | ignore-warnings | | | | +| json | boolean | Format output as json. | | | | +| junit | boolean | junit | | | | +| manifest
-x | option | manifest | | | | +| metadata
-m | option | metadata | | | | +| metadata-dir | option | metadata-dir | | | | +| post-destructive-changes | option | post-destructive-changes | | | | +| pre-destructive-changes | option | pre-destructive-changes | | | | +| purge-on-delete | boolean | purge-on-delete | | | | +| results-dir | option | results-dir | | | | +| single-package | boolean | single-package | | | | +| source-dir
-d | option | source-dir | | | | +| target-org
-o | option | undefined | | | | +| test-level | option | test-level | | | | +| tests | option | tests | | | | +| wait
-w | option | wait | 33 | | | + +## Examples + + diff --git a/docs/hardis/project/fix/profiletabs.md b/docs/hardis/project/fix/profiletabs.md index af71ccf50..13be9e9dd 100644 --- a/docs/hardis/project/fix/profiletabs.md +++ b/docs/hardis/project/fix/profiletabs.md @@ -1,27 +1,53 @@ - + # hardis:project:fix:profiletabs ## Description -Interactive prompts to add tab visibilities that are not retrieved by force:source:pull + +## Command Behavior + +**Interactively updates tab visibility settings in Salesforce profiles, addressing a common issue where tab visibilities are not correctly retrieved by `sf project retrieve start`.** + +This command provides a user-friendly interface to manage tab settings within your profile XML files, ensuring that your local project accurately reflects the intended tab configurations in your Salesforce org. + +Key functionalities: + +- **Interactive Tab Selection:** Displays a multi-select menu of all available tabs in your org, allowing you to choose which tabs to update. +- **Visibility Control:** Lets you set the visibility for the selected tabs to either `DefaultOn` (Visible) or `Hidden`. +- **Profile Selection:** Presents a multi-select menu of all .profile-meta.xml files in your project, allowing you to apply the tab visibility changes to specific profiles. +- **XML Updates:** Modifies the section of the selected profile XML files to reflect the chosen tab settings. If a tab visibility setting already exists for a selected tab, it will be updated; otherwise, a new one will be added. +- **Sorted Output:** The in the updated profile XML files are sorted alphabetically for consistency and readability. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Tooling API):** It queries the `TabDefinition` object using `soqlQueryTooling` to retrieve a list of all available tabs in the target org. +- **File Discovery:** Uses `glob` to find all .profile-meta.xml files within the specified project path. +- **Interactive Prompts:** Leverages the `prompts` library to create interactive menus for selecting tabs, visibility settings, and profiles. +- **XML Parsing and Manipulation:** Uses `parseXmlFile` to read the content of profile XML files and `writeXmlFile` to write the modified content back. It manipulates the `tabVisibilities` array within the parsed XML to add or update tab settings. +- **Array Sorting:** Employs the `sort-array` library to sort the `tabVisibilities` alphabetically by tab name. +- **Logging:** Provides feedback to the user about which profiles have been updated and a summary of the changes. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-----------------------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Root folder | C:\git\pro\sfdx-hardis2 | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-----------------------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Root folder | C:\git\pro\sfdx-hardis2 | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:fix:profiletabs +$ sf hardis:project:fix:profiletabs ``` diff --git a/docs/hardis/project/fix/v53flexipages.md b/docs/hardis/project/fix/v53flexipages.md index 9db9cf51d..927a002c7 100644 --- a/docs/hardis/project/fix/v53flexipages.md +++ b/docs/hardis/project/fix/v53flexipages.md @@ -1,27 +1,51 @@ - + # hardis:project:fix:v53flexipages ## Description -Fix flexipages for apiVersion v53 (Winter22). -Note: Update api version to 53.0 in package.xml and sfdx-project.json +## Command Behavior + +**Fixes Salesforce FlexiPages for compatibility with API Version 53.0 (Winter '22 release) by adding missing identifiers to component instances.** + +Salesforce introduced a change in API Version 53.0 that requires `identifier` tags within `componentInstance` and `fieldInstance` elements in FlexiPage metadata. If these identifiers are missing, deployments to orgs with API version 53.0 or higher will fail. This command automates the process of adding these missing identifiers, ensuring your FlexiPages remain deployable. + +Key functionalities: + +- **Targeted FlexiPage Processing:** Scans all .flexipage-meta.xml files within the specified root folder (defaults to current working directory). +- **Identifier Injection:** Inserts a unique `identifier` tag (e.g., `SFDX_HARDIS_REPLACEMENT_ID`) into `componentInstance` and `fieldInstance` elements that lack one. + +**Important Note:** After running this command, ensure you update your `apiVersion` to `53.0` (or higher) in your `package.xml` and `sfdx-project.json` files. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses `glob` to find all .flexipage-meta.xml files. +- **Content Reading:** Reads the XML content of each FlexiPage file. +- **Regular Expression Replacement:** Employs a set of regular expressions to identify specific XML patterns (componentName.../componentName.../componentInstance, componentName.../componentName.../visibilityRule, fieldItem.../fieldItem.../fieldInstance) that are missing the `identifier` tag. +- **Dynamic ID Generation:** For each match, it generates a unique identifier (e.g., `sfdxHardisIdX`) and injects it into the XML structure. +- **File Writing:** If changes are made, the modified XML content is written back to the FlexiPage file using `fs.writeFile`. +- **Logging:** Provides messages about which FlexiPages are being processed and a summary of the total number of identifiers added. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-----------------------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| path
-p | option | Root folder | C:\git\pro\sfdx-hardis2 | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-----------------------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| path
-p | option | Root folder | C:\git\pro\sfdx-hardis2 | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:fix:v53flexipages +$ sf hardis:project:fix:v53flexipages ``` diff --git a/docs/hardis/project/generate/bypass.md b/docs/hardis/project/generate/bypass.md new file mode 100644 index 000000000..7d19c76d9 --- /dev/null +++ b/docs/hardis/project/generate/bypass.md @@ -0,0 +1,97 @@ + +# hardis:project:generate:bypass + +## Description + + +## Command Behavior + +**Generates custom permissions and permission sets to bypass specified Salesforce automations (Flows, Triggers, and Validation Rules) for specific sObjects.** + +This command provides a controlled mechanism to temporarily or permanently disable automations for certain sObjects, which is invaluable for: + +- **Data Loading:** Bypassing validation rules or triggers during large data imports. +- **Troubleshooting:** Isolating automation issues by temporarily disabling them. +- **Development:** Allowing developers to work on specific sObjects without triggering complex automations. + +Key functionalities: + +- **sObject Selection:** You can specify a comma-separated list of sObjects to bypass (e.g., `Account,Contact`). If omitted, an interactive prompt will allow you to select from available sObjects. +- **Automation Type Selection:** Choose which types of automations to bypass: `Flow`, `Trigger`, or `VR` (Validation Rules). If omitted, an interactive prompt will guide your selection. +- **Automatic Bypass Application:** Optionally, the command can automatically inject bypass logic into Validation Rules and Triggers. This involves modifying the Apex code for Triggers and the XML for Validation Rules. 
+- **Metadata Source:** You can choose to retrieve the metadata elements (Validation Rules, Triggers) from the org (`--metadata-source org`) or use local files (`--metadata-source local`). Retrieving from the org is recommended for accuracy. +- **Custom Permission and Permission Set Generation:** For each selected sObject and automation type, it generates: + - A **Custom Permission** (e.g., `BypassAccountFlows`) that acts as the bypass switch. + - A **Permission Set** (e.g., `BypassAccountFlows`) that grants the generated Custom Permission. +- **Credits Inclusion:** By default, generated XML files include a comment indicating they were generated by sfdx-hardis. This can be skipped using `--skip-credits`. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Tooling API):** It queries `EntityDefinition` to list customizable sObjects and `ValidationRule` and `ApexTrigger` to find existing automations. +- **Interactive Prompts:** Uses the `prompts` library to guide the user through selecting sObjects, automation types, and bypass application options. +- **XML Generation:** Dynamically generates XML content for Custom Permissions and Permission Sets, including descriptions and labels that clearly indicate their purpose. +- **File System Operations:** Uses `fs-extra` to create directories and write the generated Custom Permission and Permission Set XML files. +- **Metadata Retrieval (for Bypass Application):** If `apply-to-vrs` or `apply-to-triggers` is used and `metadata-source` is `org`, it retrieves the relevant Validation Rule or Apex Trigger metadata from the org using `sf project retrieve start`. +- **XML/Apex Code Modification:** + - For Validation Rules, it modifies the `errorConditionFormula` in the XML to include a check for the bypass Custom Permission. + - For Apex Triggers, it injects an `if` statement at the beginning of the trigger body to check for the bypass Custom Permission. +- **`parseXmlFile` and `writeXmlFile`:** Used for reading and writing XML metadata files. +- **`execCommand`:** Used for executing Salesforce CLI commands, particularly for metadata retrieval. +- **Error Handling:** Includes checks for invalid sObject or automation selections and provides informative error messages. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:-----------------------|:-------:|:----------------------------------------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| apply-to-triggers | boolean | Apply bypass to Triggers | | | | +| apply-to-vrs | boolean | Apply bypass to Validation Rules | | | | +| automations
-a | option | Comma-separated automations to bypass: Flow, Trigger, VR | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| metadata-source
-r | option | Source of metadata elements to apply bypass to. Options: 'org' or 'local'. | | | | +| objects
-s | option | Comma-separated list of sObjects to bypass (e.g., Account,Contact,Opportunity). If omitted, you will be prompted to select. | | | | +| skip-credits
-k | boolean | Omit the "Generated by" line in the XML files | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:project:generate:bypass +``` + +```shell +$ sf hardis:project:generate:bypass --sObjects Account,Contact,Opportunity +``` + +```shell +$ sf hardis:project:generate:bypass --automations Flow,Trigger,VR +``` + +```shell +$ sf hardis:project:generate:bypass --sObjects Account,Opportunity --automations Flow,Trigger +``` + +```shell +$ sf hardis:project:generate:bypass --skipCredits +``` + +```shell +$ sf hardis:project:generate:bypass --apply-to-vrs +``` + +```shell +$ sf hardis:project:generate:bypass --apply-to-triggers +``` + +```shell +$ sf hardis:project:generate:bypass --metadata-source org +``` + + diff --git a/docs/hardis/project/generate/flow-git-diff.md b/docs/hardis/project/generate/flow-git-diff.md new file mode 100644 index 000000000..5e58d0d91 --- /dev/null +++ b/docs/hardis/project/generate/flow-git-diff.md @@ -0,0 +1,36 @@ + +# hardis:project:generate:flow-git-diff + +## Description + +Generate Flow Visual Git Diff markdown between 2 commits + +Note: This command might require @mermaid-js/mermaid-cli to be installed. + +Run `npm install @mermaid-js/mermaid-cli --global` + + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:--------------|:-------:|:--------------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| commit-after | option | Hash of the commit of the new flow state (will be prompted if not set) | | | | +| commit-before | option | Hash of the commit of the previous flow state, or "allStates" (will be prompted if not set) | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| flow | option | Path to flow file (will be prompted if not set) | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | + +## Examples + +```shell +$ sf hardis:project:generate:flow-git-diff +``` + +```shell +$ sf hardis:project:generate:flow-git-diff --flow "force-app/main/default/flows/Opportunity_AfterUpdate_Cloudity.flow-meta.xml" --commit-before 8bd290e914c9dbdde859dad7e3c399776160d704 --commit-after e0835251bef6e400fb91e42f3a31022f37840f65 +``` + + diff --git a/docs/hardis/project/generate/gitdelta.md b/docs/hardis/project/generate/gitdelta.md index 0683355f1..680f5b080 100644 --- a/docs/hardis/project/generate/gitdelta.md +++ b/docs/hardis/project/generate/gitdelta.md @@ -1,27 +1,54 @@ - + # hardis:project:generate:gitdelta ## Description -Generate package.xml git delta between 2 commits + +## Command Behavior + +**Generates a `package.xml` and `destructiveChanges.xml` representing the metadata differences between two Git commits.** + +This command is a powerful tool for managing Salesforce metadata deployments by focusing only on the changes between specific points in your version control history. It leverages `sfdx-git-delta` to accurately identify added, modified, and deleted metadata components. + +Key functionalities: + +- **Commit-Based Comparison:** Allows you to specify a starting commit (`--fromcommit`) and an ending commit (`--tocommit`) to define the scope of the delta. If not provided, interactive prompts will guide you through selecting commits from your Git history. +- **Branch Selection:** You can specify a Git branch (`--branch`) to work with. If not provided, it will prompt you to select one. 
+- **`package.xml` Generation:** Creates a `package.xml` file that lists all metadata components that have been added or modified between the specified commits. +- **`destructiveChanges.xml` Generation:** Creates a `destructiveChanges.xml` file that lists all metadata components that have been deleted between the specified commits. +- **Temporary File Output:** The generated `package.xml` and `destructiveChanges.xml` files are placed in a temporary directory. + +
+Technical explanations + +The command's technical implementation involves: + +- **Git Integration:** Uses `simple-git` (`git()`) to interact with the Git repository, including fetching branches (`git().fetch()`), checking out branches (`git().checkoutBranch()`), and listing commit history (`git().log()`). +- **Interactive Prompts:** Leverages the `prompts` library to guide the user through selecting a Git branch and specific commits for delta generation if they are not provided as command-line arguments. +- **`sfdx-git-delta` Integration:** The core of the delta generation is handled by the `callSfdxGitDelta` utility function, which wraps the `sfdx-git-delta` tool. This tool performs the actual Git comparison and generates the `package.xml` and `destructiveChanges.xml` files. +- **Temporary Directory Management:** Uses `createTempDir` to create a temporary directory for storing the generated XML files, ensuring a clean working environment. +- **File System Operations:** Uses `fs-extra` to manage temporary files and directories. +- **User Feedback:** Provides clear messages to the user about the generated files and their locations. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| branch | option | Git branch to use to generate delta | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| fromcommit | option | Hash of commit to start from | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| tocommit | option | Hash of commit to stop at | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| branch | option | Git branch to use to generate delta | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| fromcommit | option | Hash of commit to start from | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| tocommit | option | Hash of commit to stop at | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:generate:gitdelta +$ sf hardis:project:generate:gitdelta ``` diff --git a/docs/hardis/project/lint.md b/docs/hardis/project/lint.md index 4fd9af2f0..9a75b0e1d 100644 --- a/docs/hardis/project/lint.md +++ b/docs/hardis/project/lint.md @@ -1,31 +1,54 @@ - + # hardis:project:lint ## Description -Apply syntactic analysis (linters) on the repository sources, using Mega-Linter +## Command Behavior + +**Applies syntactic analysis (linting) to your repository sources using Mega-Linter, ensuring code quality and adherence to coding standards.** + +This command integrates Mega-Linter, a comprehensive linter orchestrator, into your Salesforce DX project. It helps identify and fix code style violations, potential bugs, and other issues across various file types relevant to Salesforce development. + +Key functionalities: + +- **Automated Linting:** Runs a suite of linters configured for Salesforce projects. +- **Fixing Issues (`--fix` flag):** Automatically attempts to fix detected linting issues, saving manual effort. +- **Configuration Management:** If `.mega-linter.yml` is not found, it guides you through the initial setup of Mega-Linter, prompting for the Salesforce flavor. +- **CI/CD Integration:** Designed to be used in CI/CD pipelines to enforce code quality gates. + +
+Technical explanations + +The command's technical implementation involves: + +- **Mega-Linter Integration:** It leverages the `mega-linter-runner` library to execute Mega-Linter. +- **Configuration Check:** Before running, it checks for the presence of `.mega-linter.yml`. If not found and not in a CI environment, it initiates an interactive setup process using `MegaLinterRunner().run({ install: true })`. +- **Linter Execution:** It calls `MegaLinterRunner().run(megaLinterOptions)` with the `salesforce` flavor and the `fix` flag (if provided). +- **Exit Code Handling:** The `process.exitCode` is set based on the Mega-Linter's exit status, allowing CI/CD pipelines to react to linting failures. +- **User Feedback:** Provides clear messages about the success or failure of the linting process. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| fix
-f | boolean | Apply linters fixes | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| fix
-f | boolean | Apply linters fixes | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:project:lint +$ sf hardis:project:lint ``` ```shell -sfdx hardis:project:lint --fix +$ sf hardis:project:lint --fix ``` diff --git a/docs/hardis/project/metadata/findduplicates.md b/docs/hardis/project/metadata/findduplicates.md index 801322cb3..9c7ae960b 100644 --- a/docs/hardis/project/metadata/findduplicates.md +++ b/docs/hardis/project/metadata/findduplicates.md @@ -1,4 +1,4 @@ - + # hardis:project:metadata:findduplicates ## Description @@ -13,13 +13,13 @@ metadataDuplicateFindKeys : ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| files
-f | option | XML metadata files path | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| files
-f | option | XML metadata files path | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples @@ -46,7 +46,7 @@ metadataDuplicateFindKeys : ```shell -$ sfdx hardis:project:metadata:findduplicates --file layout.layout-meta.xml +$ sf hardis:project:metadata:findduplicates --file layout.layout-meta.xml [sfdx-hardis] Duplicate values in layout.layout-meta.xml - Key : Layout.layoutSections.layoutColumns.layoutItems.field - Values : Name @@ -55,7 +55,7 @@ $ sfdx hardis:project:metadata:findduplicates --file layout.layout-meta.xml ```shell -$ sfdx hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xml" +$ sf hardis:project:metadata:findduplicates -f "force-app/main/default/**/*.xml" [sfdx-hardis] hardis:project:metadata:findduplicates execution time 0:00:00.397 [sfdx-hardis] Duplicate values in layout1.layout-meta.xml - Key : Layout.layoutSections.layoutColumns.layoutItems.field diff --git a/docs/hardis/scratch/create.md b/docs/hardis/scratch/create.md index 4ec5b3ac0..12c84c927 100644 --- a/docs/hardis/scratch/create.md +++ b/docs/hardis/scratch/create.md @@ -1,4 +1,4 @@ - + # hardis:scratch:create ## Description @@ -18,22 +18,21 @@ Create and initialize a scratch org or a source-tracked sandbox (config can be d ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| forcenew
-n | boolean | If an existing scratch org exists, do not reuse it but create a new one | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| pool
-d | boolean | Creates the scratch org for a scratch org pool | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| forcenew
-n | boolean | If an existing scratch org exists, do not reuse it but create a new one | | | | +| json | boolean | Format output as json. | | | | +| pool | boolean | Creates the scratch org for a scratch org pool | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:create +$ sf hardis:scratch:create ``` diff --git a/docs/hardis/scratch/delete.md b/docs/hardis/scratch/delete.md index a0ffb9821..08c010867 100644 --- a/docs/hardis/scratch/delete.md +++ b/docs/hardis/scratch/delete.md @@ -1,26 +1,49 @@ - + # hardis:scratch:delete ## Description -Assisted menu to delete scratch orgs associated to a DevHub +## Command Behavior + +**Provides an assisted menu to delete Salesforce scratch orgs associated with a Dev Hub.** + +This command simplifies the process of cleaning up your Salesforce development environments by allowing you to easily select and delete multiple scratch orgs. This is crucial for managing your scratch org limits and ensuring that you don't accumulate unnecessary or expired orgs. + +Key functionalities: + +- **Interactive Scratch Org Selection:** Displays a list of all active scratch orgs linked to your Dev Hub, including their usernames, instance URLs, and last used dates. +- **Multi-Selection:** Allows you to select multiple scratch orgs for deletion. +- **Confirmation Prompt:** Prompts for confirmation before proceeding with the deletion, ensuring that you don't accidentally delete important orgs. +- **Dev Hub Integration:** Works with your configured Dev Hub to manage scratch orgs. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce CLI Integration:** It executes the `sf org list` command to retrieve a list of all scratch orgs associated with the current Dev Hub. It then filters this list to show only active orgs. +- **Interactive Prompts:** Uses the `prompts` library to present a multi-select menu of scratch orgs to the user. +- **Scratch Org Deletion:** For each selected scratch org, it executes the `sf org delete scratch --no-prompt` command to perform the deletion. +- **Error Handling:** Includes basic error handling for Salesforce CLI commands. +- **Data Sorting:** Sorts the list of scratch orgs by username, alias, and instance URL for better readability in the interactive menu. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:delete +$ sf hardis:scratch:delete ``` diff --git a/docs/hardis/scratch/pool/create.md b/docs/hardis/scratch/pool/create.md index 93fd41652..253335c0f 100644 --- a/docs/hardis/scratch/pool/create.md +++ b/docs/hardis/scratch/pool/create.md @@ -1,4 +1,4 @@ - + # hardis:scratch:pool:create ## Description @@ -7,32 +7,31 @@ Select a data storage service and configure information to build a scratch org p Run the command, follow instruction, then you need to schedule a daily CI job for the pool maintenance: -- Define CI ENV variable SCRATCH_ORG_POOL with value "true" + - Define CI ENV variable SCRATCH_ORG_POOL with value "true" -- Call the following lines in the CI job: + - Call the following lines in the CI job: ```shell - sfdx hardis:auth:login --devhub - sfdx hardis:scratch:pool:refresh + sf hardis:auth:login --devhub + sf hardis:scratch:pool:refresh ``` ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:pool:configure +$ sf hardis:scratch:pool:configure ``` diff --git a/docs/hardis/scratch/pool/localauth.md b/docs/hardis/scratch/pool/localauth.md index 8d578e2e9..d4d86c40a 100644 --- a/docs/hardis/scratch/pool/localauth.md +++ b/docs/hardis/scratch/pool/localauth.md @@ -1,26 +1,48 @@ - + # hardis:scratch:pool:localauth ## Description -Calls the related storage service to request api keys and secrets that allows a local user to fetch a scratch org from scratch org pool + +## Command Behavior + +**Authenticates a local user to the configured scratch org pool storage service, enabling them to fetch and manage scratch orgs from the pool.** + +This command is essential for developers who want to utilize a shared scratch org pool for their local development. It establishes the necessary authentication with the backend storage service (e.g., Salesforce Custom Object, Redis) that manages the pool's state, allowing the user to retrieve available scratch orgs for their work. + +Key functionalities: + +- **Storage Service Authentication:** Initiates the authentication process with the chosen storage service to obtain the required API keys or secrets. +- **Enables Pool Access:** Once authenticated, the local user can then use other sfdx-hardis commands to fetch, use, and return scratch orgs from the pool. +- **Configuration Check:** Verifies if a scratch org pool is already configured for the current project and provides guidance if it's not. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the `poolConfig` from the project's .sfdx-hardis.yml file to identify the configured storage service. +- **Provider Instantiation:** It uses the `instantiateProvider` utility function to create an instance of the `KeyValueProviderInterface` corresponding to the configured storage service. +- **User Authentication:** It then calls the `userAuthenticate()` method on the instantiated provider. This method encapsulates the specific logic for authenticating with the chosen storage service (e.g., prompting for API keys, performing OAuth flows). +- **Error Handling:** It checks for the absence of a configured scratch org pool and provides a user-friendly message. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:pool:localauth +$ sf hardis:scratch:pool:localauth ``` diff --git a/docs/hardis/scratch/pool/refresh.md b/docs/hardis/scratch/pool/refresh.md index 738020768..1f25aff91 100644 --- a/docs/hardis/scratch/pool/refresh.md +++ b/docs/hardis/scratch/pool/refresh.md @@ -1,26 +1,51 @@ - + # hardis:scratch:pool:refresh ## Description -Create enough scratch orgs to fill the pool +## Command Behavior + +**Refreshes a scratch org pool by creating new scratch orgs to fill the pool and deleting expired ones.** + +This command is designed to maintain a healthy and adequately sized scratch org pool, ensuring that developers and CI/CD pipelines always have access to ready-to-use scratch orgs. It automates the lifecycle management of scratch orgs within the pool. + +Key functionalities: + +- **Expired Org Cleanup:** Identifies and deletes scratch orgs from the pool that are nearing their expiration date (configurable via `minScratchOrgRemainingDays` in `.sfdx-hardis.yml`). +- **Pool Replenishment:** Creates new scratch orgs to replace expired ones and to reach the `maxScratchOrgsNumber` defined in the pool configuration. +- **Parallel Creation:** New scratch orgs are created in parallel using child processes, optimizing the replenishment process. +- **Authentication Handling:** Authenticates to scratch orgs before deletion or creation, ensuring proper access. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the `poolConfig` from the project's `.sfdx-hardis.yml` file to get parameters like `maxScratchOrgsNumber`, `maxScratchOrgsNumberToCreateOnce`, and `minScratchOrgRemainingDays`. +- **Pool Storage Interaction:** It uses `getPoolStorage` and `setPoolStorage` to interact with the configured storage service (e.g., Salesforce Custom Object, Redis) to retrieve and update the list of scratch orgs in the pool. +- **Expiration Check:** It calculates the remaining days for each scratch org in the pool using moment and flags those below the `minScratchOrgRemainingDays` threshold for deletion. +- **Scratch Org Deletion:** For expired orgs, it authenticates to them using `authenticateWithSfdxUrlStore` and then executes `sf org delete scratch` via `execCommand`. +- **Scratch Org Creation:** To replenish the pool, it spawns new child processes that run the `sf hardis:scratch:create --pool` command. This allows for parallel creation of multiple scratch orgs. +- **Error Handling:** It includes error handling for scratch org creation failures, logging them and updating the pool storage accordingly. +- **Logging:** Provides detailed logs about the status of scratch orgs (kept, deleted, created, failed creations) and a summary of the refresh operation. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:pool:refresh +$ sf hardis:scratch:pool:refresh ``` diff --git a/docs/hardis/scratch/pool/reset.md b/docs/hardis/scratch/pool/reset.md index ddc08a8cf..9a8f428b4 100644 --- a/docs/hardis/scratch/pool/reset.md +++ b/docs/hardis/scratch/pool/reset.md @@ -1,26 +1,51 @@ - + # hardis:scratch:pool:reset ## Description -Reset scratch org pool (delete all scratches in the pool) + +## Command Behavior + +**Resets the scratch org pool by deleting all existing scratch orgs within it.** + +This command provides a way to clear out the entire scratch org pool, effectively starting fresh. This can be useful for: + +- **Troubleshooting:** If the pool becomes corrupted or contains problematic scratch orgs. +- **Major Changes:** When there are significant changes to the scratch org definition or initialization process that require all existing orgs to be recreated. +- **Cleanup:** Periodically cleaning up the pool to ensure only the latest and most relevant scratch orgs are available. + +Key functionalities: + +- **Full Pool Deletion:** Identifies all scratch orgs currently in the pool and initiates their deletion. +- **Dev Hub Integration:** Works with your configured Dev Hub to manage the scratch orgs within the pool. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the `poolConfig` from the project's .sfdx-hardis.yml file to ensure a pool is configured. +- **Pool Storage Interaction:** It uses `getPoolStorage` to retrieve the current list of scratch orgs in the pool and `setPoolStorage` to clear the pool's record. +- **Scratch Org Deletion:** It iterates through each scratch org in the retrieved list. For each org, it authenticates to it using `authenticateWithSfdxUrlStore` and then executes `sf org delete scratch` via `execCommand`. +- **Logging:** Provides clear messages about the deletion process and the status of each scratch org. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:pool:refresh +$ sf hardis:scratch:pool:reset ``` diff --git a/docs/hardis/scratch/pool/view.md b/docs/hardis/scratch/pool/view.md index c275c57bd..eaf15f637 100644 --- a/docs/hardis/scratch/pool/view.md +++ b/docs/hardis/scratch/pool/view.md @@ -1,26 +1,48 @@ - + # hardis:scratch:pool:view ## Description -Displays all stored content of project scratch org pool if defined + +## Command Behavior + +**Displays information about the configured scratch org pool, including its current state and available scratch orgs.** + +This command provides visibility into your scratch org pool, allowing you to monitor its health, check the number of available orgs, and verify its configuration. It's a useful tool for administrators and developers managing shared scratch org environments. + +Key functionalities: + +- **Pool Configuration Display:** Shows the `poolConfig` defined in your ".sfdx-hardis.yml" file, including the chosen storage service and the maximum number of scratch orgs. +- **Pool Storage Content:** Displays the raw content of the pool storage, which includes details about each scratch org in the pool (e.g., alias, username, expiration date). +- **Available Scratch Org Count:** Provides a summary of how many scratch orgs are currently available in the pool. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the `poolConfig` from the project's ".sfdx-hardis.yml" file using `getConfig`. +- **Pool Storage Retrieval:** It uses `getPoolStorage` to connect to the configured storage service (e.g., Salesforce Custom Object, Redis) and retrieve the current state of the scratch org pool. +- **Data Display:** It logs the retrieved pool configuration and pool storage content to the console in a human-readable format. +- **Error Handling:** It checks if a scratch org pool is configured for the project and provides a warning message if it's not. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:pool:view +$ sf hardis:scratch:pool:view ``` diff --git a/docs/hardis/scratch/pull.md b/docs/hardis/scratch/pull.md index 680d9cfd6..f293d36c1 100644 --- a/docs/hardis/scratch/pull.md +++ b/docs/hardis/scratch/pull.md @@ -1,19 +1,23 @@ - + # hardis:scratch:pull ## Description -This commands pulls the updates you performed in your scratch or sandbox org, into your local files -Then, you probably want to stage and commit the files containing the updates you want to keep, as explained in this video. +## Command Behavior - +**Pulls metadata changes from your scratch org or source-tracked sandbox into your local project files.** + +This command is essential for synchronizing your local development environment with the changes you've made directly in your Salesforce org. After pulling, you can then stage and commit the relevant files to your version control system. + +Key features and considerations: -- Calls sfdx force:source:pull under the hood -- If there are errors, proposes to automatically add erroneous item in `.forceignore`, then pull again -- If you want to always retrieve sources like CustomApplication that are not always detected as updates by force:source:pull , you can define property **autoRetrieveWhenPull** in .sfdx-hardis.yml +- **Underlying Command:** Internally, this command executes `sf project retrieve start` to fetch the metadata. +- **Error Handling:** If the pull operation encounters errors, it offers to automatically add the problematic items to your `.forceignore` file and then attempts to pull again, helping you resolve conflicts and ignore unwanted metadata. +- **Missing Updates:** If you don't see certain updated items in the pull results, you might need to manually retrieve them using the Salesforce Extension's **Org Browser** or the **Salesforce CLI** directly. 
Refer to the [Retrieve Metadatas documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/#retrieve-metadatas) for more details. +- **Automatic Retrieval:** You can configure the `autoRetrieveWhenPull` property in your `.sfdx-hardis.yml` file to always retrieve specific metadata types (e.g., `CustomApplication`) that might not always be detected as updates by `project:retrieve:start`. -Example: +Example `.sfdx-hardis.yml` configuration for `autoRetrieveWhenPull`: ```yaml autoRetrieveWhenPull: - CustomApplication:MyCustomApplication @@ -21,23 +25,37 @@ autoRetrieveWhenPull: - CustomApplication:MyThirdCustomApp ``` +For a visual explanation of the process, watch this video: + + + +
+Technical explanations + +The command's technical implementation focuses on robust metadata synchronization: + +- **Salesforce CLI Wrapper:** It acts as a wrapper around the standard Salesforce CLI `sf project retrieve start` command, providing enhanced error handling and configuration options. +- **Force Source Pull Utility:** The core logic resides in the `forceSourcePull` utility function, which orchestrates the retrieval process, including handling `.forceignore` updates. +- **Configuration Integration:** It reads the `autoRetrieveWhenPull` setting from the project's `.sfdx-hardis.yml` to determine additional metadata to retrieve automatically. +- **User Feedback:** Provides clear messages to the user regarding the pull status and guidance for troubleshooting. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:pull +$ sf hardis:scratch:pull ``` diff --git a/docs/hardis/scratch/push.md b/docs/hardis/scratch/push.md index 7636654f0..d67e66f51 100644 --- a/docs/hardis/scratch/push.md +++ b/docs/hardis/scratch/push.md @@ -1,29 +1,45 @@ - + # hardis:scratch:push ## Description -Push local files to scratch org +## Command Behavior -Calls `sfdx force:source:push` under the hood +**Pushes local Salesforce DX source files to a scratch org or source-tracked sandbox.** + +This command is a fundamental operation in Salesforce DX development, allowing developers to synchronize their local codebase with their development org. It ensures that changes made locally are reflected in the scratch org, enabling testing and validation. + +Key functionalities: + +- **Source Synchronization:** Deploys all local changes (metadata and code) to the target scratch org. +- **Underlying Command:** Internally, this command executes `sf project deploy start` to perform the push operation. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce CLI Wrapper:** It acts as a wrapper around the standard Salesforce CLI `sf project deploy start` command. +- **`forceSourcePush` Utility:** The core logic resides in the `forceSourcePush` utility function, which orchestrates the deployment process. +- **Connection Handling:** It uses the connection to the target org to perform the push operation. +
## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:scratch:push +$ sf hardis:scratch:push ``` diff --git a/docs/hardis/source/deploy.md b/docs/hardis/source/deploy.md index 524c12b6b..088e73cd1 100644 --- a/docs/hardis/source/deploy.md +++ b/docs/hardis/source/deploy.md @@ -1,4 +1,4 @@ - + # hardis:source:deploy ## Description @@ -20,7 +20,13 @@ You can also have deployment results as pull request comments, on: ### Deployment pre or post commands -You can define command lines to run before or after a deployment +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** @@ -34,6 +40,7 @@ commandsPreDeploy: - id: knowledgeAssign label: Assign Knowledge user to the deployment user command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + commandsPostDeploy: - id: knowledgeUnassign label: Remove KnowledgeUser right to the user who has it @@ -41,6 +48,12 @@ commandsPostDeploy: - id: knowledgeAssign label: Assign Knowledge user to desired username command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true ``` Notes: @@ -52,39 +65,38 @@ Notes: ## Parameters -| Name | Type | Description | Default | Required | Options | -|:--------------------------------|:-------:|:--------------------------------------------------------------------|:----------:|:--------:|:----------------------------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| checkcoverage | boolean | Check Apex org coverage | | | | -| checkonly
-c | boolean | checkonly | | | | -| coverageformatters | option | coverageformatters | | | | -| debug | boolean | debug | | | | -| forceoverwrite
-f | boolean | forceoverwrite | | | | -| ignoreerrors
-o | boolean | ignoreErrors | | | | -| ignorewarnings
-g | boolean | ignoreWarnings | | | | -| json | boolean | format output as json | | | | -| junit | boolean | junit | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| manifest
-x | option | flagsLong.manifest | | | | -| metadata
-m | option | metadata | | | | -| postdestructivechanges | option | postdestructivechanges | | | | -| predestructivechanges | option | predestructivechanges | | | | -| resultsdir | option | resultsdir | | | | -| runtests
-r | option | runTests | | | | -| soapdeploy | boolean | soapDeploy | | | | -| sourcepath
-p | option | sourcePath | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| testlevel
-l | option | testlevel | NoTestRun | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | -| tracksource
-t | boolean | tracksource | | | | -| validateddeployrequestid
-q | option | validateDeployRequestId | | | | -| verbose | boolean | verbose | | | | -| wait
-w | option | wait | 60 minutes | | | -| websocket | option | websocket | | | | +| Name | Type | Description | Default | Required | Options | +|:--------------------------------|:-------:|:------------------------|:---------:|:--------:|:----------------------------------------------------------------------:| +| checkcoverage | boolean | Check Apex org coverage | | | | +| checkonly
-c | boolean | checkonly | | | | +| coverageformatters | option | coverageformatters | | | | +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| forceoverwrite
-f | boolean | forceoverwrite | | | | +| ignoreerrors | boolean | ignoreErrors | | | | +| ignorewarnings
-g | boolean | ignoreWarnings | | | | +| json | boolean | Format output as json. | | | | +| junit | boolean | junit | | | | +| manifest
-x | option | flagsLong.manifest | | | | +| metadata
-m | option | metadata | | | | +| postdestructivechanges | option | postdestructivechanges | | | | +| predestructivechanges | option | predestructivechanges | | | | +| resultsdir | option | resultsdir | | | | +| runtests
-r | option | runTests | | | | +| soapdeploy | boolean | soapDeploy | | | | +| sourcepath
-p | option | sourcePath | | | | +| target-org
-o | option | undefined | | | | +| testlevel
-l | option | testlevel | NoTestRun | | NoTestRun
RunSpecifiedTests
RunLocalTests
RunAllTestsInOrg | +| tracksource
-t | boolean | tracksource | | | | +| validateddeployrequestid
-q | option | validateDeployRequestId | | | | +| verbose | boolean | verbose | | | | +| wait
-w | option | wait | 60 | | | +| websocket | option | websocket | | | | ## Examples ```shell -sfdx hardis:source:deploy -x manifest/package.xml --wait 60 --ignorewarnings --testlevel RunLocalTests --postdestructivechanges ./manifest/destructiveChanges.xml --targetusername nicolas.vuillamy@cloudity.com.sfdxhardis --checkonly --checkcoverage --verbose --coverageformatters json-summary +$ sf hardis:source:deploy -x manifest/package.xml --wait 60 --ignorewarnings --testlevel RunLocalTests --postdestructivechanges ./manifest/destructiveChanges.xml --target-org nicolas.vuillamy@cloudity.com.sfdxhardis --checkonly --checkcoverage --verbose --coverageformatters json-summary ``` diff --git a/docs/hardis/source/push.md b/docs/hardis/source/push.md index c998d5fe2..53f9f15c6 100644 --- a/docs/hardis/source/push.md +++ b/docs/hardis/source/push.md @@ -1,4 +1,4 @@ - + # hardis:source:push ## Description @@ -12,18 +12,17 @@ sfdx-hardis wrapper for sfdx force:source:push that displays tips to solve deplo ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:----------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug | boolean | debug | | | | -| forceoverwrite
-f | boolean | forceoverwrite | | | | -| ignorewarnings
-g | boolean | ignorewarnings | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| quiet | boolean | quiet | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| wait
-w | option | wait | 60 minutes | | | -| websocket | option | websocket | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:-----------------------|:-------:|:--------:|:-------:| +| debug | boolean | debug | | | | +| flags-dir | option | undefined | | | | +| forceoverwrite
-f | boolean | forceoverwrite | | | | +| ignorewarnings
-g | boolean | ignorewarnings | | | | +| json | boolean | Format output as json. | | | | +| quiet | boolean | quiet | | | | +| target-org
-o | option | undefined | | | | +| wait
-w | option | wait | 60 | | | +| websocket | option | websocket | | | | ## Examples diff --git a/docs/hardis/source/retrieve.md b/docs/hardis/source/retrieve.md index ad7a24610..e6f365a4d 100644 --- a/docs/hardis/source/retrieve.md +++ b/docs/hardis/source/retrieve.md @@ -1,35 +1,55 @@ - + # hardis:source:retrieve ## Description -sfdx-hardis wrapper for sfdx force:source:retrieve -- If no retrieve constraint is sent, as assisted menu will request the list of metadatas to retrieve -- If no org is selected , an assisted menu will request the user to choose one +## Command Behavior -[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_retrieve) +**A wrapper command for Salesforce CLI's `sf project retrieve start` (formerly `sfdx force:source:retrieve`), with enhanced interactive features.** + +This command facilitates the retrieval of metadata from a Salesforce org into your local project. It provides an assisted experience, especially when no specific retrieval constraints are provided. + +Key features: + +- **Assisted Metadata Selection:** If no `sourcepath`, `manifest`, `metadata`, or `packagenames` flags are specified, an interactive menu will prompt you to select the metadata types you wish to retrieve. +- **Assisted Org Selection:** If no target org is specified, an interactive menu will guide you to choose an org for the retrieval operation. +- **Backward Compatibility:** While this command wraps the newer `sf project retrieve start`, it maintains compatibility with the older `sfdx force:source:retrieve` flags. + +**Important Note:** The underlying Salesforce CLI command `sfdx force:source:retrieve` is being deprecated by Salesforce in November 2024. It is recommended to migrate to `sf project retrieve start` for future compatibility. 
See [Salesforce CLI Migration Guide](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm) for more information. + +
+Technical explanations + +This command acts as an intelligent wrapper around the Salesforce CLI's source retrieval functionality: + +- **Command Wrapping:** It uses the `wrapSfdxCoreCommand` utility to execute the `sfdx force:source:retrieve` (or its equivalent `sf project retrieve start`) command, passing through all relevant flags and arguments. +- **Interactive Prompts:** It leverages `MetadataUtils.promptMetadataTypes()` and `promptOrgUsernameDefault()` to provide interactive menus for metadata and org selection when the user does not provide them as flags. +- **Argument Transformation:** It dynamically constructs the command-line arguments for the underlying Salesforce CLI command based on user selections and provided flags. +- **Error Handling:** It includes basic error handling, such as prompting the user to re-select an org if an issue occurs during org selection. +- **Deprecation Warning:** It explicitly logs warnings about the deprecation of `sfdx force:source:retrieve` to inform users about upcoming changes. +
## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion
-a | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | debugMode | | | | -| forceoverwrite
-f | boolean | forceoverwrite | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| manifest
-x | option | manifest | | | | -| metadata
-m | option | metadata | | | | -| packagenames
-n | option | packagenames | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| sourcepath
-p | option | sourcePath | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| tracksource
-t | boolean | tracksource | | | | -| verbose | boolean | verbose | | | | -| wait
-w | option | wait | | | | -| websocket | option | websocket | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-------:| +| apiversion
-a | option | Override the api version used for api requests made by this command | | | | +| debug
-d | boolean | debugMode | | | | +| flags-dir | option | undefined | | | | +| forceoverwrite
-f | boolean | forceoverwrite | | | | +| json | boolean | Format output as json. | | | | +| manifest
-x | option | manifest | | | | +| metadata
-m | option | metadata | | | | +| packagenames
-n | option | packagenames | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| sourcepath
-p | option | sourcePath | | | | +| target-org
-o | option | undefined | | | | +| tracksource
-t | boolean | tracksource | | | | +| verbose | boolean | verbose | | | | +| wait
-w | option | wait | | | | +| websocket | option | websocket | | | | ## Examples diff --git a/docs/hardis/work/new.md b/docs/hardis/work/new.md index a7e30e894..489c3d266 100644 --- a/docs/hardis/work/new.md +++ b/docs/hardis/work/new.md @@ -1,47 +1,55 @@ - + # hardis:work:new ## Description -Assisted menu to start working on a Salesforce task. -Advanced instructions in [Create New Task documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/) +## Command Behavior -At the end of the command, it will allow you to work on either a scratch org or a sandbox, depending on your choices. +**Assisted menu to start working on a Salesforce User Story, streamlining the setup of your development environment.** -Under the hood, it can: +This command guides you through the process of preparing your local environment and a Salesforce org for a new development or configuration based User Story. It automates several steps, ensuring consistency and adherence to project standards. -- Make **git pull** to be up to date with target branch -- Create **new git branch** with formatted name (you can override the choices using .sfdx-hardis.yml property **branchPrefixChoices**) -- Create and initialize a scratch org or a source-tracked sandbox (config can be defined using `config/.sfdx-hardis.yml`): -- (and for scratch org only for now): - - **Install packages** - - Use property `installedPackages` - - **Push sources** - - **Assign permission sets** - - Use property `initPermissionSets` - - **Run apex initialization scripts** - - Use property `scratchOrgInitApexScripts` - - **Load data** - - Use property `dataPackages` +Key features include: + +- **Git Branch Management:** Ensures your local Git repository is up-to-date with the target branch and creates a new Git branch with a formatted name based on your User Story details. Branch naming conventions can be customized via the `branchPrefixChoices` property in `.sfdx-hardis.yml`. 
+
+- **Org Provisioning & Initialization:** Facilitates the creation and initialization of either a scratch org or a source-tracked sandbox. The configuration for org initialization (e.g., package installation, source push, permission set assignments, Apex script execution, data loading) can be defined in `config/.sfdx-hardis.yml`.
+- **Project-Specific Configuration:** Supports defining multiple target branches (`availableTargetBranches`) and projects (`availableProjects`) in `.sfdx-hardis.yml`, allowing for tailored User Story workflows.
+
+- **User Story Name Validation:** Enforces User Story name formatting using `newTaskNameRegex` and provides examples via `newTaskNameRegexExample`.
+- **Shared Development Sandboxes:** Accounts for scenarios with shared development sandboxes, adjusting prompts to prevent accidental overwrites.
+
+Advanced instructions are available in the [Create New User Story documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/).
+
+<details markdown="1">
+<summary>Technical explanations</summary>
+
+The command's logic orchestrates various underlying processes:
+
+- **Git Operations:** Utilizes `checkGitClean`, `ensureGitBranch`, `gitCheckOutRemote`, and `git().pull()` to manage Git repository state and branches.
+- **Interactive Prompts:** Leverages the `prompts` library to gather user input for User Story type, source types, and User Story names.
+- **Configuration Management:** Reads and applies project-specific configurations from `.sfdx-hardis.yml` using `getConfig` and `setConfig`.
+- **Org Initialization Utilities:** Calls a suite of utility functions for org setup, including `initApexScripts`, `initOrgData`, `initOrgMetadatas`, `initPermissionSetAssignments`, `installPackages`, and `makeSureOrgIsConnected`.
+- **Salesforce CLI Interaction:** Executes Salesforce CLI commands (e.g., `sf config set target-org`, `sf org open`, `sf project delete tracking`) via `execCommand` and `execSfdxJson`.
+- **Dynamic Org Selection:** Presents choices for scratch orgs or sandboxes based on project configuration and existing orgs, dynamically calling `ScratchCreate.run` or `SandboxCreate.run` as needed.
+- **WebSocket Communication:** Sends refresh status messages via `WebSocketClient.sendRefreshStatusMessage()` to update connected VS Code clients.
+</details>
## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------------|:-------:|:---------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetdevhubusername
-v | option | username or alias for the dev hub org; overrides default dev hub org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:----------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-dev-hub
-v | option | undefined | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:work:task:new +$ sf hardis:work:new ``` diff --git a/docs/hardis/work/refresh.md b/docs/hardis/work/refresh.md index e9f2c1452..fc7220fc2 100644 --- a/docs/hardis/work/refresh.md +++ b/docs/hardis/work/refresh.md @@ -1,27 +1,63 @@ - + # hardis:work:refresh ## Description -Make my local branch and my scratch org up to date with the most recent sources + +## Command Behavior + +**Refreshes your local Git branch and Salesforce org with the latest content from another Git branch.** + +This command is designed to help developers keep their local development environment synchronized with changes made by other team members. It automates the process of pulling updates from a designated branch, merging them into your current working branch, and then pushing those changes to your scratch org or source-tracked sandbox. + +Key functionalities: + +- **Pre-Merge Check:** Prompts the user to confirm that they have saved their current work before proceeding with the merge, preventing accidental data loss. +- **Branch Selection:** Allows you to select a target Git branch (e.g., `integration`, `preprod`) from which to pull updates. +- **Git Operations:** Performs a series of Git operations: + - Pulls the latest version of the selected merge branch. + - Stashes your uncommitted local changes before merging. + - Merges the selected branch into your current local branch. + - Handles merge conflicts interactively, prompting the user to resolve them. + - Restores your stashed changes after the merge. +- **Org Synchronization:** Pushes the updated local branch content to your scratch org or source-tracked sandbox, ensuring your org reflects the latest merged code. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves project configurations using `getConfig` to determine the default development branch. +- **Git Integration:** Extensively uses `simple-git` (`git()`) for various Git operations: + - `git().branch()`: Lists local and remote branches. + - `git().stash()`: Saves and restores uncommitted changes. + - `git().fetch()`: Fetches updates from remote repositories. + - `git().checkout()`: Switches between branches. + - `git().pull()`: Pulls changes from a remote branch. + - `git().merge()`: Merges one branch into another, handling conflicts. +- **Interactive Prompts:** Uses the `prompts` library to guide the user through confirmations (e.g., saving work) and branch selection. +- **Salesforce CLI Integration:** It uses `forceSourcePull` to pull changes from the scratch org and `forceSourcePush` to push changes to the scratch org. +- **Error Handling:** Includes robust error handling for Git operations (e.g., merge conflicts) and provides guidance to the user for resolution. +- **Environment Variable Check:** Checks for an `EXPERIMENTAL` environment variable to gate access to this command, indicating it might not be fully stable. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| nopull
-n | boolean | No scratch pull before save (careful if you use that!) | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| nopull
-n | boolean | No scratch pull before save (careful if you use that!) | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:work:refresh +$ sf hardis:work:refresh ``` diff --git a/docs/hardis/work/resetselection.md b/docs/hardis/work/resetselection.md index b166701cf..fe548a608 100644 --- a/docs/hardis/work/resetselection.md +++ b/docs/hardis/work/resetselection.md @@ -1,29 +1,54 @@ - + # hardis:work:resetselection ## Description -Resets the selection that we want to add in the merge request -Calls a soft git reset behind the hood +## Command Behavior + +**Resets the local Git repository to allow for a new selection of files to be included in a merge request.** + +This command is designed to be used when you need to re-evaluate which changes should be part of your next merge request. It performs a soft Git reset, effectively unstaging all committed changes since the last merge with the target branch, and then cleans up any generated files. + +Key functionalities: + +- **Target Branch Selection:** Prompts you to select the target branch of your current or future merge request. +- **Soft Git Reset:** Performs a `git reset --soft` operation to uncommit changes, moving the HEAD pointer back but keeping the changes in your working directory. +- **Generated File Cleanup:** Resets and checks out `manifest/package.xml` and `manifest/destructiveChanges.xml` to their state before the reset, ensuring a clean slate for new selections. +- **Force Push Authorization:** Sets a flag in your user configuration (`canForcePush: true`) to allow a force push in the subsequent `hardis:work:save` command, as the history will have been rewritten. + +
+Technical explanations + +The command's technical implementation involves: + +- **Git Integration:** Uses `simple-git` (`git()`) to interact with the Git repository: + - `git().branch()`: Retrieves information about local and remote branches. + - `git().log()`: Fetches the commit history to determine which commits to reset. + - `git().reset()`: Performs the soft reset operation. + - `git().checkout()`: Resets specific files (`package.xml`, `destructiveChanges.xml`) to their previous state. + - `git().status()`: Displays the current status of the Git repository after the reset. +- **Interactive Prompts:** Uses the `prompts` library to confirm the reset operation with the user and to select the target branch. +- **Configuration Management:** Updates the user's configuration (`.sfdx-hardis.yml`) using `setConfig` to set the `canForcePush` flag. +- **Error Handling:** Includes a check to prevent resetting protected branches. +
## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:work:resetsave +$ sf hardis:work:resetsave ``` diff --git a/docs/hardis/work/save.md b/docs/hardis/work/save.md index cc2b287e8..f8c361ab9 100644 --- a/docs/hardis/work/save.md +++ b/docs/hardis/work/save.md @@ -1,19 +1,28 @@ - + # hardis:work:save ## Description -When a work task is completed, guide user to create a merge request -Advanced instructions in [Publish a task](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) +## Command Behavior -- Generate package-xml diff using sfdx-git-delta -- Automatically update `manifest/package.xml` and `manifest/destructiveChanges.xml` according to the committed updates -- Automatically Clean XML files using `.sfdx-hardis.yml` properties - - `autocleantypes`: List of auto-performed sources cleanings, available on command [hardis:project:clean:references](https://sfdx-hardis.cloudity.com/hardis/project/clean/references/) - - `autoRemoveUserPermissions`: List of userPermission to automatically remove from profile metadatas +**Guides the user through the process of saving their work, preparing it for a Merge Request (also named Pull Request), and pushing changes to the remote Git repository.** -Example: +This command automates several critical steps involved in finalizing a development User Story and integrating it into the main codebase. It ensures that your local changes are properly synchronized, cleaned, and committed before being pushed. + +Key functionalities include: + +- **Git Status Management:** Ensures a clean Git working directory by handling ongoing merges and unstaging files. +- **Org Synchronization (Optional):** Prompts the user to pull the latest metadata updates from their scratch org or source-tracked sandbox, ensuring local files reflect the org's state. 
+- **Package.xml Updates:** Automatically generates `package.xml` and `destructiveChanges.xml` files based on the Git delta between your current branch and the target branch, reflecting added, modified, and deleted metadata. +- **Automated Source Cleaning:** Applies predefined cleaning operations to your local Salesforce sources, such as removing unwanted references, minimizing profiles, or cleaning XML files based on configurations in your `.sfdx-hardis.yml`. + - `autoCleanTypes`: A list of automated source cleanings, configurable via [hardis:project:clean:references](${CONSTANTS.DOC_URL_ROOT}/hardis/project/clean/references/). + - `autoRemoveUserPermissions`: A list of user permissions to automatically remove from profile metadata. +- **Deployment Plan Generation:** Builds an automated deployment plan based on the updated `package.xml` and configured deployment splits. +- **Commit and Push:** Guides the user to commit the changes and push them to the remote Git repository, optionally handling force pushes if a branch reset occurred. +- **Merge Request Guidance:** Provides information and links to facilitate the creation of a merge request after the changes are pushed. + +Example `.sfdx-hardis.yml` configuration: ```yaml autoCleanTypes: @@ -34,34 +43,49 @@ autoRemoveUserPermissions: - WorkCalibrationUser ``` -- Push commit to server - +Advanced instructions are available in the [Publish a User Story documentation](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-publish-task/). + +
+Technical explanations + +The command's technical implementation involves a series of orchestrated steps: + +- **Git Integration:** Extensively uses the `git` utility for status checks, adding files, committing, and pushing. It also leverages `sfdx-git-delta` for generating metadata differences between Git revisions. +- **Interactive Prompts:** Employs the `prompts` library to interact with the user for decisions like pulling sources or pushing commits. +- **Configuration Management:** Reads and updates project and user configurations using `getConfig` and `setConfig` to store preferences and deployment plans. +- **Metadata Synchronization:** Calls `forceSourcePull` to retrieve metadata from the org and `callSfdxGitDelta` to generate `package.xml` and `destructiveChanges.xml` based on Git changes. +- **XML Manipulation:** Utilizes `appendPackageXmlFilesContent`, `removePackageXmlFilesContent`, `parseXmlFile`, and `writeXmlFile` for modifying `package.xml` and `destructiveChanges.xml` files. +- **Automated Cleaning:** Integrates with `CleanReferences.run` and `CleanXml.run` commands to perform automated cleaning operations on the Salesforce source files. +- **Deployment Plan Building:** Dynamically constructs a deployment plan by analyzing the `package.xml` content and applying configured deployment splits. +- **WebSocket Communication:** Uses `WebSocketClient.sendRefreshStatusMessage` to notify connected VS Code clients about status updates. +- **External Tool Integration:** Requires the `sfdx-git-delta` plugin to be installed for its core functionality. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:----------------------|:-------:|:--------------------------------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| apiversion | option | override the api version used for api requests made by this command | | | | -| auto | boolean | No user prompts (when called from CI for example) | | | | -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| noclean
-c | boolean | No cleaning of local sources | | | | -| nogit
-g | boolean | No automated git operations | | | | -| nopull
-n | boolean | No scratch pull before save | | | | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| targetbranch | option | Name of the Merge Request target branch. Will be guessed or prompted if not provided. | | | | -| targetusername
-u | option | username or alias for the target org; overrides default target org | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:------------------|:-------:|:--------------------------------------------------------------------------------------|:-------:|:--------:|:-------:| +| auto | boolean | No user prompts (when called from CI for example) | | | | +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| noclean
-c | boolean | No cleaning of local sources | | | | +| nogit
-g | boolean | No automated git operations | | | | +| nopull
-n | boolean | No scratch pull before save | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| target-org
-o | option | undefined | | | | +| targetbranch | option | Name of the Merge Request target branch. Will be guessed or prompted if not provided. | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:work:task:save +$ sf hardis:work:task:save ``` ```shell -sfdx hardis:work:task:save --nopull --nogit --noclean +$ sf hardis:work:task:save --nopull --nogit --noclean ``` diff --git a/docs/hardis/work/ws.md b/docs/hardis/work/ws.md index f12127e25..d46f3a3d0 100644 --- a/docs/hardis/work/ws.md +++ b/docs/hardis/work/ws.md @@ -1,25 +1,46 @@ - + # hardis:work:ws ## Description -Technical calls to WebSocket functions + +## Command Behavior + +**Performs technical operations related to WebSocket communication, primarily for internal use by the sfdx-hardis VS Code Extension.** + +This command is not intended for direct end-user interaction. It facilitates communication between the sfdx-hardis CLI and the VS Code Extension, enabling features like real-time status updates and plugin refreshes. + +Key functionalities: + +- **Refresh Status (`--event refreshStatus`):** Sends a message to the VS Code Extension to refresh its displayed status, ensuring that the UI reflects the latest state of Salesforce orgs or project activities. +- **Refresh Plugins (`--event refreshPlugins`):** Sends a message to the VS Code Extension to refresh its loaded plugins, useful after installing or updating sfdx-hardis or other related extensions. + +
+Technical explanations + +The command's technical implementation involves: + +- **WebSocketClient:** It utilizes the `WebSocketClient` utility to establish and manage WebSocket connections. +- **Event-Driven Communication:** It listens for specific events (e.g., `refreshStatus`, `refreshPlugins`) and triggers corresponding actions on the connected WebSocket client. +- **Internal Use:** This command is primarily called programmatically by the VS Code Extension to maintain synchronization and provide a seamless user experience. +
+ ## Parameters -| Name | Type | Description | Default | Required | Options | -|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-----------------------------------------------------:| -| debug
-d | boolean | Activate debug mode (more logs) | | | | -| event
-e | option | WebSocket event | | | | -| json | boolean | format output as json | | | | -| loglevel | option | logging level for this command invocation | warn | | trace
debug
info
warn
error
fatal | -| skipauth | boolean | Skip authentication check when a default username is required | | | | -| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | +| Name | Type | Description | Default | Required | Options | +|:-------------|:-------:|:--------------------------------------------------------------|:-------:|:--------:|:-------:| +| debug
-d | boolean | Activate debug mode (more logs) | | | | +| event
-e | option | WebSocket event | | | | +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| skipauth | boolean | Skip authentication check when a default username is required | | | | +| websocket | option | Websocket host:port for VsCode SFDX Hardis UI integration | | | | ## Examples ```shell -sfdx hardis:work:ws --event refreshStatus +$ sf hardis:work:ws --event refreshStatus ``` diff --git a/docs/hello/world.md b/docs/hello/world.md new file mode 100644 index 000000000..2bc7b3cc8 --- /dev/null +++ b/docs/hello/world.md @@ -0,0 +1,49 @@ + +# hello:world + +## Description + + +## Command Behavior + +**Says hello to the world or a specified person.** + +This is a simple command used for demonstration purposes. It outputs a greeting message to the console. + +Key functionalities: + +- **Customizable Greeting:** You can specify a name using the `--name` flag to personalize the greeting. +- **Timestamp:** The greeting includes the current date. + +
+Technical explanations + +The command's technical implementation involves: + +- **Flag Parsing:** It parses the `--name` flag to get the recipient of the greeting. +- **Date Retrieval:** It gets the current date using `new Date().toDateString()`. +- **Console Output:** It constructs the greeting message using the provided name and the current date, and then logs it to the console using `this.log()`. +
+ + +## Parameters + +| Name | Type | Description | Default | Required | Options | +|:------------|:-------:|:----------------------------------------|:-------:|:--------:|:-------:| +| flags-dir | option | undefined | | | | +| json | boolean | Format output as json. | | | | +| name
-n | option | This person can be anyone in the world! | World | | | + +## Examples + +```shell +Say hello to the world: +<%= config.bin %> <%= command.id %> +``` + +```shell +Say hello to someone you know: +<%= config.bin %> <%= command.id %> --name Astro +``` + + diff --git a/docs/index.md b/docs/index.md index e1f8d9a3e..a3a287d70 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,10 +1,9 @@ - + -[![sfdx-hardis by Cloudity Banner](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/sfdx-hardis-banner.png)](https://sfdx-hardis.cloudity.com) -# sfdx-hardis +[![sfdx-hardis by Cloudity Banner](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/sfdx-hardis-banner.png)](https://sfdx-hardis.cloudity.com) -[_Presented at Dreamforce 23!_](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl) +_Presented at_ [_Dreamforce 23_](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl) _and [_Dreamforce 24!_](https://reg.salesforce.com/flow/plus/df24/sessioncatalog/page/catalog/session/1718915808069001Q7HH)_ [![Version](https://img.shields.io/npm/v/sfdx-hardis.svg)](https://npmjs.org/package/sfdx-hardis) [![Downloads/week](https://img.shields.io/npm/dw/sfdx-hardis.svg)](https://npmjs.org/package/sfdx-hardis) @@ -17,35 +16,44 @@ [![License](https://img.shields.io/npm/l/sfdx-hardis.svg)](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/package.json) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com) -Toolbox for Salesforce DX, by [Cloudity](https://cloudity.com/), natively compliant with most platforms and tools +Sfdx-hardis is a Salesforce CLI Plugin, by [**Cloudity**](https://cloudity.com/) & friends, natively compliant with most Git platforms, messaging tools, ticketing systems and AI providers. 
![Native Integrations](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/integrations.png) It will allow you to: - Do with simple commands what could be done manually in minutes/hours -- [Define a complete CI/CD Pipeline for your Salesforce project](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) -- [Backup Metadatas and monitor any Salesforce org](https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/) +- [Define a **ready to use CI/CD Pipeline** for your Salesforce project](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) +- [**Backup Metadatas** and **monitor any Salesforce org**](https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/) +- [Generate your **project documentation**](https://sfdx-hardis.cloudity.com/salesforce-project-documentation/), including AI-generated description and Flow Visual History -[**Please see the full list of commands in Online documentation**](https://sfdx-hardis.cloudity.com) +[_Please see the full list of commands in Online documentation_](https://sfdx-hardis.cloudity.com) -**sfdx-hardis** commands are also available with UI in [**SFDX Hardis Visual Studio Code Extension**](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) +___ + +**sfdx-hardis** commands and configuration are best used with an UI in [**SFDX Hardis Visual Studio Code Extension**](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) [![VsCode SFDX Hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/extension-demo.gif)](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) +___ + _See Dreamforce presentation_ -[![See Dreamforce presentation](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs) +[![See Dreamforce 
presentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/play-dreamforce-session.png)](https://www.youtube.com/watch?v=o0Mm9F07UFs) ## Installation + + ### With IDE You can install [Visual Studio Code](https://code.visualstudio.com/) extension [VsCode SFDX Hardis](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) Once installed, click on ![Hardis Group button](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/hardis-button.jpg) in VsCode left bar, and follow the additional installation instructions -[![Installation tutorial](https://img.youtube.com/vi/LA8m-t7CjHA/0.jpg)](https://www.youtube.com/watch?v=LA8m-t7CjHA) +[![Installation tutorial](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/play-install-tuto.png)](https://www.youtube.com/watch?v=LA8m-t7CjHA) + +___ ### As SFDX Plugin @@ -57,26 +65,27 @@ Once installed, click on ![Hardis Group button](https://github.com/hardisgroupco #### Plugin installation ```sh-session -sfdx plugins:install sfdx-hardis +sf plugins install sfdx-hardis ``` For advanced use, please also install dependencies ```sh-session sf plugins install @salesforce/plugin-packaging -sfdx plugins:install sfdmu -sfdx plugins:install sfdx-git-delta -sfdx plugins:install sfdx-essentials -sfdx plugins:install texei-sfdx-plugin +sf plugins install sfdx-git-delta +sf plugins install sfdmu ``` -If you are using CI/CD scripts, use `echo y | sfdx plugins:install ...` to bypass prompt. +If you are using CI/CD scripts, use `echo y | sf plugins install ...` to bypass prompt. 
+ +___ ### Docker You can use sfdx-hardis docker images to run in CI - Docker Hub + - [**hardisgroupcom/sfdx-hardis:latest**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with latest @salesforce/cli version) - [**hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) @@ -86,18 +95,85 @@ You can use sfdx-hardis docker images to run in CI _See [Dockerfile](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/Dockerfile)_ + + ## Usage ```sh-session -sfdx hardis: +sf hardis: ``` -## Articles +## Events + + + +### London's Calling '25, London + +Auto-generate your SF project Documentation site with open-source and Agentforce + +![image](https://github.com/user-attachments/assets/9b99120c-b660-4f67-b734-793148ac9d00) + +### Czech Dreamin '25, Prague + +Auto-generate your SF project Documentation site with open-source and Agentforce, with [Mariia Pyvovarchuk](https://www.linkedin.com/in/mpyvo/) + +![Czech Dreamin 2025](https://github.com/user-attachments/assets/fa7b7f12-6d6a-437c-badd-20a626bb2163) + +### Trailblazer Admin Group '25, Lyon + +Techs for Admins: Afterwork Salesforce Inspector Reloaded & sfdx-hardis, with Thomas Prouvot + +![](https://github.com/user-attachments/assets/90621fe0-6527-4a34-8a0b-c14bd6d21cbd) + +### Dreamforce 2024, San Francisco + +[Save the Day by Monitoring Your Org with Open-Source Tools](https://reg.salesforce.com/flow/plus/df24/sessioncatalog/page/catalog/session/1718915808069001Q7HH), with Olga Shirikova + +[![Dreamforce 2024 Video](https://img.youtube.com/vi/NxiLiYeo11A/0.jpg)](https://www.youtube.com/watch?v=NxiLiYeo11A) + +### Wir Sind Ohana '24, Berlin + +Automate the Monitoring of your Salesforce orgs with open-source tools only!, with Yosra Saidani + +[![Wir Sind Ohana Video](https://img.youtube.com/vi/xGbT6at7RZ0/0.jpg)](https://www.youtube.com/watch?v=xGbT6at7RZ0) + +### 
Polish Dreamin '24, Wroclaw, Poland + +[Easy and complete Salesforce CI/CD with open-source only!](https://coffeeforce.pl/dreamin/speaker/nicolas-vuillamy/), with Wojciech Suwiński + +![Polish Dreamin 2024](https://github.com/nvuillam/nvuillam/assets/17500430/e843cc08-bf8a-452d-b7f0-c64a314f1b60) + +### French Touch Dreamin '23, Paris + +[Automate the Monitoring of your Salesforce orgs with open-source tools only!](https://frenchtouchdreamin.com/index.php/schedule/), with Maxime Guenego + +![French Touch Dreamin 2023](https://github.com/nvuillam/nvuillam/assets/17500430/8a2e1bbf-3402-4929-966d-5f99cb13cd29) + +### Dreamforce 2023, San Francisco + +[Easy Salesforce CI/CD with open-source and clicks only thanks to sfdx-hardis!](https://reg.salesforce.com/flow/plus/df23/sessioncatalog/page/catalog/session/1684196389783001OqEl), with Jean-Pierre Rizzi + +[![Dreamforce 2023 Video](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs) + +### Yeur Dreamin' 2023, Brussels + +An easy and complete Salesforce CI/CD release management with open-source only !, with Angélique Picoreau + +[![image](https://github.com/nvuillam/nvuillam/assets/17500430/6470df20-7449-444b-a0a5-7dc22f5f6188)](https://www.linkedin.com/posts/nicolas-vuillamy_cicd-opensource-trailblazercommunity-activity-7076859027321704448-F1g-?utm_source=share&utm_medium=member_desktop) + + + +## Articles & Videos + + + +### Web Articles Here are some articles about [sfdx-hardis](https://sfdx-hardis.cloudity.com/) - English +[![Conga Deployment Cheat Sheet](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-conga-banner.jpg)](https://nicolas.vuillamy.fr/how-to-deploy-conga-composer-configuration-using-salesforce-cli-plugins-c2899641f36b) 
[![Questions/Answers](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-questions-answers.jpg)](https://nicolas.vuillamy.fr/what-devops-experts-want-to-know-about-salesforce-ci-cd-with-sfdx-hardis-q-a-1f412db34476) [![Salesforce Developers Podcast](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-sfdev.jpg)](https://developer.salesforce.com/podcast/2023/06/sfdx) [![sfdx-hardis: A release management tool for open-source](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-cicd-salesforcedevopsnet.jpg)](https://salesforcedevops.net/index.php/2023/03/01/sfdx-hardis-open-source-salesforce-release-management/) @@ -114,9 +190,115 @@ Here are some articles about [sfdx-hardis](https://sfdx-hardis.cloudity.com/) - [Exporter en masse les fichiers d’une org Salesforce](https://leblog.hardis-group.com/portfolio/exporter-en-masse-les-fichiers-dune-org-salesforce/) - [Suspendre l’accès aux utilisateurs lors d’une mise en production Salesforce](https://leblog.hardis-group.com/portfolio/suspendre-lacces-aux-utilisateurs-lors-dune-mise-en-production-salesforce/) +### Recorded Conferences + +#### Dreamforce Sessions + +- Dreamforce 2024 - Save the Day by Monitoring Your Org with Open-Source Tools (with Olga Shirikova) + +[![Dreamforce 2024: Save the Day by Monitoring Your Org with Open-Source Tools](https://img.youtube.com/vi/NxiLiYeo11A/0.jpg)](https://www.youtube.com/watch?v=NxiLiYeo11A){target=blank} + +- Dreamforce 2023 - Easy Salesforce CI/CD with open-source and clicks only thanks to sfdx-hardis! (with Jean-Pierre Rizzi) + +[![Dreamforce 2023: Easy Salesforce CI/CD with open-source](https://img.youtube.com/vi/o0Mm9F07UFs/0.jpg)](https://www.youtube.com/watch?v=o0Mm9F07UFs){target=blank} + +#### Community Events + +- Wir Sind Ohana 2024 - Automate the Monitoring of your Salesforce orgs with open-source tools only! 
(with Yosra Saidani) + +[![Wir Sind Ohana 2024: Automate Monitoring with Open-Source](https://img.youtube.com/vi/xGbT6at7RZ0/0.jpg)](https://www.youtube.com/watch?v=xGbT6at7RZ0){target=blank} + +### Podcasts + +- Apex Hours 2025 - Org monitoring with Grafana + AI generated doc + +[![Apex Hours 2025: Org monitoring with Grafana + AI generated doc](https://img.youtube.com/vi/oDaCh66pRcI/0.jpg)](https://www.youtube.com/watch?v=oDaCh66pRcI){target=blank} + +- Salesforce Way Podcast #102 - Sfdx-hardis with Nicolas Vuillamy + +[![Salesforce Way Podcast: Sfdx-hardis](https://img.youtube.com/vi/sfdx-hardis/0.jpg)](https://salesforceway.com/podcast/sfdx-hardis/){target=blank} + +- Salesforce Developers Podcast Episode 182: SFDX-Hardis with Nicolas Vuillamy + +[![Salesforce Developers Podcast](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-sfdev.jpg)](https://developer.salesforce.com/podcast/2023/06/sfdx){target=blank} + +### sfdx-hardis Usage + +#### Features Overview + +- sfdx-hardis 2025 new features overview + +[![sfdx-hardis 2025 new features](https://img.youtube.com/vi/JRKH5COUVQ0/0.jpg)](https://youtu.be/JRKH5COUVQ0){target=blank} + +- SFDX-HARDIS – A demo with Nicolas Vuillamy from Cloudity + +[![SalesforceDevOps.net Demo](https://img.youtube.com/vi/qP6MaZUGzik/0.jpg)](https://www.youtube.com/watch?v=qP6MaZUGzik){target=blank} + +#### Installation & Setup + +- Complete installation tutorial for sfdx-hardis - [📖 Documentation](https://sfdx-hardis.cloudity.com/installation/) + +[![Installation Tutorial](https://img.youtube.com/vi/LA8m-t7CjHA/0.jpg)](https://www.youtube.com/watch?v=LA8m-t7CjHA){target=blank} + +#### CI/CD Workflows + +- Complete CI/CD workflow for Salesforce projects - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) + +[![Dreamforce demo video: Easy Salesforce CI/CD with sfdx-hardis and open-source only 
!](https://img.youtube.com/vi/zEYqTd2txU4/0.jpg)](https://www.youtube.com/watch?v=zEYqTd2txU4){target=blank} + +- How to start a new User Story in sandbox - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/) + +[![Create New User Story](https://img.youtube.com/vi/WOqssZwjPhw/0.jpg)](https://www.youtube.com/watch?v=WOqssZwjPhw){target=blank} + +- How to commit updates and create merge requests - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) + +[![Publish User Story Tutorial](https://img.youtube.com/vi/Ik6whtflmfY/0.jpg)](https://www.youtube.com/watch?v=Ik6whtflmfY){target=blank} + +- How to resolve git merge conflicts in Visual Studio Code - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-validate-merge-request/) + +[![Merge Conflicts Resolution](https://img.youtube.com/vi/lz5OuKzvadQ/0.jpg)](https://www.youtube.com/watch?v=lz5OuKzvadQ){target=blank} + +- How to install packages in your org - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-work-on-task-install-packages/) + +[![Install Packages Tutorial](https://img.youtube.com/vi/5-MgqoSLUls/0.jpg)](https://www.youtube.com/watch?v=5-MgqoSLUls){target=blank} + +- Configure CI server authentication to Salesforce orgs - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/) + +[![Configure CI Authentication](https://img.youtube.com/vi/OzREUu5utVI/0.jpg)](https://www.youtube.com/watch?v=OzREUu5utVI){target=blank} + +#### Monitoring + +- How to configure monitoring for your Salesforce org - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-monitoring-config-home/) + +[![Org Monitoring Setup](https://img.youtube.com/vi/bcVdN0XItSc/0.jpg)](https://www.youtube.com/watch?v=bcVdN0XItSc){target=blank} + +#### Integrations + +- Configure Slack integration for deployment notifications - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-slack/) + 
+[![Slack Integration](https://img.youtube.com/vi/se292ABGUmI/0.jpg)](https://www.youtube.com/watch?v=se292ABGUmI){target=blank} + +- How to create a Personal Access Token in GitLab - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-clone-repository/) + +[![GitLab Personal Access Token](https://img.youtube.com/vi/9y5VmmYHuIg/0.jpg)](https://www.youtube.com/watch?v=9y5VmmYHuIg){target=blank} + +#### Documentation + +- How to generate AI-enhanced Salesforce project documentation - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-project-doc-generate/) + +[![Generate Project Documentation](https://img.youtube.com/vi/ZrVPN3jp1Ac/0.jpg)](https://www.youtube.com/watch?v=ZrVPN3jp1Ac){target=blank} + +- Host your documentation on Cloudflare free tier - [📖 Documentation](https://sfdx-hardis.cloudity.com/salesforce-project-doc-cloudflare/) + +[![Cloudflare Doc Hosting Setup](https://img.youtube.com/vi/AUipbKjgsDI/0.jpg)](https://www.youtube.com/watch?v=AUipbKjgsDI){target=blank} + + + ## Contributing -Anyone is welcome to contribute to this sfdx-hardis + + +Everyone is welcome to contribute to sfdx-hardis (even juniors: we'll assist you !) 
- Install Node.js ([recommended version](https://nodejs.org/en/)) - Install typescript by running `npm install typescript --global` @@ -127,23 +309,43 @@ Anyone is welcome to contribute to this sfdx-hardis - Run `yarn` to install dependencies - Run `sf plugins link` to link the local sfdx-hardis to SFDX CLI - Run `tsc --watch` to transpile typescript into js everytime you update a TS file -- Debug commands using `NODE_OPTIONS=--inspect-brk sfdx hardis:somecommand -someparameter somevalue` +- Debug commands using `NODE_OPTIONS=--inspect-brk sf hardis:somecommand -someparameter somevalue` + + ## Dependencies **sfdx-hardis** partially relies on the following SFDX Open-Source packages - [Salesforce Data Move Utility](https://github.com/forcedotcom/SFDX-Data-Move-Utility) -- [SFDX Essentials](https://github.com/nvuillam/sfdx-essentials) - [SFDX Git Delta](https://github.com/scolladon/sfdx-git-delta) -- [Texei Sfdx Plugin](https://github.com/texei/texei-sfdx-plugin) ## Contributors + + +### Organization + +sfdx-hardis is primarily led by Nicolas Vuillamy & [Cloudity](https://www.cloudity.com/), but has many external contributors that we cant thank enough ! 
+ +### Pull Requests Authors + +### Special Thanks + +- [Roman Hentschke](https://www.linkedin.com/in/derroman/), for building the BitBucket CI/CD integration +- [Leo Jokinen](https://www.linkedin.com/in/leojokinen/), for building the GitHub CI/CD integration +- [Mariia Pyvovarchuk](https://www.linkedin.com/in/mpyvo/), for her work about generating automations documentation +- [Matheus Delazeri](https://www.linkedin.com/in/matheus-delazeri-souza/), for the PDF output of documentation +- [Taha Basri](https://www.linkedin.com/in/tahabasri/), for his work about generating documentation of LWC +- [Anush Poudel](https://www.linkedin.com/in/anushpoudel/), for integrating sfdx-hardis with multiple LLMs using langchainJs +- [Sebastien Colladon](https://www.linkedin.com/in/sebastien-colladon/), for providing sfdx-git-delta which is highly used within sfdx-hardis + + + ## Commands @@ -152,155 +354,211 @@ Anyone is welcome to contribute to this sfdx-hardis | Command | Title | |:----------------------------------------------|:------| -| [**hardis:auth:login**](hardis/auth/login.md) | Login | +| [**hardis:auth:login**](hardis/auth/login.md) | | ### hardis:cache -| Command | Title | -|:------------------------------------------------|:------------------------| -| [**hardis:cache:clear**](hardis/cache/clear.md) | Clear sfdx-hardis cache | +| Command | Title | +|:------------------------------------------------|:------| +| [**hardis:cache:clear**](hardis/cache/clear.md) | | ### hardis:config -| Command | Title | -|:----------------------------------------------|:-------------------------------| -| [**hardis:config:get**](hardis/config/get.md) | Deploy metadata sources to org | +| Command | Title | +|:----------------------------------------------|:------| +| [**hardis:config:get**](hardis/config/get.md) | | + +### hardis:deploy + +| Command | Title | +|:--------------------------------------------------------|:------| +| [**hardis:deploy:quick**](hardis/deploy/quick.md) | | +| 
[**hardis:deploy:start**](hardis/deploy/start.md) | | +| [**hardis:deploy:validate**](hardis/deploy/validate.md) | | ### hardis:doc -| Command | Title | -|:----------------------------------------------------------------------------|:-----------------------------------| -| [**hardis:doc:extract:permsetgroups**](hardis/doc/extract/permsetgroups.md) | Generate project documentation | -| [**hardis:doc:plugin:generate**](hardis/doc/plugin/generate.md) | Generate SFDX Plugin Documentation | +| Command | Title | +|:----------------------------------------------------------------------------|:------| +| [**hardis:doc:extract:permsetgroups**](hardis/doc/extract/permsetgroups.md) | | +| [**hardis:doc:fieldusage**](hardis/doc/fieldusage.md) | | +| [**hardis:doc:flow2markdown**](hardis/doc/flow2markdown.md) | | +| [**hardis:doc:mkdocs-to-cf**](hardis/doc/mkdocs-to-cf.md) | | +| [**hardis:doc:mkdocs-to-salesforce**](hardis/doc/mkdocs-to-salesforce.md) | | +| [**hardis:doc:override-prompts**](hardis/doc/override-prompts.md) | | +| [**hardis:doc:packagexml2markdown**](hardis/doc/packagexml2markdown.md) | | +| [**hardis:doc:plugin:generate**](hardis/doc/plugin/generate.md) | | +| [**hardis:doc:project2markdown**](hardis/doc/project2markdown.md) | | + +### hardis:git + +| Command | Title | +|:----------------------------------------------------------------------------|:------| +| [**hardis:git:pull-requests:extract**](hardis/git/pull-requests/extract.md) | | ### hardis:lint -| Command | Title | -|:----------------------------------------------------------------------|:-------------------------------------------| -| [**hardis:lint:access**](hardis/lint/access.md) | check permission access | -| [**hardis:lint:metadatastatus**](hardis/lint/metadatastatus.md) | check inactive metadatas | -| [**hardis:lint:missingattributes**](hardis/lint/missingattributes.md) | check missing description on custom fields | -| [**hardis:lint:unusedmetadatas**](hardis/lint/unusedmetadatas.md) | check 
unused labels and custom permissions | +| Command | Title | +|:----------------------------------------------------------------------|:------| +| [**hardis:lint:access**](hardis/lint/access.md) | | +| [**hardis:lint:metadatastatus**](hardis/lint/metadatastatus.md) | | +| [**hardis:lint:missingattributes**](hardis/lint/missingattributes.md) | | +| [**hardis:lint:unusedmetadatas**](hardis/lint/unusedmetadatas.md) | | ### hardis:mdapi -| Command | Title | -|:--------------------------------------------------|:-----------------------------------------------------------------------------------------------| -| [**hardis:mdapi:deploy**](hardis/mdapi/deploy.md) | sfdx-hardis wrapper for sfdx force:mdapi:deploy that displays tips to solve deployment errors. | +| Command | Title | +|:--------------------------------------------------|:------| +| [**hardis:mdapi:deploy**](hardis/mdapi/deploy.md) | | ### hardis:misc -| Command | Title | -|:----------------------------------------------------|:------------| -| [**hardis:misc:toml2csv**](hardis/misc/toml2csv.md) | TOML to CSV | +| Command | Title | +|:--------------------------------------------------------------------------------------|:------| +| [**hardis:misc:custom-label-translations**](hardis/misc/custom-label-translations.md) | | +| [**hardis:misc:purge-references**](hardis/misc/purge-references.md) | | +| [**hardis:misc:servicenow-report**](hardis/misc/servicenow-report.md) | | +| [**hardis:misc:toml2csv**](hardis/misc/toml2csv.md) | | ### hardis:org -| Command | Title | -|:--------------------------------------------------------------------------------------|:------------------------------------------------------| -| [**hardis:org:configure:data**](hardis/org/configure/data.md) | Configure Data project | -| [**hardis:org:configure:files**](hardis/org/configure/files.md) | Configure File export project | -| [**hardis:org:configure:monitoring**](hardis/org/configure/monitoring.md) | Configure org monitoring | -| 
[**hardis:org:connect**](hardis/org/connect.md) | Connect to an org | -| [**hardis:org:create**](hardis/org/create.md) | Create sandbox org | -| [**hardis:org:data:delete**](hardis/org/data/delete.md) | Delete data | -| [**hardis:org:data:export**](hardis/org/data/export.md) | Export data | -| [**hardis:org:data:import**](hardis/org/data/import.md) | Import data | -| [**hardis:org:diagnose:audittrail**](hardis/org/diagnose/audittrail.md) | Diagnose content of Setup Audit Trail | -| [**hardis:org:diagnose:legacyapi**](hardis/org/diagnose/legacyapi.md) | Check for legacy API use | -| [**hardis:org:diagnose:licenses**](hardis/org/diagnose/licenses.md) | List licenses subscribed and used in a Salesforce org | -| [**hardis:org:diagnose:unusedlicenses**](hardis/org/diagnose/unusedlicenses.md) | Detect unused Permission Set Licenses | -| [**hardis:org:diagnose:unusedusers**](hardis/org/diagnose/unusedusers.md) | Detect unused Users in Salesforce | -| [**hardis:org:files:export**](hardis/org/files/export.md) | Export files | -| [**hardis:org:fix:listviewmine**](hardis/org/fix/listviewmine.md) | Fix listviews with | -| [**hardis:org:generate:packagexmlfull**](hardis/org/generate/packagexmlfull.md) | Generate Full Org package.xml | -| [**hardis:org:monitor:all**](hardis/org/monitor/all.md) | Monitor org | -| [**hardis:org:monitor:backup**](hardis/org/monitor/backup.md) | Backup DX sources | -| [**hardis:org:monitor:limits**](hardis/org/monitor/limits.md) | Check org limits | -| [**hardis:org:purge:apexlog**](hardis/org/purge/apexlog.md) | Purge Apex Logs | -| [**hardis:org:purge:flow**](hardis/org/purge/flow.md) | Purge Flow versions | -| [**hardis:org:retrieve:packageconfig**](hardis/org/retrieve/packageconfig.md) | Retrieve package configuration from an org | -| [**hardis:org:retrieve:sources:analytics**](hardis/org/retrieve/sources/analytics.md) | Retrieve CRM Analytics configuration from an org | -| [**hardis:org:retrieve:sources:dx**](hardis/org/retrieve/sources/dx.md) 
| Retrieve sfdx sources from org | -| [**hardis:org:retrieve:sources:dx2**](hardis/org/retrieve/sources/dx2.md) | Retrieve sfdx sources from org (2) | -| [**hardis:org:retrieve:sources:metadata**](hardis/org/retrieve/sources/metadata.md) | Retrieve sfdx sources from org | -| [**hardis:org:retrieve:sources:retrofit**](hardis/org/retrieve/sources/retrofit.md) | Retrofit changes from an org | -| [**hardis:org:select**](hardis/org/select.md) | Select org | -| [**hardis:org:test:apex**](hardis/org/test/apex.md) | Run apex tests | -| [**hardis:org:user:activateinvalid**](hardis/org/user/activateinvalid.md) | Reactivate sandbox invalid users | -| [**hardis:org:user:freeze**](hardis/org/user/freeze.md) | Freeze user logins | -| [**hardis:org:user:unfreeze**](hardis/org/user/unfreeze.md) | Unfreeze user logins | +| Command | Title | +|:----------------------------------------------------------------------------------------------|:------| +| [**hardis:org:community:update**](hardis/org/community/update.md) | | +| [**hardis:org:configure:data**](hardis/org/configure/data.md) | | +| [**hardis:org:configure:files**](hardis/org/configure/files.md) | | +| [**hardis:org:configure:monitoring**](hardis/org/configure/monitoring.md) | | +| [**hardis:org:connect**](hardis/org/connect.md) | | +| [**hardis:org:create**](hardis/org/create.md) | | +| [**hardis:org:data:delete**](hardis/org/data/delete.md) | | +| [**hardis:org:data:export**](hardis/org/data/export.md) | | +| [**hardis:org:data:import**](hardis/org/data/import.md) | | +| [**hardis:org:diagnose:audittrail**](hardis/org/diagnose/audittrail.md) | | +| [**hardis:org:diagnose:instanceupgrade**](hardis/org/diagnose/instanceupgrade.md) | | +| [**hardis:org:diagnose:legacyapi**](hardis/org/diagnose/legacyapi.md) | | +| [**hardis:org:diagnose:licenses**](hardis/org/diagnose/licenses.md) | | +| [**hardis:org:diagnose:releaseupdates**](hardis/org/diagnose/releaseupdates.md) | | +| 
[**hardis:org:diagnose:unused-apex-classes**](hardis/org/diagnose/unused-apex-classes.md) | | +| [**hardis:org:diagnose:unused-connected-apps**](hardis/org/diagnose/unused-connected-apps.md) | | +| [**hardis:org:diagnose:unusedlicenses**](hardis/org/diagnose/unusedlicenses.md) | | +| [**hardis:org:diagnose:unusedusers**](hardis/org/diagnose/unusedusers.md) | | +| [**hardis:org:files:export**](hardis/org/files/export.md) | | +| [**hardis:org:files:import**](hardis/org/files/import.md) | | +| [**hardis:org:fix:listviewmine**](hardis/org/fix/listviewmine.md) | | +| [**hardis:org:generate:packagexmlfull**](hardis/org/generate/packagexmlfull.md) | | +| [**hardis:org:monitor:all**](hardis/org/monitor/all.md) | | +| [**hardis:org:monitor:backup**](hardis/org/monitor/backup.md) | | +| [**hardis:org:monitor:limits**](hardis/org/monitor/limits.md) | | +| [**hardis:org:multi-org-query**](hardis/org/multi-org-query.md) | | +| [**hardis:org:purge:apexlog**](hardis/org/purge/apexlog.md) | | +| [**hardis:org:purge:flow**](hardis/org/purge/flow.md) | | +| [**hardis:org:refresh:after-refresh**](hardis/org/refresh/after-refresh.md) | | +| [**hardis:org:refresh:before-refresh**](hardis/org/refresh/before-refresh.md) | | +| [**hardis:org:retrieve:packageconfig**](hardis/org/retrieve/packageconfig.md) | | +| [**hardis:org:retrieve:sources:analytics**](hardis/org/retrieve/sources/analytics.md) | | +| [**hardis:org:retrieve:sources:dx**](hardis/org/retrieve/sources/dx.md) | | +| [**hardis:org:retrieve:sources:dx2**](hardis/org/retrieve/sources/dx2.md) | | +| [**hardis:org:retrieve:sources:metadata**](hardis/org/retrieve/sources/metadata.md) | | +| [**hardis:org:retrieve:sources:retrofit**](hardis/org/retrieve/sources/retrofit.md) | | +| [**hardis:org:select**](hardis/org/select.md) | | +| [**hardis:org:test:apex**](hardis/org/test/apex.md) | | +| [**hardis:org:user:activateinvalid**](hardis/org/user/activateinvalid.md) | | +| [**hardis:org:user:freeze**](hardis/org/user/freeze.md) | | +| 
[**hardis:org:user:unfreeze**](hardis/org/user/unfreeze.md) | | ### hardis:package -| Command | Title | -|:------------------------------------------------------------------------|:-----------------------------------| -| [**hardis:package:create**](hardis/package/create.md) | Create a new package | -| [**hardis:package:install**](hardis/package/install.md) | Install packages in an org | -| [**hardis:package:mergexml**](hardis/package/mergexml.md) | Merge package.xml files | -| [**hardis:package:version:create**](hardis/package/version/create.md) | Create a new version of a package | -| [**hardis:package:version:list**](hardis/package/version/list.md) | Create a new version of a package | -| [**hardis:package:version:promote**](hardis/package/version/promote.md) | Promote new versions of package(s) | +| Command | Title | +|:------------------------------------------------------------------------|:------| +| [**hardis:package:create**](hardis/package/create.md) | | +| [**hardis:package:install**](hardis/package/install.md) | | +| [**hardis:package:mergexml**](hardis/package/mergexml.md) | | +| [**hardis:package:version:create**](hardis/package/version/create.md) | | +| [**hardis:package:version:list**](hardis/package/version/list.md) | | +| [**hardis:package:version:promote**](hardis/package/version/promote.md) | | + +### hardis:packagexml + +| Command | Title | +|:------------------------------------------------------------|:------| +| [**hardis:packagexml:append**](hardis/packagexml/append.md) | | +| [**hardis:packagexml:remove**](hardis/packagexml/remove.md) | | ### hardis:project -| Command | Title | -|:----------------------------------------------------------------------------------------------|:----------------------------------------------------------------| -| [**hardis:project:audit:apiversion**](hardis/project/audit/apiversion.md) | Audit Metadatas API Version | -| [**hardis:project:audit:callincallout**](hardis/project/audit/callincallout.md) | Audit 
CallIns and CallOuts | -| [**hardis:project:audit:duplicatefiles**](hardis/project/audit/duplicatefiles.md) | Find duplicate sfdx files | -| [**hardis:project:audit:remotesites**](hardis/project/audit/remotesites.md) | Audit Remote Sites | -| [**hardis:project:clean:emptyitems**](hardis/project/clean/emptyitems.md) | Clean retrieved empty items in dx sources | -| [**hardis:project:clean:flowpositions**](hardis/project/clean/flowpositions.md) | Clean Flow Positions | -| [**hardis:project:clean:hiddenitems**](hardis/project/clean/hiddenitems.md) | Clean retrieved hidden items in dx sources | -| [**hardis:project:clean:listviews**](hardis/project/clean/listviews.md) | Replace Mine by Everything in ListViews | -| [**hardis:project:clean:manageditems**](hardis/project/clean/manageditems.md) | Clean retrieved managed items in dx sources | -| [**hardis:project:clean:minimizeprofiles**](hardis/project/clean/minimizeprofiles.md) | Clean profiles of Permission Set attributes | -| [**hardis:project:clean:orgmissingitems**](hardis/project/clean/orgmissingitems.md) | Clean SFDX items using target org definition | -| [**hardis:project:clean:references**](hardis/project/clean/references.md) | Clean references in dx sources | -| [**hardis:project:clean:retrievefolders**](hardis/project/clean/retrievefolders.md) | Retrieve dashboards, documents and report folders in DX sources | -| [**hardis:project:clean:standarditems**](hardis/project/clean/standarditems.md) | Clean retrieved standard items in dx sources | -| [**hardis:project:clean:systemdebug**](hardis/project/clean/systemdebug.md) | Clean System debug | -| [**hardis:project:clean:xml**](hardis/project/clean/xml.md) | Clean retrieved empty items in dx sources | -| [**hardis:project:configure:auth**](hardis/project/configure/auth.md) | Configure authentication | -| [**hardis:project:convert:profilestopermsets**](hardis/project/convert/profilestopermsets.md) | Convert Profiles into Permission Sets | -| 
[**hardis:project:create**](hardis/project/create.md) | Login | -| [**hardis:project:deploy:sources:dx**](hardis/project/deploy/sources/dx.md) | Deploy sfdx sources to org | -| [**hardis:project:deploy:sources:metadata**](hardis/project/deploy/sources/metadata.md) | Deploy metadata sources to org | -| [**hardis:project:fix:profiletabs**](hardis/project/fix/profiletabs.md) | Fix profiles to add tabs that are not retrieved by SF CLI | -| [**hardis:project:fix:v53flexipages**](hardis/project/fix/v53flexipages.md) | Fix flexipages for v53 | -| [**hardis:project:generate:gitdelta**](hardis/project/generate/gitdelta.md) | Generate Git Delta | -| [**hardis:project:lint**](hardis/project/lint.md) | Lint | -| [**hardis:project:metadata:findduplicates**](hardis/project/metadata/findduplicates.md) | XML duplicate values finder | +| Command | Title | +|:----------------------------------------------------------------------------------------------|:------| +| [**hardis:project:audit:apiversion**](hardis/project/audit/apiversion.md) | | +| [**hardis:project:audit:callincallout**](hardis/project/audit/callincallout.md) | | +| [**hardis:project:audit:duplicatefiles**](hardis/project/audit/duplicatefiles.md) | | +| [**hardis:project:audit:remotesites**](hardis/project/audit/remotesites.md) | | +| [**hardis:project:clean:emptyitems**](hardis/project/clean/emptyitems.md) | | +| [**hardis:project:clean:filter-xml-content**](hardis/project/clean/filter-xml-content.md) | | +| [**hardis:project:clean:flowpositions**](hardis/project/clean/flowpositions.md) | | +| [**hardis:project:clean:hiddenitems**](hardis/project/clean/hiddenitems.md) | | +| [**hardis:project:clean:listviews**](hardis/project/clean/listviews.md) | | +| [**hardis:project:clean:manageditems**](hardis/project/clean/manageditems.md) | | +| [**hardis:project:clean:minimizeprofiles**](hardis/project/clean/minimizeprofiles.md) | | +| [**hardis:project:clean:orgmissingitems**](hardis/project/clean/orgmissingitems.md) | | +| 
[**hardis:project:clean:references**](hardis/project/clean/references.md) | | +| [**hardis:project:clean:retrievefolders**](hardis/project/clean/retrievefolders.md) | | +| [**hardis:project:clean:sensitive-metadatas**](hardis/project/clean/sensitive-metadatas.md) | | +| [**hardis:project:clean:standarditems**](hardis/project/clean/standarditems.md) | | +| [**hardis:project:clean:systemdebug**](hardis/project/clean/systemdebug.md) | | +| [**hardis:project:clean:xml**](hardis/project/clean/xml.md) | | +| [**hardis:project:configure:auth**](hardis/project/configure/auth.md) | | +| [**hardis:project:convert:profilestopermsets**](hardis/project/convert/profilestopermsets.md) | | +| [**hardis:project:create**](hardis/project/create.md) | | +| [**hardis:project:deploy:notify**](hardis/project/deploy/notify.md) | | +| [**hardis:project:deploy:quick**](hardis/project/deploy/quick.md) | | +| [**hardis:project:deploy:simulate**](hardis/project/deploy/simulate.md) | | +| [**hardis:project:deploy:smart**](hardis/project/deploy/smart.md) | | +| [**hardis:project:deploy:sources:dx**](hardis/project/deploy/sources/dx.md) | | +| [**hardis:project:deploy:sources:metadata**](hardis/project/deploy/sources/metadata.md) | | +| [**hardis:project:deploy:start**](hardis/project/deploy/start.md) | | +| [**hardis:project:deploy:validate**](hardis/project/deploy/validate.md) | | +| [**hardis:project:fix:profiletabs**](hardis/project/fix/profiletabs.md) | | +| [**hardis:project:fix:v53flexipages**](hardis/project/fix/v53flexipages.md) | | +| [**hardis:project:generate:bypass**](hardis/project/generate/bypass.md) | | +| [**hardis:project:generate:flow-git-diff**](hardis/project/generate/flow-git-diff.md) | | +| [**hardis:project:generate:gitdelta**](hardis/project/generate/gitdelta.md) | | +| [**hardis:project:lint**](hardis/project/lint.md) | | +| [**hardis:project:metadata:findduplicates**](hardis/project/metadata/findduplicates.md) | | ### hardis:scratch -| Command | Title | 
-|:----------------------------------------------------------------------|:-----------------------------------------| -| [**hardis:scratch:create**](hardis/scratch/create.md) | Create and initialize scratch org | -| [**hardis:scratch:delete**](hardis/scratch/delete.md) | Delete scratch orgs(s) | -| [**hardis:scratch:pool:create**](hardis/scratch/pool/create.md) | Create and configure scratch org pool | -| [**hardis:scratch:pool:localauth**](hardis/scratch/pool/localauth.md) | Authenticate locally to scratch org pool | -| [**hardis:scratch:pool:refresh**](hardis/scratch/pool/refresh.md) | Refresh scratch org pool | -| [**hardis:scratch:pool:reset**](hardis/scratch/pool/reset.md) | Reset scratch org pool | -| [**hardis:scratch:pool:view**](hardis/scratch/pool/view.md) | View scratch org pool info | -| [**hardis:scratch:pull**](hardis/scratch/pull.md) | Scratch PULL | -| [**hardis:scratch:push**](hardis/scratch/push.md) | Scratch PUSH | +| Command | Title | +|:----------------------------------------------------------------------|:------| +| [**hardis:scratch:create**](hardis/scratch/create.md) | | +| [**hardis:scratch:delete**](hardis/scratch/delete.md) | | +| [**hardis:scratch:pool:create**](hardis/scratch/pool/create.md) | | +| [**hardis:scratch:pool:localauth**](hardis/scratch/pool/localauth.md) | | +| [**hardis:scratch:pool:refresh**](hardis/scratch/pool/refresh.md) | | +| [**hardis:scratch:pool:reset**](hardis/scratch/pool/reset.md) | | +| [**hardis:scratch:pool:view**](hardis/scratch/pool/view.md) | | +| [**hardis:scratch:pull**](hardis/scratch/pull.md) | | +| [**hardis:scratch:push**](hardis/scratch/push.md) | | ### hardis:source -| Command | Title | -|:--------------------------------------------------------|:------------------------------------------------------------------------------------------------| -| [**hardis:source:deploy**](hardis/source/deploy.md) | sfdx-hardis wrapper for sfdx force:source:deploy that displays tips to solve deployment errors. 
| -| [**hardis:source:push**](hardis/source/push.md) | sfdx-hardis wrapper for sfdx force:source:push that displays tips to solve deployment errors. | -| [**hardis:source:retrieve**](hardis/source/retrieve.md) | sfdx-hardis wrapper for sfdx force:source:retrieve | +| Command | Title | +|:--------------------------------------------------------|:------| +| [**hardis:source:deploy**](hardis/source/deploy.md) | | +| [**hardis:source:push**](hardis/source/push.md) | | +| [**hardis:source:retrieve**](hardis/source/retrieve.md) | | ### hardis:work -| Command | Title | -|:----------------------------------------------------------------|:---------------------| -| [**hardis:work:new**](hardis/work/new.md) | New work task | -| [**hardis:work:refresh**](hardis/work/refresh.md) | Refresh work task | -| [**hardis:work:resetselection**](hardis/work/resetselection.md) | Select again | -| [**hardis:work:save**](hardis/work/save.md) | Save work task | -| [**hardis:work:ws**](hardis/work/ws.md) | WebSocket operations | +| Command | Title | +|:----------------------------------------------------------------|:------| +| [**hardis:work:new**](hardis/work/new.md) | | +| [**hardis:work:refresh**](hardis/work/refresh.md) | | +| [**hardis:work:resetselection**](hardis/work/resetselection.md) | | +| [**hardis:work:save**](hardis/work/save.md) | | +| [**hardis:work:ws**](hardis/work/ws.md) | | + +### hello:world + +| Command | Title | +|:----------------------------------|:------| +| [**hello:world**](hello/world.md) | | diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 000000000..795520aa4 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,74 @@ +### With IDE + +You can install [Visual Studio Code](https://code.visualstudio.com/), then VSCode Extension [VsCode SFDX Hardis](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) + +Once installed, click on ![Hardis Group 
button](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/hardis-button.jpg) in VsCode left bar, click on **Install dependencies** and follow the additional installation instructions :) + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/install-dependencies-highlight.png) + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/install-dependencies-screenshot.png) + +When you are all green, you are all good 😊 + +_You can also watch the video tutorial below_ + +[![Installation tutorial](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/play-install-tuto.png)](https://www.youtube.com/watch?v=LA8m-t7CjHA) + +___ + +### As SFDX Plugin + +#### Pre-requisites + +- Install Node.js ([recommended version](https://nodejs.org/en/)) +- Install Salesforce DX by running `npm install @salesforce/cli --global` command line + +#### Plugin installation + +```sh-session +sf plugins install sfdx-hardis +``` + +For advanced use, please also install dependencies + +```sh-session +sf plugins install @salesforce/plugin-packaging +sf plugins install sfdx-git-delta +sf plugins install sfdmu +``` + +If you are using CI/CD scripts, use `echo y | sf plugins install ...` to bypass prompt. + +___ + +### Docker + +You can use sfdx-hardis docker images to run in CI. 
+ +> All our Docker images are checked for security issues with [MegaLinter by OX Security](https://megalinter.io/latest/) + +- Linux **Alpine** based images (works on Gitlab) + + - Docker Hub + + - [**hardisgroupcom/sfdx-hardis:latest**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with latest @salesforce/cli version) + - [**hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + + - GitHub Packages (ghcr.io) + + - [**ghcr.io/hardisgroupcom/sfdx-hardis:latest**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis) (with latest @salesforce/cli version) + - [**ghcr.io/hardisgroupcom/sfdx-hardis:latest-sfdx-recommended**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + +_See [Dockerfile](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/Dockerfile)_ + +- Linux **Ubuntu** based images (works on GitHub, Azure & Bitbucket) + + - [**hardisgroupcom/sfdx-hardis-ubuntu:latest**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis-ubuntu) (with latest @salesforce/cli version) + - [**hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended**](https://hub.docker.com/r/hardisgroupcom/sfdx-hardis-ubuntu) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + + - GitHub Packages (ghcr.io) + + - [**ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:latest**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis-ubuntu) (with latest @salesforce/cli version) + - [**ghcr.io/hardisgroupcom/sfdx-hardis-ubuntu:latest-sfdx-recommended**](https://github.com/hardisgroupcom/sfdx-hardis/pkgs/container/sfdx-hardis-ubuntu) (with recommended @salesforce/cli version, in case the latest version of @salesforce/cli is buggy) + +_See 
[Dockerfile-ubuntu](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/Dockerfile-ubuntu)_ diff --git a/docs/prompt-templates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.md b/docs/prompt-templates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.md new file mode 100644 index 000000000..5bd272fec --- /dev/null +++ b/docs/prompt-templates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.md @@ -0,0 +1,58 @@ +--- +title: PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD +description: Prompt template for PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD +--- + +# PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD + +## Variables +| Name | Description | Example | +|:----------------|:---------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------| +| **OBJECT_NAME** | The API name of the Salesforce object whose fields and validation rules are being described. | `Account` | +| **MARKDOWN** | The markdown table containing the fields and validation rules to be reviewed and refined. | `\| Field \| Label \| Description \| ... \|
\|-------\|-------\|-------------\| ... \|` | + +## Prompt + +``` +You are a skilled Business Analyst working on a Salesforce project. Your task is to review and refine the fields and validation rules of the Salesforce object "{{OBJECT_NAME}}" and describe them in plain English. The goal is to create a detailed, user-friendly explanation of each field and validation rule that a non-technical business user can easily understand. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +## Instructions: +1. **Enhancing Fields Descriptions**: + - If a field's description is missing, generate a meaningful description using the context provided by the other column values (e.g., name, data type, or usage). + - If a field description already exists, improve its clarity and comprehensiveness by incorporating insights from the other column values. + - If an attribute's label is missing, generate a meaningful label using the context provided by the other column values. + +2. **Enhancing Validation Rules Descriptions**: + - If a validation rule's description is missing, generate a meaningful description using the context provided by the other column values (especially formula column). + - If a validation rule description already exists, improve its clarity and comprehensiveness by incorporating insights from the other column values (especially formula column). + - If a validation rule label is missing, generate a meaningful label using the context provided by the other column values. + +3. **Output Format**: + - Return the updated descriptions in the **Markdown tables** format provided below. + - Ensure the tables align with Markdown syntax conventions for proper rendering. + +4. **Tone and Style**: + - Use plain English suitable for business users with minimal technical jargon. + - Focus on clarity, completeness, and practical usage examples if applicable. + +5. **Output Requirements**: + - Respond **only in Markdown** format. 
+ - Do not include any additional text or commentary outside of the Markdown. + +## Reference Data: +- Use the following markdown as the basis for your updates: + {{MARKDOWN}} + +## Additional Guidance: +- **Consistency**: Maintain consistent formatting and ensure the descriptions are cohesive across all attributes. +- **Use Examples**: When applicable, include simple examples to illustrate the attribute's purpose or use case. + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_APEX.md b/docs/prompt-templates/PROMPT_DESCRIBE_APEX.md new file mode 100644 index 000000000..80f75e10c --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_APEX.md @@ -0,0 +1,42 @@ +--- +title: PROMPT_DESCRIBE_APEX +description: Prompt template for PROMPT_DESCRIBE_APEX +--- + +# PROMPT_DESCRIBE_APEX + +## Variables +| Name | Description | Example | +|:---------------|:---------------------------------------------------|:------------------------------------------| +| **CLASS_NAME** | The name of the Salesforce Apex class to describe. | `MyCustomController` | +| **APEX_CODE** | The full source code of the Apex class. | `public class MyCustomController { ... }` | + +## Prompt + +``` +You are a developer working on a Salesforce project. Your goal is to summarize the behavior of the Salesforce Apex class "{{CLASS_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the apex class. + - List the key functionalities and business logic implemented in the class. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The code for Apex class "{{CLASS_NAME}}" is: +{{APEX_CODE}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_APEX.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.md b/docs/prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.md new file mode 100644 index 000000000..57cf1a910 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.md @@ -0,0 +1,42 @@ +--- +title: PROMPT_DESCRIBE_APPROVAL_PROCESS +description: Prompt template for PROMPT_DESCRIBE_APPROVAL_PROCESS +--- + +# PROMPT_DESCRIBE_APPROVAL_PROCESS + +## Variables +| Name | Description | Example | +|:-------------------------|:---------------------------------------------------------|:-----------------------------------------| +| **APPROVALPROCESS_NAME** | The name of the Salesforce Approval Process to describe. | `Opportunity_Approval` | +| **APPROVALPROCESS_XML** | The XML metadata for the Salesforce Approval Process. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to explain what the Salesforce Approval Process "{{APPROVALPROCESS_NAME}}" is about in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose of the approval process. + - List the key functionalities and business logic implemented in the approval process. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Approval Process "{{APPROVALPROCESS_NAME}}" is: +{{APPROVALPROCESS_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.md b/docs/prompt-templates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.md new file mode 100644 index 000000000..8e56cf664 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.md @@ -0,0 +1,44 @@ +--- +title: PROMPT_DESCRIBE_ASSIGNMENT_RULES +description: Prompt template for PROMPT_DESCRIBE_ASSIGNMENT_RULES +--- + +# PROMPT_DESCRIBE_ASSIGNMENT_RULES + +## Variables +| Name | Description | Example | +|:-------------------------|:---------------------------------------------------------|:-----------------------------------------| +| **ASSIGNMENTRULES_NAME** | The name of the Salesforce Assignment Rules to describe. | `Case_Assignment_Rules` | +| **ASSIGNMENTRULES_XML** | The XML metadata for the Salesforce Assignment Rules. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce Assignment Rules "{{ASSIGNMENTRULES_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by explaining the role of the Salesforce Assignment Rules that you can guess according to the content of the XML and the name. + Try to guess the role of users assigned to this assignment rule. 
Do not mention the email of assigned users, but you can mention the type of assigned users. + Based on the Criteria items, explain what conditions must be met so that the record will be assigned. + - Analyze all the assignment rules for objects and in the description explain the aim of those rules. What is the role of the object in the system, based on the assignment rules. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Assignment Rule "{{ASSIGNMENTRULES_NAME}}" is: +{{ASSIGNMENTRULES_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.md b/docs/prompt-templates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.md new file mode 100644 index 000000000..6871b68de --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.md @@ -0,0 +1,44 @@ +--- +title: PROMPT_DESCRIBE_AUTORESPONSE_RULES +description: Prompt template for PROMPT_DESCRIBE_AUTORESPONSE_RULES +--- + +# PROMPT_DESCRIBE_AUTORESPONSE_RULES + +## Variables +| Name | Description | Example | +|:---------------------------|:-----------------------------------------------------------|:---------------------------------------------| +| **AUTORESPONSERULES_NAME** | The name of the Salesforce AutoResponse Rules to describe. | `Case_AutoResponse_Rules` | +| **AUTORESPONSERULES_XML** | The XML metadata for the Salesforce AutoResponse Rules. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. 
Your goal is to summarize the content and behavior of the Salesforce AutoResponse Rules "{{AUTORESPONSERULES_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by explaining the role of the Salesforce AutoResponse Rules that you can guess according to the content of the XML and the name. + Try to guess the role of users assigned to this AutoResponse rule. Do not mention the email of assigned users, but you can mention the type of assigned users. + - Analyze all the AutoResponse rules for objects and in the description explain the aim of those rules. What is the role of the object in the system, based on the AutoResponse rules. + - Based on the Criteria items, explain what the response to the user would be if the criteria are met. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce AutoResponse Rule "{{AUTORESPONSERULES_NAME}}" is: +{{AUTORESPONSERULES_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. 
+ +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_ESCALATION_RULES.md b/docs/prompt-templates/PROMPT_DESCRIBE_ESCALATION_RULES.md new file mode 100644 index 000000000..3e320bb28 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_ESCALATION_RULES.md @@ -0,0 +1,42 @@ +--- +title: PROMPT_DESCRIBE_ESCALATION_RULES +description: Prompt template for PROMPT_DESCRIBE_ESCALATION_RULES +--- + +# PROMPT_DESCRIBE_ESCALATION_RULES + +## Variables +| Name | Description | Example | +|:-------------------------|:--------------------------------------------------------|:-----------------------------------------| +| **ESCALATIONRULES_NAME** | The name of the Salesforce Escalation Rule to describe. | `Case_Escalation_Rule` | +| **ESCALATIONRULES_XML** | The XML metadata for the Salesforce Escalation Rule. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to explain what the Salesforce Escalation Rule "{{ESCALATIONRULES_NAME}}" is about in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose of the escalation rule. + - List the key functionalities and business logic implemented in the escalation rule. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Escalation Rule "{{ESCALATIONRULES_NAME}}" is: +{{ESCALATIONRULES_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_ESCALATION_RULES.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. 
+ +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_FLOW.md b/docs/prompt-templates/PROMPT_DESCRIBE_FLOW.md new file mode 100644 index 000000000..04f790add --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_FLOW.md @@ -0,0 +1,46 @@ +--- +title: PROMPT_DESCRIBE_FLOW +description: Prompt template for PROMPT_DESCRIBE_FLOW +--- + +# PROMPT_DESCRIBE_FLOW + +## Variables +| Name | Description | Example | +|:-------------|:-------------------------------------------------------|:-------------------| +| **FLOW_XML** | The XML definition of the Salesforce Flow to describe. | `...` | + +## Prompt + +``` +You are a business analyst working on a Salesforce project. Your goal is to describe the Salesforce Flow in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose and business context of the flow. + - Explain what business process or automation this flow supports. + +2. **Step-by-Step Description**: + - Describe the main steps, decisions, and actions in the flow. + - Use plain English and avoid technical jargon when possible. + - If there are sub-flows or important conditions, mention them clearly. + +3. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The flow XML is: +{{FLOW_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_FLOW.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. 
+ +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_FLOW_DIFF.md b/docs/prompt-templates/PROMPT_DESCRIBE_FLOW_DIFF.md new file mode 100644 index 000000000..401505096 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_FLOW_DIFF.md @@ -0,0 +1,52 @@ +--- +title: PROMPT_DESCRIBE_FLOW_DIFF +description: Prompt template for PROMPT_DESCRIBE_FLOW_DIFF +--- + +# PROMPT_DESCRIBE_FLOW_DIFF + +## Variables +| Name | Description | Example | +|:----------------------|:-------------------------------------------------------------------|:-------------------| +| **FLOW_XML_NEW** | The XML definition of the new version of the Salesforce Flow. | `...` | +| **FLOW_XML_PREVIOUS** | The XML definition of the previous version of the Salesforce Flow. | `...` | + +## Prompt + +``` +You are a business analyst working on a Salesforce project. Your goal is to describe the differences between the new and previous versions of a Salesforce Flow in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose of the flow and the context for the changes. + - Explain why a new version was created if possible. + +2. **Describe the Differences**: + - List and explain the key changes between the new and previous versions. + - Ignore tags related to location attributes (locationX and locationY) or positions: do not mention them in your response. + - Ignore nodes and elements that have not changed: do not mention them in your response. + - Ignore connector changes: do not mention them in your response. + - Use plain English and avoid technical jargon when possible. 
+ +{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The new version flow XML is: +{{FLOW_XML_NEW}} + +- The previous version flow XML is: +{{FLOW_XML_PREVIOUS}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_FLOW_DIFF.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_LWC.md b/docs/prompt-templates/PROMPT_DESCRIBE_LWC.md new file mode 100644 index 000000000..5625b751b --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_LWC.md @@ -0,0 +1,63 @@ +--- +title: PROMPT_DESCRIBE_LWC +description: Prompt template for PROMPT_DESCRIBE_LWC +--- + +# PROMPT_DESCRIBE_LWC + +## Variables +| Name | Description | Example | +|:------------------|:-------------------------------------------------------------|:-----------------------------------------------------------| +| **LWC_NAME** | The name of the Lightning Web Component to describe. | `myCustomComponent` | +| **LWC_JS_CODE** | The JavaScript code of the Lightning Web Component. | `import { LightningElement } from 'lwc'; ...` | +| **LWC_HTML_CODE** | The HTML template code of the Lightning Web Component. | `` | +| **LWC_JS_META** | The meta configuration file for the Lightning Web Component. | `...` | + +## Prompt + +``` +You are a skilled Salesforce developer working on a Lightning Web Components (LWC) project. Your goal is to explain the Salesforce Lightning Web Component "{{LWC_NAME}}" in plain English, providing a detailed explanation suitable for other developers and business users. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose and functionality of the Lightning Web Component. 
+ - Describe the key features and capabilities it provides to users. + - Explain how it interacts with Salesforce data or other components. + +2. **Technical Analysis**: + - Describe the main JavaScript methods and their purposes. + - Explain how the component handles data binding and events. + - Mention any wire services, apex methods, or external services the component uses. + - Identify any custom properties or special configurations. + +{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The HTML template for component "{{LWC_NAME}}": +``` +{{LWC_HTML_CODE}} +``` + +- The JavaScript controller for component "{{LWC_NAME}}": +``` +{{LWC_JS_CODE}} +``` + +- The metadata configuration for component "{{LWC_NAME}}": +``` +{{LWC_JS_META}} +``` + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_LWC.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_OBJECT.md b/docs/prompt-templates/PROMPT_DESCRIBE_OBJECT.md new file mode 100644 index 000000000..c9bb8cc7c --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_OBJECT.md @@ -0,0 +1,60 @@ +--- +title: PROMPT_DESCRIBE_OBJECT +description: Prompt template for PROMPT_DESCRIBE_OBJECT +--- + +# PROMPT_DESCRIBE_OBJECT + +## Variables +| Name | Description | Example | +|:---------------------|:--------------------------------------------------------------------------|:-----------------------------------------------------------------| +| **OBJECT_NAME** | The API name of the Salesforce object to describe. | `Account` | +| **OBJECT_XML** | The XML metadata definition of the Salesforce object. | `...` | +| **ALL_OBJECTS_LIST** | A list of all objects in the Salesforce org. 
| `Account, Contact, Opportunity, ...` | +| **ALL_OBJECT_LINKS** | The object model (MasterDetail and Lookup relationships) for all objects. | `Account->Contact (Lookup), Opportunity->Account (MasterDetail)` | + +## Prompt + +``` +You are a business analyst working on a Salesforce project. Your goal is to describe the Salesforce object "{{OBJECT_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role and purpose of the object "{{OBJECT_NAME}}" in the Salesforce org. + - Explain its significance in the project, its purpose in the org's implementation, and any key business processes it supports. + +2. **Relationships**: + - Use the provided object model data to describe how "{{OBJECT_NAME}}" relates to other objects. + - Include: + - Direct relationships (MasterDetail and Lookup fields on the object). + - Inverse relationships (other objects referencing "{{OBJECT_NAME}}"). + - Highlight any key dependencies or implications of these relationships in plain English. + +3. **Additional Guidance**: + - **Do NOT include** fields table or validation rules table in the response + - Use the acronyms provided to interpret metadata names (e.g., TR: Trigger, VR: Validation Rule, WF: Workflow). + - If the XML metadata contains sensitive information (e.g., tokens, passwords), replace them with a placeholder (e.g., `[REDACTED]`). + +4. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The list of all objects in the Salesforce org is: {{ALL_OBJECTS_LIST}} + +- The object model (MasterDetail and Lookup relationships) is: {{ALL_OBJECT_LINKS}} + +- The metadata XML for "{{OBJECT_NAME}}" is: +{{OBJECT_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_OBJECT.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_PACKAGE.md b/docs/prompt-templates/PROMPT_DESCRIBE_PACKAGE.md new file mode 100644 index 000000000..ae482ae9a --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_PACKAGE.md @@ -0,0 +1,63 @@ +--- +title: PROMPT_DESCRIBE_PACKAGE +description: Prompt template for PROMPT_DESCRIBE_PACKAGE +--- + +# PROMPT_DESCRIBE_PACKAGE + +## Variables +| Name | Description | Example | +|:----------------------|:-----------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **PACKAGE_NAME** | The name of the package to describe. | `Pardot` | +| **PACKAGE_XML** | The JsonL metadata for the package | `{"SubscriberPackageName":"Pardot","SubscriberPackageNamespace":"pi","SubscriberPackageVersionNumber":"1.0.0","SubscriberPackageVersionId":"04t1t0000000abcAAA","SubscriberPackageVersionName":"Pardot Version 1.0"}` | +| **PACKAGE_METADATAS** | A list of all metadata items (Apex classes, objects, flows, etc.) in the org that are provided by this package (namespaced). 
| `ApexClass: pi__MyClass, CustomObject: pi__MyObject, Flow: pi__MyFlow` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce Installed package "{{PACKAGE_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Browse the internet using Google to find the package's official documentation and provide an overview of its purpose and capabilities, with links to the documentation. + - If you found the package's official documentation, summarize its key features and functionalities. + - If you can not find the package's official documentation, provide a general overview based on the package attributes and its metadata components (but do not output the list of metadatas, it will be for paragraph 2). + - Include any relevant information about the package's intended use cases or target audience. + - If you can find other relevant information about the package, like articles or blog posts, in english or in the prompt reply language, provide them as a list of links + - If you find the AppExchange page, include it in your response. Otherwise, don't mention it. + - If you find the package's GitHub repository, include it in your response. Otherwise, don't mention it. + - If you find the vendor information, include it in your response. Otherwise, don't mention it. + - Make sure that hyperlinks are not dead links leading to 404 pages. + +2. **Package Metadata**: + - Review the list of metadata items (Apex classes, objects, flows, etc.) provided by this package, as listed in reference data. + - Highlight the most important or business-relevant components. + +3. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The attributes for Installed package "{{PACKAGE_NAME}}" are: +{{PACKAGE_XML}} + +- The list of metadata items provided by this package is: +{{PACKAGE_METADATAS}} + +- Many Salesforce managed packages are published by third-party vendors. You can find the package's vendor information in the Salesforce AppExchange (https://appexchange.salesforce.com/). + +- There are also many open-source packages available on GitHub (github.com) + +- Other relevant sources for articles or blog posts about the package may include the vendor's website, community forums, or Salesforce-related blogs, like Salesforce Ben or medium.com. Do not mention these source if you don't have a direct link to a page explicitly related to package "{{PACKAGE_NAME}}". + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_PACKAGE.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_PAGE.md b/docs/prompt-templates/PROMPT_DESCRIBE_PAGE.md new file mode 100644 index 000000000..4af6153a4 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_PAGE.md @@ -0,0 +1,42 @@ +--- +title: PROMPT_DESCRIBE_PAGE +description: Prompt template for PROMPT_DESCRIBE_PAGE +--- + +# PROMPT_DESCRIBE_PAGE + +## Variables +| Name | Description | Example | +|:--------------|:-------------------------------------------------------|:-----------------------------| +| **PAGE_NAME** | The name of the Salesforce Lightning Page to describe. | `Account_Record_Page` | +| **PAGE_XML** | The XML metadata for the Lightning Page. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. 
Your goal is to summarize the content and behavior of the Salesforce Lightning Page "{{PAGE_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the lightning page. + - List the key tabs, sections, views, related lists and actions described in the lightning page. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Lightning page "{{PAGE_NAME}}" is: +{{PAGE_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_PAGE.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET.md b/docs/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET.md new file mode 100644 index 000000000..5667366a0 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET.md @@ -0,0 +1,45 @@ +--- +title: PROMPT_DESCRIBE_PERMISSION_SET +description: Prompt template for PROMPT_DESCRIBE_PERMISSION_SET +--- + +# PROMPT_DESCRIBE_PERMISSION_SET + +## Variables +| Name | Description | Example | +|:-----------------------|:-------------------------------------------------------|:-------------------------------------| +| **PERMISSIONSET_NAME** | The name of the Salesforce Permission Set to describe. | `PS_CloudityAccount` | +| **PERMISSIONSET_XML** | The XML metadata for the Salesforce Permission Set. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. 
Your goal is to summarize the content and behavior of the Salesforce PermissionSet "{{PERMISSIONSET_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the Salesforce PermissionSet that you can guess according to the content of the XML. Try to guess the role of users assigned to this permission set according to applicationVisibilities, objectVisibilities and userPermissions. + - List the key features of the Permission Set. + - The most important features are License, Applications, User Permissions, features with default values, Custom Objects and Record Types + - Ignore Apex classes and Custom Fields + - Ignore blocks whose access or visibility is set to "false" + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Permission Set "{{PERMISSIONSET_NAME}}" is: +{{PERMISSIONSET_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. 
+ +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.md b/docs/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.md new file mode 100644 index 000000000..627e0b7fc --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.md @@ -0,0 +1,42 @@ +--- +title: PROMPT_DESCRIBE_PERMISSION_SET_GROUP +description: Prompt template for PROMPT_DESCRIBE_PERMISSION_SET_GROUP +--- + +# PROMPT_DESCRIBE_PERMISSION_SET_GROUP + +## Variables +| Name | Description | Example | +|:----------------------------|:-------------------------------------------------------------|:-----------------------------------------------| +| **PERMISSIONSETGROUP_NAME** | The name of the Salesforce Permission Set Group to describe. | `PS_CloudityAdmin` | +| **PERMISSIONSETGROUP_XML** | The XML metadata for the Salesforce Permission Set Group. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce PermissionSetGroup "{{PERMISSIONSETGROUP_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the Salesforce PermissionSetGroup that you can guess according to the content of the XML. Try to guess the role of users assigned to this permission set group according to the name, description and related Permission Sets + - List the key features of the Permission Set. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Permission Set Group "{{PERMISSIONSETGROUP_NAME}}" is: +{{PERMISSIONSETGROUP_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_PROFILE.md b/docs/prompt-templates/PROMPT_DESCRIBE_PROFILE.md new file mode 100644 index 000000000..5f8259c6a --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_PROFILE.md @@ -0,0 +1,45 @@ +--- +title: PROMPT_DESCRIBE_PROFILE +description: Prompt template for PROMPT_DESCRIBE_PROFILE +--- + +# PROMPT_DESCRIBE_PROFILE + +## Variables +| Name | Description | Example | +|:-----------------|:------------------------------------------------|:-------------------------| +| **PROFILE_NAME** | The name of the Salesforce Profile to describe. | `Cloudity Sales` | +| **PROFILE_XML** | The XML metadata for the Salesforce Profile. | `...` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce Profile "{{PROFILE_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the Salesforce Profile that you can guess according to the content of the XML. Try to guess the role of users assigned to this profile according to applicationVisibilities, objectVisibilities and userPermissions. + - List the key features of the Profiles. 
+ - The most important features are License, Applications, User Permissions ,features with default values ,Custom Objects and Record Types + - Ignore Apex classes and Custom Fields + - Ignore blocks who has access or visibility set to "false" + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Profile "{{PROFILE_NAME}}" is: +{{PROFILE_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_PROFILE.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_DESCRIBE_ROLES.md b/docs/prompt-templates/PROMPT_DESCRIBE_ROLES.md new file mode 100644 index 000000000..42340ba69 --- /dev/null +++ b/docs/prompt-templates/PROMPT_DESCRIBE_ROLES.md @@ -0,0 +1,43 @@ +--- +title: PROMPT_DESCRIBE_ROLES +description: Prompt template for PROMPT_DESCRIBE_ROLES +--- + +# PROMPT_DESCRIBE_ROLES + +## Variables +| Name | Description | Example | +|:----------------------|:------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **ROLES_DESCRIPTION** | Description of all roles of the org | `- **Role Name (id:role_api_name)**: Role description (parentId: parent_role_id)
- **Another Role (id:another_role_api_name)**: Another role description (parentId: another_parent_role_id)
- **Root Role (id:root_role_api_name)**: Root role description (parentId: ROOT)` | + +## Prompt + +``` +You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the business organization of the company. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Analyze the provided role hierarchy data to understand the organizational structure. + - Identify key roles and their relationships within the hierarchy. + - Summarize the roles in a way that is clear and understandable for business stakeholders. + - Ensure the summary is concise yet comprehensive, highlighting the most important aspects of the role hierarchy. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The description of all role hierarchies is: +{{ROLES_DESCRIPTION}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_DESCRIBE_ROLES.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/PROMPT_SOLVE_DEPLOYMENT_ERROR.md b/docs/prompt-templates/PROMPT_SOLVE_DEPLOYMENT_ERROR.md new file mode 100644 index 000000000..d645085b8 --- /dev/null +++ b/docs/prompt-templates/PROMPT_SOLVE_DEPLOYMENT_ERROR.md @@ -0,0 +1,46 @@ +--- +title: PROMPT_SOLVE_DEPLOYMENT_ERROR +description: Prompt template for PROMPT_SOLVE_DEPLOYMENT_ERROR +--- + +# PROMPT_SOLVE_DEPLOYMENT_ERROR + +## Variables +| Name | Description | Example | +|:----------|:--------------------------------------------------------------|:-----------------------------------------------------------| +| **ERROR** | The Salesforce deployment error message to analyze and solve. 
| `Cannot deploy component: missing field 'X' on object 'Y'` | + +## Prompt + +``` +You are a Salesforce release manager using Salesforce CLI commands to perform deployments. Your goal is to help solve the following Salesforce deployment error in a clear, actionable way for a technical user. + +### Instructions: + +1. **Error Analysis**: + - Analyze the error message and identify the root cause. + - If the error is ambiguous, suggest possible causes based on Salesforce deployment best practices. + +2. **Solution Proposal**: + - Provide a step-by-step solution to resolve the error. + - If applicable, include the correct sfdx source format or XML example. + - Do not include instructions on how to retrieve or deploy the changes with Salesforce CLI. + +3. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The deployment error returned by Salesforce CLI is: +{{ERROR}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} + +``` + +## How to override + +To define your own prompt text, you can define a local file **config/prompt-templates/PROMPT_SOLVE_DEPLOYMENT_ERROR.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override template files at once. + +If you do so, please don't forget to use the replacement variables :) diff --git a/docs/prompt-templates/VARIABLE_ADDITIONAL_INSTRUCTIONS.md b/docs/prompt-templates/VARIABLE_ADDITIONAL_INSTRUCTIONS.md new file mode 100644 index 000000000..5051da93b --- /dev/null +++ b/docs/prompt-templates/VARIABLE_ADDITIONAL_INSTRUCTIONS.md @@ -0,0 +1,26 @@ +--- +title: VARIABLE_ADDITIONAL_INSTRUCTIONS +description: Prompt variable for VARIABLE_ADDITIONAL_INSTRUCTIONS +--- + +# VARIABLE_ADDITIONAL_INSTRUCTIONS + +## Description + +This is a reusable prompt variable that provides common instructions across multiple prompt templates. + +## Content + +``` +### Additional Instructions + +- Caution: Redact any sensitive information (tokens, passwords, API keys, etc.) 
and replace with `[HIDDEN_SENSITIVE_INFOS]`. +- Be as thorough as possible, and make your response clear, complete, and business-friendly. +``` + +## How to override + +To define your own variable content, you can define a local file **config/prompt-templates/VARIABLE_ADDITIONAL_INSTRUCTIONS.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override variable files at once. + diff --git a/docs/prompt-templates/VARIABLE_FORMATTING_REQUIREMENTS.md b/docs/prompt-templates/VARIABLE_FORMATTING_REQUIREMENTS.md new file mode 100644 index 000000000..a5db45106 --- /dev/null +++ b/docs/prompt-templates/VARIABLE_FORMATTING_REQUIREMENTS.md @@ -0,0 +1,28 @@ +--- +title: VARIABLE_FORMATTING_REQUIREMENTS +description: Prompt variable for VARIABLE_FORMATTING_REQUIREMENTS +--- + +# VARIABLE_FORMATTING_REQUIREMENTS + +## Description + +This is a reusable prompt variable that provides common instructions across multiple prompt templates. + +## Content + +``` +**Formatting Requirements**: + - Use markdown formatting suitable for embedding in a level 2 header (`##`). + - Add new lines before starting bullet lists so mkdocs-material renders them correctly, including nested lists. + - Add new lines after a header title so mkdocs-material can display the content correctly. + - Never truncate any information in the response. + - Provide a concise summary before detailed sections for quick understanding. +``` + +## How to override + +To define your own variable content, you can define a local file **config/prompt-templates/VARIABLE_FORMATTING_REQUIREMENTS.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override variable files at once. 
+ diff --git a/docs/prompt-templates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.md b/docs/prompt-templates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.md new file mode 100644 index 000000000..24dc0ac64 --- /dev/null +++ b/docs/prompt-templates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.md @@ -0,0 +1,23 @@ +--- +title: VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC +description: Prompt variable for VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC +--- + +# VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC + +## Description + +This is a reusable prompt variable that provides common instructions across multiple prompt templates. + +## Content + +``` +The output will be in markdown format, which will be used in a documentation site aiming to retrospectively document the Salesforce org. +``` + +## How to override + +To define your own variable content, you can define a local file **config/prompt-templates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.txt** + +You can also use the command `sf hardis:doc:override-prompts` to automatically create all override variable files at once. + diff --git a/docs/salesforce-ai-prompts.md b/docs/salesforce-ai-prompts.md new file mode 100644 index 000000000..770b1e854 --- /dev/null +++ b/docs/salesforce-ai-prompts.md @@ -0,0 +1,61 @@ +--- +title: Sfdx-hardis prompt templates +description: Learn how to use and override prompt templates to generate documentation or solve Salesforce CLI deployment errors +--- + + +Sfdx-hardis uses a set of predefined prompt templates to interact with AI providers for various Salesforce-related tasks. These prompts are designed to cover common use cases such as describing metadata, generating documentation, and solving deployment errors. + +## How Prompts Work + +Each prompt template defines a specific task and includes variables that are dynamically filled in at runtime. The templates are maintained in the codebase and are documented for transparency and customization. 
+ +## Overriding Prompts + +You can override any predefined prompt template by providing a local text file with the same name as the template. This allows you to tailor the AI's behavior to your organization's needs without modifying the core plugin code. + +- Place your custom prompt text files in the appropriate override directory as described in the documentation. +- The system will automatically use your custom version instead of the default if it is present. + +### Example + +Create a file `config/prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.txt` with the following content. + +``` +Describe Salesforce Approval Process "{{APPROVALPROCESS_NAME}}". + +### Instructions: + +1. Some instructions that you'd like to use + +2. **Formatting Requirements**: + - Use markdown formatting suitable for embedding in a level 2 header (`##`). + - Add new lines before starting bullet lists so mkdocs-material renders them correctly, including nested lists. + - Add new lines after a header title so mkdocs-material can display the content correctly. + - Some other formatting requirements... + +### Reference Data: + +- The metadata XML for Approval Process "{{APPROVALPROCESS_NAME}}" is: +{{APPROVALPROCESS_XML}} +``` + +## Available Prompt Templates + +Below is the list of available prompt templates. 
Click on any template to view its documentation and variable details: + +- [Complete Object Attributes](prompt-templates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.md) +- [Describe Apex](prompt-templates/PROMPT_DESCRIBE_APEX.md) +- [Describe Approval Process](prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.md) +- [Describe Assignment Rules](prompt-templates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.md) +- [Describe AutoResponse Rules](prompt-templates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.md) +- [Describe Escalation Rules](prompt-templates/PROMPT_DESCRIBE_ESCALATION_RULES.md) +- [Describe Flow](prompt-templates/PROMPT_DESCRIBE_FLOW.md) +- [Describe Flow Diff](prompt-templates/PROMPT_DESCRIBE_FLOW_DIFF.md) +- [Describe LWC](prompt-templates/PROMPT_DESCRIBE_LWC.md) +- [Describe Object](prompt-templates/PROMPT_DESCRIBE_OBJECT.md) +- [Describe Page](prompt-templates/PROMPT_DESCRIBE_PAGE.md) +- [Describe Permission Set](prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET.md) +- [Describe Permission Set Group](prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.md) +- [Describe Profile](prompt-templates/PROMPT_DESCRIBE_PROFILE.md) +- [Solve Deployment Error](prompt-templates/PROMPT_SOLVE_DEPLOYMENT_ERROR.md) diff --git a/docs/salesforce-ai-setup.md b/docs/salesforce-ai-setup.md index 219ed4e6c..a60d00456 100644 --- a/docs/salesforce-ai-setup.md +++ b/docs/salesforce-ai-setup.md @@ -6,12 +6,145 @@ description: Learn how to use AI to supercharge sfdx-hardis deployments # Setup AI for sfdx-hardis -You need to define at least env variable OPENAI_API_KEY and make it available to your CI/CD workflow. +## Security considerations -To get an OpenAi API key, [create an OpenAi Platform account](https://platform.openai.com/). +sfdx-hardis uses **prompt via API** to collect analysis: only **Metadata XML** or **JSON deployment errors** are sent in the prompts. 
-| Variable | Description | Default | -|------------------------|-------------------------------------------------------------------------------------------|----------| -| OPENAI_API_KEY | Your openai account API key | | -| OPENAI_MODEL | OpenAi model used to perform prompts (see [models list](https://openai.com/api/pricing/)) | `gpt-4o` | -| AI_MAXIMUM_CALL_NUMBER | Maximum allowed number of calls to OpenAi API during a single sfdx-hardis command | `10` | \ No newline at end of file +If you follow Flows best practices and **do not hardcode credentials / tokens in variables**, there is no serious risk to send metadata XML to an external LLM (**but be aware that you do !**) + +You can see the prompts content if you set env variable `DEBUG_PROMPTS=true`. + +See the [list of prompts used by sfdx-hardis](salesforce-ai-prompts.md) , and how to override them. + +| If you use AI for generated project documentation, it is highly recommended to run it locally the first time to generate and commit AI cache, as it can make hundreds of API calls, so take some time. + +## Main configuration + +> You're lost ? 
Contact [Cloudity](https://cloudity.com/#form), we can do it for you :) + +### Common variables + +| Variable | Description | Default | +|------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| AI_MAXIMUM_CALL_NUMBER | Maximum allowed number of calls to AI Providers during a single sfdx-hardis command | `10000` | +| PROMPTS_LANGUAGE | Language to use for prompts results (`en`,`fr`, or any [ISO Language code](https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes)) | `en` | +| DEBUG_PROMPTS | Set to true if you want prompts requests and responses in logs | `false` | +| MAX_DEPLOYMENT_TIPS_AI_CALLS | Maximum number of errors that will be analyzed by AI for a single Pull Request | `20` | +| DISABLE_AI | In case you want to disable API calls to API without removing your configuration, set to true | `false` | +| IGNORE_AI_CACHE | Some processes like Flow description use AI cache files to save calls to prompts API, disable by setting to true | `false` | +| AI_MAX_TIMEOUT_MINUTES | If you are running sfdx-hardis from a CI/CD job, AI will stop being called after 30 minutes, to not mess with the timeouts of other jobs.
You can increase this value to however many minutes you want :) | `30` | + +### With Agentforce + +- Agentforce must be activated on the default org used when you call the sfdx-hardis command + +> You can do that with Salesforce Freemium feature [Salesforce Foundations](https://www.salesforce.com/crm/foundations/), that offers 200000 Einstein Prompts + +![Salesforce Foundations free tier](assets/images/foundations.png) + +- A prompt template **SfdxHardisGenericPrompt** (type `Flex`) must exist in the default org, with input variable **PromptText** (type `FreeText`) +- The connected user must be assigned to permission set **Prompt Template User** + +| Variable | Description | Default | +|------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------| +| USE_AGENTFORCE | Set to true to activate the use of Agentforce prompts | false | +| GENERIC_AGENTFORCE_PROMPT_TEMPLATE | Set this variable to override default prompt template | `SfdxHardisGenericPrompt` | +| GENERIC_AGENTFORCE_PROMPT_URL | Set this variable to override default prompt url | `/services/data/v{{API_VERSION}}/einstein/prompt-templates/{{GENERIC_AGENTFORCE_PROMPT_TEMPLATE}}/generations` | +| SFDX_AUTH_URL_TECHNICAL_ORG | If you want to use another org to call Agentforce (like a [Developer Org](https://developer.salesforce.com/signup) just to test the feature), you can define this variable (get Auth Url using `sf org display --verbose --json`) | | + +![](assets/images//screenshot-agentforce-config-1.jpg) + +![](assets/images//screenshot-agentforce-config-2.jpg) + +### With LangChain + +[LangChainJs](https://js.langchain.com/docs/integrations/chat/) provides a unified interface to work with multiple LLM 
providers. This way to use AI provides better extensibility and future-proofing to extend support for more providers. + +Currently supported LangchainJS providers: + +- Ollama +- OpenAI +- Anthropic +- Google GenAI + +| Variable | Description | Default | +|-----------------------------|-------------------------------------------------------------------------------------------------|----------------------------------| +| USE_LANGCHAIN_LLM | Set to true to use LangChain integration | `false` | +| LANGCHAIN_LLM_PROVIDER | The LLM provider to use (currently supports `ollama`, `openai`, `anthropic` and `google-genai`) | | +| LANGCHAIN_LLM_MODEL | The model to use with the selected provider (e.g. `gpt-4o`, `qwen2.5-coder:14b`) | | +| LANGCHAIN_LLM_MODEL_API_KEY | API key for the selected provider (required for OpenAI, Anthropic and Gemini) | | +| LANGCHAIN_LLM_TEMPERATURE | Controls randomness (0-1) | | +| LANGCHAIN_LLM_MAX_TOKENS | Maximum number of tokens to generate | | +| LANGCHAIN_LLM_MAX_RETRIES | Number of retries for failed requests | | +| LANGCHAIN_LLM_BASE_URL | Base URL for the API (mainly for Ollama) | Ollama: `http://localhost:11434` | + +#### Example configurations + +For Ollama: + +- Visit [Ollama's official website](https://ollama.ai/) and download the appropriate version for your operating system +- Follow the installation instructions for your platform +- After installation, pull your preferred model e.g. 
`ollama pull qwen2.5-coder:14b` and start the Ollama service with `ollama serve` + +```sh +USE_LANGCHAIN_LLM=true +LANGCHAIN_LLM_PROVIDER=ollama +LANGCHAIN_LLM_MODEL=qwen2.5-coder:14b +LANGCHAIN_LLM_TEMPERATURE=1 +LANGCHAIN_LLM_BASE_URL=http://localhost:11434 +``` + +For OpenAI: + +```sh +USE_LANGCHAIN_LLM=true +LANGCHAIN_LLM_PROVIDER=openai +LANGCHAIN_LLM_MODEL=gpt-4o-mini +LANGCHAIN_LLM_MODEL_API_KEY=your-api-key +LANGCHAIN_LLM_TEMPERATURE=0.7 +LANGCHAIN_LLM_MAX_TOKENS=2000 +``` + +For Anthropic: + +```sh +USE_LANGCHAIN_LLM=true +LANGCHAIN_LLM_PROVIDER=anthropic +LANGCHAIN_LLM_MODEL=claude-3.5-sonnet +LANGCHAIN_LLM_MODEL_API_KEY=your-api-key +LANGCHAIN_LLM_TEMPERATURE=0.7 +LANGCHAIN_LLM_MAX_TOKENS=2000 +``` + +For Google Gen AI: + +```sh +USE_LANGCHAIN_LLM=true +LANGCHAIN_LLM_PROVIDER=google-genai +LANGCHAIN_LLM_MODEL=gemini-1.5-pro +LANGCHAIN_LLM_MODEL_API_KEY=your-api-key +``` + +### With OpenAI Directly + +You need to define env variable OPENAI_API_KEY and make it available to your CI/CD workflow. + +To get an OpenAi API key , register on [OpenAi Platform](https://platform.openai.com/). + +| Variable | Description | Default | +|----------------|-------------------------------------------------------------------------------------------|---------------| +| OPENAI_API_KEY | Your openai account API key | | +| OPENAI_MODEL | OpenAi model used to perform prompts (see [models list](https://openai.com/api/pricing/)) | `gpt-4o-mini` | + +## Templates + +You can override default prompts by defining the following environment variables. 
+ +| Prompt Template | Description | Variables | +|--------------------------------------|-----------------------------------------------------------------------------------------------------|:-----------------------------------------------------------:| +| PROMPT_SOLVE_DEPLOYMENT_ERROR | Ask AI about how to solve a deployment error | ERROR | +| PROMPT_DESCRIBE_FLOW | Describe a flow from its XML | FLOW_XML | +| PROMPT_DESCRIBE_FLOW_DIFF | Describe the differences between 2 flow versions by comparing their XML | FLOW_XML_NEW, FLOW_XML_PREVIOUS | +| PROMPT_DESCRIBE_OBJECT | Describe Object using sfdx-hardis generated info based on project metadatas | OBJECT_NAME, OBJECT_XML, ALL_OBJECTS_LIST, ALL_OBJECT_LINKS | +| PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD | Complete fields and validation rules descriptions in input markdown tables generated by sfdx-hardis | OBJECT_NAME, MARKDOWN | +| PROMPT_DESCRIBE_APEX | Describe an Apex class from its code | CLASS_NAME, APEX_CODE | diff --git a/docs/salesforce-ci-cd-clone-repository.md b/docs/salesforce-ci-cd-clone-repository.md index 1c6ca36a1..cbcc9ad30 100644 --- a/docs/salesforce-ci-cd-clone-repository.md +++ b/docs/salesforce-ci-cd-clone-repository.md @@ -61,7 +61,7 @@ If later, it prompts several times the same password in VsCode, run the followin ![](assets/images/git-clone-3.jpg){ align=center } -- You are now ready to [create a new task](salesforce-ci-cd-create-new-task.md) ! +- You are now ready to [create a new User Story](salesforce-ci-cd-create-new-task.md) ! ![](assets/images/git-clone-4.jpg){ align=center } diff --git a/docs/salesforce-ci-cd-config-cleaning.md b/docs/salesforce-ci-cd-config-cleaning.md index 5a49a43f1..e1a3f6c95 100644 --- a/docs/salesforce-ci-cd-config-cleaning.md +++ b/docs/salesforce-ci-cd-config-cleaning.md @@ -22,7 +22,7 @@ ___ Salesforce CI/CD Pipelines does not natively work without many manual operations to update the XML... so the deployments passes ! 
-sfdx-hardis provides a set of commands to automate those boring XML updates that can be called every time a user [prepares a merge request](salesforce-ci-cd-publish-task.md#prepare-merge-request) using command [sfdx hardis:work:save](https://sfdx-hardis.cloudity.com/hardis/work/save/) +sfdx-hardis provides a set of commands to automate those boring XML updates that can be called every time a user [prepares a merge request](salesforce-ci-cd-publish-task.md#prepare-merge-request) using command [sf hardis:work:save](https://sfdx-hardis.cloudity.com/hardis/work/save/) Here is the list of available automated cleanings, that can also be called manually using command ![](assets/images/btn-clean-sources.jpg) diff --git a/docs/salesforce-ci-cd-config-delta-deployment.md b/docs/salesforce-ci-cd-config-delta-deployment.md index 766716948..090261cc8 100644 --- a/docs/salesforce-ci-cd-config-delta-deployment.md +++ b/docs/salesforce-ci-cd-config-delta-deployment.md @@ -7,9 +7,11 @@ description: Learn how to configure Delta Deployments using sfdx-git-delta on a - [Delta deployments](#delta-deployments) - [Full mode](#full-mode) - [Delta mode](#delta-mode) + - [Delta with dependencies (beta)](#delta-with-dependencies-beta) - [Configuration](#configuration) - [Base](#base) - - [Advanced](#advanced) + - [With Dependencies (beta)](#with-dependencies-beta) + - [Miscellaneous](#miscellaneous) ___ @@ -45,7 +47,34 @@ Examples: - **hotfixes/fix-stuff to preprod** will be **DELTA** DEPLOYMENT - **preprod to production** will be **FULL** DEPLOYMENT -> 💡 If you want to **force the use of full deployment for a PR/MR** on a delta project, add "**nodelta**" in your latest commit title or text. +> 💡 If you want to **force the use of full deployment for a PR/MR** on a delta project, add "**NO_DELTA**" in your latest commit title or text, or in your Pull Request description. 
+ +___ + +### Delta with Dependencies (beta) + +Sometimes, using pure delta deployment is not enough: for example, if you delete a picklist value, simple delta deployment will pass, but later full deployment will fail because some references to the deleted value are remaining, like in Record Types, translations... + +[Stepan Stepanov](https://www.linkedin.com/in/stepan-stepanov-79a48734/) implemented a smart way to handle that with sfdx-hardis: Delta with dependencies. + +Delta with dependencies mode leverages a set of processors defined in `src/common/utils/deltaUtils.ts` to automatically detect and include related metadata dependencies in your deployment package. These processors analyze changes and ensure that dependent components are also deployed, reducing the risk of deployment failures due to missing references. + +**List of supported dependency processors:** + +- **CustomFieldPicklistProcessor**: Handles picklist value changes, ensuring related Record Types and translations are included. +- **CustomFieldProcessor**: Detects changes to custom fields and adds dependent layouts, validation rules, and field sets. +- **ObjectProcessor**: Manages object-level changes, including triggers, sharing rules, and compact layouts. +- **ProfilePermissionProcessor**: Ensures profile and permission set updates are deployed when related metadata changes. +- **RecordTypeProcessor**: Includes Record Type dependencies when fields or picklist values are modified. +- **TranslationProcessor**: Adds translation files for changed metadata, such as labels and picklist values. +- **WorkflowProcessor**: Handles workflow rule dependencies, including field updates and alerts. +- **LayoutProcessor**: Ensures layout changes are deployed when fields or objects are updated. + +> ℹ️ The list of processors may evolve as new metadata types and dependency scenarios are supported. 
For the latest details, refer to the [deltaUtils.ts source](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/deltaUtils.ts). + +**How it works:** + +When delta with dependencies is enabled, sfdx-hardis analyzes the changed files and applies each processor to detect and add required dependencies. This ensures that your deployment package contains all necessary components for a successful deployment, even in complex scenarios involving cross-referenced metadata. ___ @@ -66,10 +95,22 @@ In case of temporary deactivation of delta deployments, you can set variable `DI It is recommended to use opinionated default sfdx-hardis delta deployment configuration, but if you want to tweak the config you can use the following variables: -### Advanced +### With dependencies (beta) + +Delta mode must be activated and applicable to allow delta with dependencies to be activated. + +You can either: + +- Define `useDeltaDeploymentWithDependencies: true` in **config/.sfdx-hardis.yml** +- Define env var `USE_DELTA_DEPLOYMENT_WITH_DEPENDENCIES=true` + +### Miscellaneous + +> Standard sfdx-hardis pipeline does not recommend to use these modes, but if you really know what you're doing, like the artists of [BeyondTheCloud.dev](https://blog.beyondthecloud.dev/for-developers), you can use them :) - USE_DELTA_DEPLOYMENT_AFTER_MERGE - By default, after a merge sfdx-hardis will try to use [QuickDeploy](salesforce-ci-cd-setup-integrations-home.md#git-providers). If not available, it will perform a full deployment. If you want to use a delta deployment anyway, define `USE_DELTA_DEPLOYMENT_AFTER_MERGE=true` - ALWAYS_ENABLE_DELTA_DEPLOYMENT - - By default, delta deployment is allowed only from minor to major branches. You can force it for PR/MRs between major branches by defining variable `ALWAYS_ENABLE_DELTA_DEPLOYMENT=true` \ No newline at end of file + - By default, delta deployment is allowed only from minor to major branches. 
You can force it for PR/MRs between major branches by defining variable `ALWAYS_ENABLE_DELTA_DEPLOYMENT=true` + diff --git a/docs/salesforce-ci-cd-config-home.md b/docs/salesforce-ci-cd-config-home.md index 9b5c9a1ea..8076092d2 100644 --- a/docs/salesforce-ci-cd-config-home.md +++ b/docs/salesforce-ci-cd-config-home.md @@ -4,6 +4,7 @@ description: Learn how to configure your Salesforce CI/CD project so it works ea --- +- [New User Story Options](#new-user-story-options) - [package.xml](#packagexml) * [Overwrite Management](#overwrite-management) * [Delta deployments](#delta-deployments) @@ -12,6 +13,10 @@ description: Learn how to configure your Salesforce CI/CD project so it works ea - [Source retrieve issues](#source-retrieve-issues) - [All configuration properties](#all-configuration-properties) +## New User Story options + +Look at all the [Overwrite properties of new User Story command](https://sfdx-hardis.cloudity.com/hardis/work/new/) to define the appropriate values of your project. + ## package.xml A Salesforce CI/CD repository contains a file **manifest/package.xml**. 
diff --git a/docs/salesforce-ci-cd-create-new-task.md b/docs/salesforce-ci-cd-create-new-task.md index b506fc6f5..4913af486 100644 --- a/docs/salesforce-ci-cd-create-new-task.md +++ b/docs/salesforce-ci-cd-create-new-task.md @@ -1,6 +1,6 @@ --- -title: Create new task on a Salesforce DX project -description: Learn how to create a new task using a tracked sandbox or a scratch org +title: Create new User Story on a Salesforce DX project +description: Learn how to create a new User Story using a tracked sandbox or a scratch org --- @@ -8,13 +8,13 @@ description: Learn how to create a new task using a tracked sandbox or a scratch - [Sandbox or scratch org ?](#sandbox-or-scratch-org-) - [Source-tracked sandbox mode](#source-tracked-sandbox-mode) - [Pre-requisites (sandbox)](#pre-requisites-sandbox) - - [Start new task on sandbox](#start-new-task-on-sandbox) + - [Start new User Story on sandbox](#start-new-user-story-on-sandbox) - [Select sandbox](#select-sandbox) - [Update your sandbox ?](#update-your-sandbox-) - [Work !](#work-) - [Scratch Org mode](#scratch-org-mode) - [Pre-requisites (scratch org)](#pre-requisites-scratch-org) - - [Start new task on scratch org](#start-new-task-on-scratch-org) + - [Start new User Story on scratch org](#start-new-user-story-on-scratch-org) ___ @@ -50,13 +50,13 @@ It is recommended to create sandbox from the org that is related to the target g ![](assets/images/sandbox-create.jpg){ align=center } -### Start new task on sandbox +### Start new User Story on sandbox #### Select sandbox - Open VsCode SFDX Hardis extension by clicking on ![Hardis Group button](assets/images/hardis-button.jpg) in VsCode left bar _(loading can task several seconds)_ -- Click on ![Start a new task](assets/images/btn-start-new-task.jpg) +- Click on ![Start a new User Story](assets/images/btn-start-new-task.jpg) - Answer the questions then **select Sandbox** when prompted - If the sandbox is not proposed in the list, select the option to authenticate to your 
desired sandbox @@ -93,11 +93,11 @@ ___ You need the credentials to login on the **Dev Hub Org** (usually the production environment) -### Start new task on scratch org +### Start new User Story on scratch org - Open VsCode SFDX Hardis extension by clicking on ![Hardis Group button](assets/images/hardis-button.jpg) in VsCode left bar -- Click on ![Start a new task](assets/images/btn-start-new-task.jpg) +- Click on ![Start a new User Story](assets/images/btn-start-new-task.jpg) - Answer the questions then **select Scratch Org** when prompted - If the sandbox is not proposed in the list, select the option to authenticate to your desired sandbox diff --git a/docs/salesforce-ci-cd-home.md b/docs/salesforce-ci-cd-home.md index 4ad8c210d..6221b8174 100644 --- a/docs/salesforce-ci-cd-home.md +++ b/docs/salesforce-ci-cd-home.md @@ -17,28 +17,41 @@ There are many ways to do DevOps with Salesforce, each of them have their advant ![](assets/images/devops-comparison.png){ align=center } -You can setup and use a full CI/CD pipeline for your Salesforce projects using sfdx-hardis, with advanced features: +Why choose sfdx-hardis ? 
-- [Overwrite Management](salesforce-ci-cd-config-overwrite.md) -- [Delta Deployments](salesforce-ci-cd-config-delta-deployment.md) -- [Automated sources cleaning](salesforce-ci-cd-config-cleaning.md) -- [Messaging platforms integrations](salesforce-ci-cd-setup-integrations-home.md) (Slack, Microsoft Teams) +- **Admins** are autonomous to [build their pull requests](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) **with clicks in VsCode Extension, no command lines** +- **Developers** understand what's happening under the hood with SF CLI thanks to the **Advanced mode** of the User Interface +- **Release Managers** can easily configure the CI/CD process using the **Visual DevOps Pipeline Builder** +- **Project Managers** can easily follow the Application Lifecycle Management on the project using sfdx-hardis native integration with Ticketing systems like JIRA and Azure Boards. -We provide ready to use CI/CD pipelines for the following Git platforms: +More advanced features are making sfdx-hardis a credible alternative to expensive SF DevOps vendor tools: + +- [**Delta Deployments**](salesforce-ci-cd-config-delta-deployment.md): Improve performances by deploying only updated metadatas +- [**Overwrite Management**](salesforce-ci-cd-config-overwrite.md): Define which metadatas will never be overwritten if they are already existing in the target org of a deployment +- [**Smart Apex Test Runs**](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/#smart-deployments-tests): If your Pull Request to a sandbox can not break Apex Tests, just don't run them to improve performances. +- [**Automated sources cleaning**](salesforce-ci-cd-config-cleaning.md): Clean profiles from attributes existing on permission sets, clean flow positions... 
+- [**Integration with Messaging platforms**](salesforce-ci-cd-setup-integrations-home.md): Receive detailed deployment notifications on **Slack**, **Microsoft Teams** and Emails +- Integration with **ticketing systems**: [**JIRA**](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-jira/), [**Azure Boards**](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure-boards/), or [any other tool](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-generic-ticketing/) +- [**Integration with AI**](salesforce-deployment-assistant-home.md) to easily solve Deployment issues + +We provide **ready to use CI/CD pipeline workflows** for the following Git platforms, with results of Deployment simulation jobs as comments on Pull Requests: - [Gitlab](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/ci/.gitlab-ci.yml) - [Azure](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/ci/azure-pipelines-checks.yml) -- [Github](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/ci/.github/workflows/deploy.yml) +- [Github & Gitea](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/ci/.github/workflows/deploy.yml) - [BitBucket](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/ci/bitbucket-pipelines.yml) +- [Jenkins](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/ci/Jenkinsfile) -Pipelines can easily be adapted to other platforms like [Jenkins](https://www.jenkins.io/) or [TeamCity](https://www.jetbrains.com/teamcity/) +Pipelines can easily be adapted to other platforms like [TeamCity](https://www.jetbrains.com/teamcity/) If you speak fluently **git**, **sfdx** & **DevOps**, you can be fully autonomous to setup and use Salesforce CI/CD, otherwise you can contact us at [**Cloudity**](https://cloudity.com/) (or your favorite Salesforce integrator) and we'll be glad to assist you :) 
-[![Questions/Answers](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-questions-answers.jpg)](https://nicolas.vuillamy.fr/what-devops-experts-want-to-know-about-salesforce-ci-cd-with-sfdx-hardis-q-a-1f412db34476) - As everything is **open-source**, there is **no license costs** ! +![DevOps Pipeline](assets/images/DevOpsPipelineUI.png) + +[![Questions/Answers](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-questions-answers.jpg)](https://nicolas.vuillamy.fr/what-devops-experts-want-to-know-about-salesforce-ci-cd-with-sfdx-hardis-q-a-1f412db34476) + _See presentation of sfdx-hardis at Dreamforce !_
diff --git a/docs/salesforce-ci-cd-hotfixes.md b/docs/salesforce-ci-cd-hotfixes.md index df084f52b..b3ef46827 100644 --- a/docs/salesforce-ci-cd-hotfixes.md +++ b/docs/salesforce-ci-cd-hotfixes.md @@ -6,7 +6,11 @@ description: Learn how to handle hotfixes in a sfdx-hardis CI/CD environment, ha ## BUILD & RUN -Except for projects in maintenance that contain RUN only, a project is separated into 2 sections: the BUILD and the RUN +Except for projects in maintenance that contain RUN only, a project is separated into 2 sections: + +- the RUN stream: Fast cycle, to often deploy minor changes and fixes + +- the BUILD stream: Project cycle, to build more advanced features and enhancements, that require User Acceptance Testing ![](assets/images/ci-cd-schema-build-run.jpg) @@ -22,17 +26,19 @@ Is it important that **major features or enhancements are not tested directly at ### THE RUN -Daily maintenance of the production Salesforce org must be very reactive, the RUN level will allow you to **deploy patch versions**. +Daily maintenance of the production Salesforce org must be very reactive, the RUN level will allow you to often **deploy patch versions**. -As we usually can not wait for the next minor or major version to be deployed in production, projects need a way to quickly deploy hot fixes into production. That layer is called the RUN, and is exclusively about **preprod** and **production branches**. +As we usually can not wait for the next minor or major version to be deployed in production, projects need a way to quickly deploy hot fixes into production. That layer is called the RUN, and is exclusively about **preprod** and **main** branches. To summarize, you will **publish at RUN level, but also at BUILD level**, so when the BUILD will be merged in the RUN, **there will be no overwrite triggering regressions**. 
The **hotfix process** is the following: +_Note: in this example, we merge directly in **preprod**, but in more advanced organizations we can define a branch/org **uat_run** as intermediate layer before merging to preprod_ + - IMPLEMENT **HOTFIX** _**(1)**_ - - Create new task with **preprod as target when prompted**, named `my-very-hot-hotfix` for example + - Create new User Story with **preprod as target when prompted**, named `my-very-hot-hotfix` for example - Work on a dev sandbox that has been cloned from production @@ -40,12 +46,19 @@ The **hotfix process** is the following: - Create Pull Request from `my-very-hot-hotfix` to `preprod` & merge it after controls (do not select “delete after merge” ) - - Create Pull request from `preprod` to `production` + - Create Pull request from `preprod` to `main` - - Merge `preprod` to `production` after control checks are green. + - Merge `preprod` to `main` after control checks are green. - RETROFIT IN **BUILD LAYER** _**(3)**_ - - Create Pull Request from `my-very-hot-hotfix` to `integration` & merge it after controls + - Create a sub-branch to `integration`, named `retrofit-from-run` for example + + - Using Git IDE, manually merge `main` (or `preprod`) branch into `retrofit-from-run` + + - If there are git conflicts, solve them before committing + + - Create Pull Request from `retrofit-from-run` to `integration` - - If there are issues (conflicts…), you can solve them directly in my-very-hot-hotfix then complete your retrofit PR to integration + - Merge the Pull Request into `integration`: your retrofit from the RUN to the BUILD is over :) + - You might refresh dev sandboxes if your retrofits have lots of impacts \ No newline at end of file diff --git a/docs/salesforce-ci-cd-packaging.md b/docs/salesforce-ci-cd-packaging.md index 865795aa2..6647ccb5b 100644 --- a/docs/salesforce-ci-cd-packaging.md +++ b/docs/salesforce-ci-cd-packaging.md @@ -16,7 +16,7 @@ sfdx-hardis menus allow to create new packages and new package 
versions, but to - Instructions in [Salesforce Documentation](https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_dev2gp_create_namespace.htm) -- Then you need a sfdx-hardis flavored sfdx project, that you can create with `sfdx hardis:project:create`, using scratch orgs only option. +- Then you need a sfdx-hardis flavored sfdx project, that you can create with `sf hardis:project:create`, using scratch orgs only option. - Last, create your package folder (ex: `my-package`) at the root of the repository, following the same structure than `force-app`: it will contain your package content. @@ -120,9 +120,9 @@ Once you merged your PR in packaging branch (after checking the control jobs are To promote a package version, run the following command -`sfdx force:package:version:promote -p PACKAGE_VERSION_ID` +`sf hardis:package:version:promote` -Example: `sfdx force:package:version:promote -p 04t7S000000gYp7QAG` +Example: `sf hardis:package:version:promote` Note: When later you will **create a new scratch org** in a new development branch, if you have issues, just increment again the `versionNumber` in `sfdx-project.json` diff --git a/docs/salesforce-ci-cd-publish-task.md b/docs/salesforce-ci-cd-publish-task.md index 4a817b12d..ff0b42028 100644 --- a/docs/salesforce-ci-cd-publish-task.md +++ b/docs/salesforce-ci-cd-publish-task.md @@ -1,5 +1,5 @@ --- -title: Publish a task on a Salesforce CI/CD project +title: Publish a User Story on a Salesforce CI/CD project description: Learn how to commit and create a merge request --- @@ -24,7 +24,7 @@ _The following video shows how to perform theses operations_ ### Retrieve metadatas -If you made updates on your org that you have not pulled yet, Use command ![Pull from org button](assets/images/btn-pull-from-org.jpg) to **pull your latest updates in local files** +If you made updates on your org that you have not pulled yet, Use command [![Pull from org 
button](assets/images/btn-pull-from-org.jpg)](https://sfdx-hardis.cloudity.com/hardis/scratch/pull/) to **pull your latest updates in local files** If you updated config elements that you do not see in your local files, you may discuss with your release manager to [automate force retrieve metadatas](salesforce-ci-cd-retrieve.md) @@ -66,7 +66,7 @@ ___ ## Prepare merge request -- **Once your commit is completed**, run command ![Save / publish my current task button](assets/images/btn-save-publish-task.jpg) to prepare your merge request. +- **Once your commit is completed**, run command ![Save User Story button](assets/images/btn-save-publish-task.jpg) to prepare your merge request. - As you committed your files like explained in the previous section, select option ![Message my commit is ready](assets/images/msg-commit-ready.jpg) when prompted. diff --git a/docs/salesforce-ci-cd-pull-request-github.md b/docs/salesforce-ci-cd-pull-request-github.md index 2270397cd..13145b8e0 100644 --- a/docs/salesforce-ci-cd-pull-request-github.md +++ b/docs/salesforce-ci-cd-pull-request-github.md @@ -6,6 +6,24 @@ description: Learn how to create a pull request using GitHub on a Salesforce CI/ ## Create a Pull Request using GitHub -Not documented yet, see [Create a pull request using Azure](salesforce-ci-cd-pull-request-azure.md), it's almost the same ! +- Go in your online repository in your web browser (example: `https://github.com/mycompany/trailheadapps/dreamhouse-lwc`) + +- Go to the **Pull Requests** tab + +- Click on **New pull request** + +![](assets/images/github-pull-request-1.png){ align=center } + +- Select the **base** as the target environment/branch and **compare** as the branch you've made the changes. Click **Create Pull Request** + +![](assets/images/github-pull-request-2.png){ align=center } + +- Add a meaningful title and description. 
Click on **Create Pull Request** + +![](assets/images/github-pull-request-create.png){ align=center } + +- Controlling jobs are automatically launched, you can now ask your release manager to [**validate the merge request**](salesforce-ci-cd-validate-merge-request.md) + - _If you are a developer, (or even a business consultant depending on the project organization), you may have the responsibility to make sure that controlling jobs are valid (**check-deploy job** and **code-quality job** in **success**) and eventually fix the errors (See [Handle merge requests errors](salesforce-ci-cd-handle-merge-request-results.md))_ + +- If you need to add additional updates to an existing merge request, you just need to follow again [this guide](salesforce-ci-cd-publish-task.md) from the beginning, except the part "Create a merge request". Any new commit pushed on a branch where there is already a merge request will trigger again the [control jobs](salesforce-ci-cd-validate-merge-request.md#control-jobs). -You can also create a pull request [here](https://github.com/hardisgroupcom/sfdx-hardis) to update this documentation :) diff --git a/docs/salesforce-ci-cd-retrieve.md b/docs/salesforce-ci-cd-retrieve.md index 79e6699e5..582762ace 100644 --- a/docs/salesforce-ci-cd-retrieve.md +++ b/docs/salesforce-ci-cd-retrieve.md @@ -6,7 +6,7 @@ description: Learn how to retrieve updated metadatas when force:source:pull forg ## Automated force retrieve -It happens that when calling [sfdx hardis:scratch:pull](https://sfdx-hardis.cloudity.com/hardis/scratch/pull/), some elements are not retrieved. +It happens that when calling [sf hardis:scratch:pull](https://sfdx-hardis.cloudity.com/hardis/scratch/pull/), some elements are not retrieved. 
The most usual cases are updates on: diff --git a/docs/salesforce-ci-cd-setup-activate-org.md b/docs/salesforce-ci-cd-setup-activate-org.md index 942902051..11abe9fe3 100644 --- a/docs/salesforce-ci-cd-setup-activate-org.md +++ b/docs/salesforce-ci-cd-setup-activate-org.md @@ -1,10 +1,12 @@ --- -title: Activate Dev Hub and Sandbox Tracking -description: Learn how to activate Dev Hub and Sandbox Tracking on a CI/CD Salesforce project +title: Configure Orgs +description: Learn how to activate Dev Hub and Sandbox Tracking on a CI/CD Salesforce project, and apply org settings --- -## Dev Hub and sandbox tracking +## Production Org Settings + +### Dev Hub and Sandbox Tracking You must declare an org (usually production) as a DevHub and activate sandbox tracking to be able to work with advanced features of Salesforce DX @@ -18,6 +20,12 @@ You must declare an org (usually production) as a DevHub and activate sandbox tr - If sandbox were already existing, you need to refresh them if you want their source tracking to be activated - To use **Create from** from an existing sandbox, you need to refresh it before the cloning, else the nex sandbox won't have the tracking activated +### Activate Experience Bundle Metadata + +- Go to Setup -> Digital Experiences + +- Activate **Enable ExperienceBundle Metadata API** + ## Major orgs When there is a new state of a major branch (after a merge), a deployment to the related major Org will be automatically triggered by the CI server. 
diff --git a/docs/salesforce-ci-cd-setup-auth.md b/docs/salesforce-ci-cd-setup-auth.md index b5879ca75..8141df58b 100644 --- a/docs/salesforce-ci-cd-setup-auth.md +++ b/docs/salesforce-ci-cd-setup-auth.md @@ -11,7 +11,7 @@ To automate [deployments from major branches to their related org](salesforce-ci Note: _You need [openssl](https://www.openssl.org/) installed on your computer (available in `Git bash`)_ - Remain in your initialization branch `cicd`, or a sub branch of your lowest level major branch (usually `integration`) -- For each major branch to link to an org, run the sfdx-hardis command **Configuration ->** ![Configure Org CI Authentication](assets/images/btn-configure-ci-auth.jpg) (`sfdx hardis:project:configure:auth`) +- For each major branch to link to an org, run the sfdx-hardis command **Configuration ->** ![Configure Org CI Authentication](assets/images/btn-configure-ci-auth.jpg) (`sf hardis:project:configure:auth`) For example, run the command for `integration`, `uat`, `preprod` and `production` major branches. @@ -49,7 +49,7 @@ If you are **using scratch orgs**, you need to also **configure authentication f To do that, run the following command ```shell -sfdx hardis:project:configure:auth --devhub +sf hardis:project:configure:auth --devhub ``` diff --git a/docs/salesforce-ci-cd-setup-existing-org.md b/docs/salesforce-ci-cd-setup-existing-org.md index bf352054d..7c0af34c4 100644 --- a/docs/salesforce-ci-cd-setup-existing-org.md +++ b/docs/salesforce-ci-cd-setup-existing-org.md @@ -2,6 +2,7 @@ title: Initialize sfdx sources from Salesforce org description: Learn how to initialize sfdx sources from a Salesforce org --- + If this is a new Salesforce project, or if you want to setup CI/CD in **incremental mode**, you can skip this step and directly go to [Create first merge request](#create-first-merge-request). @@ -24,16 +25,16 @@ If you want to go for a **full init setup**, follow the steps below ! 
- Run the following command that will retrieve locally all the metadatas of production org -`sfdx hardis:org:retrieve:sources:dx --shape -u YOURSOURCEORGUSERNAME` +`sf hardis:org:retrieve:sources:dx --shape -u YOURSOURCEORGUSERNAME` - In case you get an error: - Run the generate package xml command : [hardis:org:generate:packagexmlfull](https://sfdx-hardis.cloudity.com/hardis/org/generate/packagexmlfull/) - Clean up the generated package created by removing the unnecessary metadatas - - Run retrieve metadata command : [hardis:source:retrieve](https://sfdx-hardis.cloudity.com/hardis/source/retrieve/) + - Run retrieve metadata command : [sf project:retrieve:start](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_retrieve_start_unified) Example : - -- `sfdx hardis:org:generate:packagexmlfull --targetusername nico@example.com --outputfile ./packagexmlfull.xml` + +- `sf hardis:org:generate:packagexmlfull --targetusername nico@example.com --outputfile ./packagexmlfull.xml` - Remove Document part on packagexmlfull.xml ```xml @@ -43,10 +44,7 @@ Example : Document ``` -- `sfdx hardis:source:retrieve -x ./packagexmlfull.xml` - - - +- `sf project:retrieve:start -x ./packagexmlfull.xml --ignore-conflicts` ## Automated Metadatas Cleaning @@ -59,7 +57,7 @@ Proceed to the following steps to automatically remove many of them, then procee Run the following command to delete all elements with a namespace. ```shell -sfdx hardis:project:clean:manageditems --namespace SOMENAMESPACE +sf hardis:project:clean:manageditems --namespace SOMENAMESPACE ``` ### Remove (hidden) files @@ -67,7 +65,7 @@ sfdx hardis:project:clean:manageditems --namespace SOMENAMESPACE Some items have no namespace but are managed anyway, and contain `(hidden)`, so they must me deleted with the following command. 
```shell -sfdx hardis:project:clean:hiddenitems +sf hardis:project:clean:hiddenitems ``` ### Remove empty items @@ -75,7 +73,7 @@ sfdx hardis:project:clean:hiddenitems Some files are empty and do not need to be kept in repository, remove them using the following command. ```shell -sfdx hardis:project:clean:emptyitems +sf hardis:project:clean:emptyitems ``` ### Standard objects without custom @@ -85,7 +83,7 @@ The retrieve command pulled all standard objects and fields. Those which has never been customized do not need to remain in repository, delete them using the following command (that can take some time) ```shell -sfdx hardis:project:clean:standarditems +sf hardis:project:clean:standarditems ``` ## Manual Metadata Cleaning @@ -109,9 +107,11 @@ Manually delete files (or even folders) that are maintained directly in producti ## Retrieve installed packages -Run the following command to retrieve packages installed on production org +Use **DevOps Pipeline -> Installed Packages** to retrieve the list of Packages of your project. 
+ +See [Retrieve Packages Documentation](salesforce-ci-cd-work-on-task-install-packages.md) -`sfdx hardis:org:retrieve:packageconfig -u YOUR_PROD_ORG_USER` +> CLI Alternative: `sf hardis:org:retrieve:packageconfig -u YOUR_PROD_ORG_USER` This will update file **config/.sfdx-hardis.yml** diff --git a/docs/salesforce-ci-cd-setup-home.md b/docs/salesforce-ci-cd-setup-home.md index aebfaf47a..95b83a43f 100644 --- a/docs/salesforce-ci-cd-setup-home.md +++ b/docs/salesforce-ci-cd-setup-home.md @@ -22,7 +22,7 @@ ___ ## Setup steps - [Create git repository and configure branches](salesforce-ci-cd-setup-git.md) -- [Activate DevHub or Sandbox Tracking](salesforce-ci-cd-setup-activate-org.md) +- [Configure Orgs](salesforce-ci-cd-setup-activate-org.md) - [Initialize sfdx project](salesforce-ci-cd-setup-init-project.md) - [Configure authentication](salesforce-ci-cd-setup-auth.md) - [Retrieve sources from an existing org](salesforce-ci-cd-setup-existing-org.md) _(optional)_ diff --git a/docs/salesforce-ci-cd-setup-init-project.md b/docs/salesforce-ci-cd-setup-init-project.md index 71b493a49..fffe427e4 100644 --- a/docs/salesforce-ci-cd-setup-init-project.md +++ b/docs/salesforce-ci-cd-setup-init-project.md @@ -8,7 +8,7 @@ description: Learn how to initialize a Salesforce DX Project for CI/CD - Create a new git branch named **cicd** under your lower major branch (usually **integration**) -- Run command **Configuration ->** ![Create new sfdx project](assets/images/btn-create-project.jpg) (`sfdx hardis:project:create`) and select options to create a new sfdx-hardis project. +- Run command **Configuration ->** ![Create new sfdx project](assets/images/btn-create-project.jpg) (`sf hardis:project:create`) and select options to create a new sfdx-hardis project. 
- Open file **manifest/package.xml** and replace the content by the following code @@ -33,6 +33,8 @@ description: Learn how to initialize a Salesforce DX Project for CI/CD - Bitbucket - `bitbucket-pipelines.yml` +- During the CI/CD setup, find variable **SFDX_DISABLE_FLOW_DIFF** in your git provider pipeline and set its value to **true**, to avoid to generate too many PR comments during setup. At the end of the setup, you can set back the variable to false. + > Some workflow files contain additional configuration instructions, please read the comments at the beginning of the files ! You can now go to step [Setup CI Authentication](salesforce-ci-cd-setup-auth.md) diff --git a/docs/salesforce-ci-cd-setup-integration-api.md b/docs/salesforce-ci-cd-setup-integration-api.md index bc7489ed2..3e4c24e5a 100644 --- a/docs/salesforce-ci-cd-setup-integration-api.md +++ b/docs/salesforce-ci-cd-setup-integration-api.md @@ -4,7 +4,7 @@ description: Learn how to send notifications to external apis like Grafana --- -## API Integration (BETA) +## API Integration You can send notifications to an external API endpoints, for example to [build Grafana dashboards](#grafana-setup) diff --git a/docs/salesforce-ci-cd-setup-integration-azure.md b/docs/salesforce-ci-cd-setup-integration-azure.md index a1df6587b..7a1aee815 100644 --- a/docs/salesforce-ci-cd-setup-integration-azure.md +++ b/docs/salesforce-ci-cd-setup-integration-azure.md @@ -2,12 +2,15 @@ title: Configure Integrations between sfdx-hardis and Azure Pipelines description: Post Notes on Azure Repos Pull Request from CI jobs --- + ## Azure Pull Request notes In order to avoid to have to open job logs to see deployment errors, sfdx-hardis can post them as a thread on the Pull Request UI +### Global configuration + To use this capability: - A build policy must be defined @@ -18,6 +21,20 @@ To use this capability: ![Screenshot](assets/images/AzureReporterConfigContribute.jpg) +### Tech Work Item + +If you want to use **Flow Diff** 
(visual diff of flow differences): + +- Create an Azure boards ticket exactly named **sfdx-hardis tech attachments**. It will be used to link uploaded images. + + - If you prefer to name it differently, define variable `AZURE_ATTACHMENTS_WORK_ITEM_ID` with the number of your ticket as value. + +- An Azure Work item can have only 100 attached images, so frequently delete old image attachments, or delete the ticket then recreate it with the same name. + +![Screenshot](assets/images/az-tech-work-item.png) + +### Examples + Everytime you will make a pull request, the CI job will post its result as comment ! - Example with deployment errors @@ -34,7 +51,7 @@ Everytime you will make a pull request, the CI job will post its result as comme Notes: -- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) instead of `sfdx force:source:deploy` ! +- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sf hardis:project:deploy:start](https://sfdx-hardis.cloudity.com/hardis/project/deploy/start/) instead of `sf project:deploy:start` ! 
- This integration use the following variables: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) @@ -46,3 +63,4 @@ Notes: - SYSTEM_TEAMPROJECT: $(System.TeamProject) - BUILD_BUILD_ID: $(Build.BuildId) - BUILD_REPOSITORY_ID: $(Build.Repository.ID) + - AZURE_ATTACHMENTS_WORK_ITEM_ID (optional: identifier of the Work Items used to attach images) diff --git a/docs/salesforce-ci-cd-setup-integration-bitbucket.md b/docs/salesforce-ci-cd-setup-integration-bitbucket.md index 164c07323..06b9b5c56 100644 --- a/docs/salesforce-ci-cd-setup-integration-bitbucket.md +++ b/docs/salesforce-ci-cd-setup-integration-bitbucket.md @@ -24,7 +24,7 @@ Everytime you will make a pull request, the CI job will post its result as a com Notes: -- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) instead of `sfdx force:source:deploy` ! +- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sf hardis:project:deploy:start](https://sfdx-hardis.cloudity.com/hardis/project/deploy/start/) instead of `sf project:deploy:start` ! - This integration uses the following variables: - CI_SFDX_HARDIS_BITBUCKET_TOKEN diff --git a/docs/salesforce-ci-cd-setup-integration-email.md b/docs/salesforce-ci-cd-setup-integration-email.md index ad10162b8..3b390b757 100644 --- a/docs/salesforce-ci-cd-setup-integration-email.md +++ b/docs/salesforce-ci-cd-setup-integration-email.md @@ -27,6 +27,14 @@ Examples: - `NOTIF_EMAIL_ADDRESS=admin@cloudity.com` - `NOTIF_EMAIL_ADDRESS=admin@cloudity.com,another.user@cloudity.com,nico@cloudity.com` +## Troubleshooting + +If the emails are not sent, apply the following configuration on the Monitoring / Deployment user settings + +- Send through Salesforce + +![](assets/images/screenshot-email-config.jpg) + That's all, you're all set ! 
diff --git a/docs/salesforce-ci-cd-setup-integration-github.md b/docs/salesforce-ci-cd-setup-integration-github.md index 5c0d7ebd3..89a168ab9 100644 --- a/docs/salesforce-ci-cd-setup-integration-github.md +++ b/docs/salesforce-ci-cd-setup-integration-github.md @@ -32,8 +32,29 @@ Everytime you will make a Pull Request, the CI job will post its result as comme Notes: -- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) instead of `sfdx force:source:deploy` ! +- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sf hardis:project:deploy:start](https://sfdx-hardis.cloudity.com/hardis/project/deploy/start/) instead of `sf project:deploy:start` ! - This integration use the following variables: - GITHUB_TOKEN (provided by GitHub but has to be send as option to the deployment jobs) + +## Using GitHub integration without Github Actions + +You might want to use GitHub integration with other tools than GitHub Actions, like Jenkins or Codefresh + +In that case, to still benefit from GitHub integration, you need to make sure that the following variables are set. 
+ +| Variable | Description | +|:------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| GITHUB_TOKEN | You might need to Create a [GitHub Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) | +| GITHUB_REPOSITORY | ex: `MyClient/crm-salesforce` | +| GITHUB_REPOSITORY_OWNER | ex: `MyClient` | +| GITHUB_SERVER_URL | ex: `https://github.mycompanydomain.com` | +| GITHUB_API_URL | ex: `https://github.mycompanydomain.com/api` | +| GITHUB_GRAPHQL_URL | ex: `https://github.mycompanydomain.com/api/graphql` | +| GITHUB_WORKFLOW | ex: `Simulate Deployment (sfdx-hardis)` | +| GITHUB_REF | ex: `refs/pull/503/merge` | +| GITHUB_REF_NAME | ex: `503/merge` | +| GITHUB_RUN_ID | ex: `14282257027`. If you can't have it, to not set the variable. | +| PIPELINE_JOB_URL | Direct link to the page where we can see your job results. ex: `https://yourserver.com/jobs/345` | + diff --git a/docs/salesforce-ci-cd-setup-integration-gitlab.md b/docs/salesforce-ci-cd-setup-integration-gitlab.md index c9a215f90..a3c272d5a 100644 --- a/docs/salesforce-ci-cd-setup-integration-gitlab.md +++ b/docs/salesforce-ci-cd-setup-integration-gitlab.md @@ -32,7 +32,7 @@ Everytime you will make a merge request, the CI job will post its result as comm Notes: -- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) instead of `sfdx force:source:deploy` ! +- This integration works with sfdx-hardis pipeline, but also on home-made pipelines, just call [sf hardis:project:deploy:start](https://sfdx-hardis.cloudity.com/hardis/project/deploy/start/) instead of `sf project:deploy:start` ! 
- This integration use the following variables: - CI_SFDX_HARDIS_GITLAB_TOKEN diff --git a/docs/salesforce-ci-cd-setup-integration-jira.md b/docs/salesforce-ci-cd-setup-integration-jira.md index 265c61310..8e745f4c4 100644 --- a/docs/salesforce-ci-cd-setup-integration-jira.md +++ b/docs/salesforce-ci-cd-setup-integration-jira.md @@ -59,6 +59,8 @@ Example: `DEPLOYED_TO_{BRANCH}` Define CI/CD variable **JIRA_TICKET_REGEX** with a regular expression allowing to identify the JIRA tickets of your project in commit& Pull Requests titles & bodies, for example `(CLOUDITY-[0-9]+)` +If not defined, default value is `(?<=[^a-zA-Z0-9_-]|^)([A-Za-z0-9]{2,10}-\d{1,6})(?=[^a-zA-Z0-9_-]|$)` + ### Jira Cloud Define CI/CD variables diff --git a/docs/salesforce-ci-cd-setup-integration-ms-teams.md b/docs/salesforce-ci-cd-setup-integration-ms-teams.md index 5fc9ad0ad..50eaf5161 100644 --- a/docs/salesforce-ci-cd-setup-integration-ms-teams.md +++ b/docs/salesforce-ci-cd-setup-integration-ms-teams.md @@ -6,8 +6,14 @@ description: Learn how to configure Microsoft Teams notifications using Web Hook ## Ms Teams Integration -MS Teams Web Hooks will be deprecated soon and has been removed from sfdx-hardis. +MS Teams Web Hooks have been deprecated by Microsoft so have been removed from sfdx-hardis. 
Instead, please use [Email Notifications](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-email/) with the Ms Teams Channel Email as NOTIF_EMAIL_ADDRESS +To get MsTeams email channel, click on the channel contextual menu, then "Get channel E-mail Address" +![](assets/images/screenshot-teams-email-1.jpg) + +Then make sure that anyone can send emails to the channel by selecting the first option + +![](assets/images/screenshot-teams-email-2.jpg) \ No newline at end of file diff --git a/docs/salesforce-ci-cd-setup-integrations-home.md b/docs/salesforce-ci-cd-setup-integrations-home.md index 9497ed502..84b543572 100644 --- a/docs/salesforce-ci-cd-setup-integrations-home.md +++ b/docs/salesforce-ci-cd-setup-integrations-home.md @@ -26,7 +26,7 @@ Depending of your git provider, configure one of the following integrations. - Deployment status in Pull Request threads - Quick Deploy to enhance performances -- [BitBucket](salesforce-ci-cd-setup-integrations-bitbucket.md) +- [BitBucket](salesforce-ci-cd-setup-integration-bitbucket.md) - Deployment status in Pull Request comments - Quick Deploy to enhance performance @@ -58,4 +58,14 @@ Depending of your git provider, configure one of the following integrations. 
- [Generic ticketing](salesforce-ci-cd-setup-integration-generic-ticketing.md) - Enrich MR/PR comments by adding tickets references and links - - Enrich notifications comments by adding tickets references and links \ No newline at end of file + - Enrich notifications comments by adding tickets references and links + +## Large Language Models (AI) + +- [Agentforce](salesforce-ai-setup.md/#with-agentforce) + - AI deployment assistant + - Project Documentation + +- [OpenAi, Anthropic, Gemini, Ollama](salesforce-ai-setup.md/#with-langchain) + - AI deployment assistant + - Project Documentation \ No newline at end of file diff --git a/docs/salesforce-ci-cd-use-home.md b/docs/salesforce-ci-cd-use-home.md index 10840c330..a706cf47b 100644 --- a/docs/salesforce-ci-cd-use-home.md +++ b/docs/salesforce-ci-cd-use-home.md @@ -22,9 +22,9 @@ ___ Now your computer is ready, let's see how you can use sandboxes or scratch orgs to make updates in your Salesforce projects ! -- [Create a new task](salesforce-ci-cd-create-new-task.md) -- [Work on your current task](salesforce-ci-cd-work-on-task.md) -- [Publish your task](salesforce-ci-cd-publish-task.md) (by creating a Merge Request) +- [Create a new User Story](salesforce-ci-cd-create-new-task.md) +- [Work on your current User Story](salesforce-ci-cd-work-on-task.md) +- [Publish your User Story](salesforce-ci-cd-publish-task.md) (by creating a Merge Request) - [Handle merge requests errors](salesforce-ci-cd-handle-merge-request-results.md) - [Validate a merge request](salesforce-ci-cd-validate-merge-request.md) _(Release manager and advanced user only, depending on the project organization)_ diff --git a/docs/salesforce-ci-cd-use-install.md b/docs/salesforce-ci-cd-use-install.md index 8ea06a4f6..82cacbf7e 100644 --- a/docs/salesforce-ci-cd-use-install.md +++ b/docs/salesforce-ci-cd-use-install.md @@ -36,10 +36,8 @@ _See tutorial_ > - [Salesforce 
CLI](https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_develop.htm) > - Salesforce DX plugins > - [sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis) -> - [Salesforce Data Move Utility](https://github.com/forcedotcom/SFDX-Data-Move-Utility) -> - [SFDX Essentials](https://github.com/nvuillam/sfdx-essentials) > - [SFDX Git Delta](https://github.com/scolladon/sfdx-git-delta) -> - [Texei Sfdx Plugin](https://github.com/texei/texei-sfdx-plugin) +> - [Salesforce Data Move Utility](https://github.com/forcedotcom/SFDX-Data-Move-Utility) Now your computer is all set, you can [clone your project git repository](salesforce-ci-cd-clone-repository.md) :) diff --git a/docs/salesforce-ci-cd-validate-merge-request.md b/docs/salesforce-ci-cd-validate-merge-request.md index 7b477c582..6ecad04c8 100644 --- a/docs/salesforce-ci-cd-validate-merge-request.md +++ b/docs/salesforce-ci-cd-validate-merge-request.md @@ -76,7 +76,7 @@ ___ ### Effective merge - **Click on Merge** - - If the merge request is from a **minor branch** (dev or config task), make sure that **Squash commits** and **Delete after merge** are **checked** + - If the merge request is from a **minor branch** (dev or config User Story), make sure that **Squash commits** and **Delete after merge** are **checked** - If the merge request if from a major branch (develop, recette, uat, preprod...), make sure that **Squash commits** and **Delete after merge** are **NOT checked** - The merge commit in the target branch will **trigger a new job** that will automatically **deploy the updated source to the corresponding Salesforce org** diff --git a/docs/salesforce-ci-cd-work-on-task-development.md b/docs/salesforce-ci-cd-work-on-task-development.md index 58b947d94..8134b051b 100644 --- a/docs/salesforce-ci-cd-work-on-task-development.md +++ b/docs/salesforce-ci-cd-work-on-task-development.md @@ -1,6 +1,6 @@ --- title: Develop on a Salesforce DX project -description: Learn how to develop on a 
task with a tracked sandbox or a scratch org +description: Learn how to develop on a User Story with a tracked sandbox or a scratch org --- @@ -10,8 +10,8 @@ description: Learn how to develop on a task with a tracked sandbox or a scratch - You can update code and XML metadatas using VsCode IDE - When you need to upload the updates to your org, use command ![Push to org button](assets/images/btn-push-to-org.jpg) -- If you made updates directly on your org, use command ![Pull from org button](assets/images/btn-pull-from-org.jpg) to retrieve into local files the updates that you performed online with point & click -- Once you have finished, you can [publish your task](salesforce-ci-cd-publish-task.md) +- If you made updates directly on your org, use command [![Pull from org button](assets/images/btn-pull-from-org.jpg)](https://sfdx-hardis.cloudity.com/hardis/scratch/pull/) to retrieve into local files the updates that you performed online with point & click +- Once you have finished, you can [publish your User Story](salesforce-ci-cd-publish-task.md) ### Recommendations diff --git a/docs/salesforce-ci-cd-work-on-task-install-packages.md b/docs/salesforce-ci-cd-work-on-task-install-packages.md index 6bc4e0c5c..e2f7cf448 100644 --- a/docs/salesforce-ci-cd-work-on-task-install-packages.md +++ b/docs/salesforce-ci-cd-work-on-task-install-packages.md @@ -2,14 +2,20 @@ title: Install packages on your org description: Learn how to install a package --- + ## Install packages > Packages (managed or not) must **never be directly installed in a major org** (integration, uat, preprod, production), it has to be done in dev sandbox / scratch orgs -If you can find the package id (starting by `04T`), use sfdx-hardis command ![Install package button](assets/images/btn-install-package.jpg) to install package instead of installing them directly with the URL +If you installed a package on a dev sandbox or scratch org, use **DevOps Pipeline -> Installed Packages Manager** to retrieve the 
package configuration **before creating your merge request** (be careful of what you commit in .sfdx-hardis.yml file !) + +- Select **Deployments** if you want the package to be automatically installed on major orgs +- Select **Scratch** if you want the package to be automatically installed on new Scratch Orgs -If you installed a package using an URL, use command [Retrieve packages button](assets/images/btn-retrieve-packages.jpg) to retrieve package config **before creating your merge request** (be careful of what you commit in .sfdx-hardis.yml file !) +![](assets/images/animation-install-packages.gif) > Once packages are referenced in `.sfdx-hardis.yml`, they will automatically be installed on major orgs during CI/CD deployments + +> If you want packages to be installed during deployment check, ask your release manager to define `installPackagesDuringCheckDeploy: true` in your `.sfdx-hardis.yml` config file. diff --git a/docs/salesforce-ci-cd-work-on-task-open-org.md b/docs/salesforce-ci-cd-work-on-task-open-org.md index b4156edd8..ae4613df6 100644 --- a/docs/salesforce-ci-cd-work-on-task-open-org.md +++ b/docs/salesforce-ci-cd-work-on-task-open-org.md @@ -13,4 +13,4 @@ You can use Salesforce Setup to configure your org as you would do on any sandbo - Click on ![Open current org button](assets/images/btn-open-org.jpg) to open your org in browser - _If you want to use another org, use menu ![Select org button](assets/images/btn-select-org.jpg) to select another one_ - Perform your configurations in the org -- Once you have finished, you can [publish your task](salesforce-ci-cd-publish-task.md) +- Once you have finished, you can [publish your User Story](salesforce-ci-cd-publish-task.md) diff --git a/docs/salesforce-ci-cd-work-on-task.md b/docs/salesforce-ci-cd-work-on-task.md index 8e9a3d877..7054ed7a8 100644 --- a/docs/salesforce-ci-cd-work-on-task.md +++ b/docs/salesforce-ci-cd-work-on-task.md @@ -1,6 +1,6 @@ --- -title: Work on a task on a Salesforce DX
project -description: Learn how to work on a task with a tracked sandbox or a scratch org +title: Work on a User Story on a Salesforce DX project +description: Learn how to work on a User Story with a tracked sandbox or a scratch org --- diff --git a/docs/salesforce-deployment-assistant-error-list.md b/docs/salesforce-deployment-assistant-error-list.md index 3a43b1cc9..ed6e2837f 100644 --- a/docs/salesforce-deployment-assistant-error-list.md +++ b/docs/salesforce-deployment-assistant-error-list.md @@ -12,25 +12,29 @@ See how to [setup sfdx-hardis deployment assistant](salesforce-deployment-assist If you see a deployment error which is not here yet, please [add it in this file](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/src/common/utils/deployTipsList.ts) :) -## API Version error +## [API Version error](sf-deployment-assistant/API-Version-error.md) -- `Error (.*) The (.*) apiVersion can't be "([0-9]+)"` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) The (.*) apiVersion can't be "([0-9]+)"` + +**Resolution** ```shell {1} metadata has probably been created/updated in a sandbox already upgraded to next platform version (ex: Sandbox in Summer'23 and Production in Spring'23) - First, try to update the api version in the XML of {1} metadata file (decrement the number in {3}.0) -- If it still doesn't work because the metadata structure has changed between version, you may try a force:source:retrieve of the metadata by forcing --apiversion at the end of the command. +- If it still doesn't work because the metadata structure has changed between versions, you may try a sf project:retrieve:start of the metadata by forcing --api-version at the end of the command.
``` --- -## Allow deployment with pending Apex Jobs +## [Allow deployment with pending Apex Jobs](sf-deployment-assistant/Allow-deployment-with-pending-Apex-Jobs.md) + +**Detection** -- `You can bypass this error by allowing deployments with Apex jobs in the Deployment Settings page in Setup.` +- String: `You can bypass this error by allowing deployments with Apex jobs in the Deployment Settings page in Setup.` -**Resolution tip** +**Resolution** ```shell Go to target org, in Setup -> Deployment Settings -> Activate option "Allow deployments of components when corresponding Apex jobs are pending or in progress." @@ -38,11 +42,13 @@ Go to target org, in Setup -> Deployment Settings -> Activate option "Allow depl ``` --- -## Can not change field type to a formula field +## [Can not change field type to a formula field](sf-deployment-assistant/Can-not-change-field-type-to-a-formula-field.md) -- `Error (.*) Cannot update a field from a Formula to something else` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) Cannot update a field from a Formula to something else` + +**Resolution** ```shell You need to manually delete or rename the field in the target org to allow the deployment to pass @@ -51,11 +57,13 @@ You need to manually delete or rename the field in the target org to allow the d ``` --- -## Can not change type due to existing data +## [Can not change type due to existing data](sf-deployment-assistant/Can-not-change-type-due-to-existing-data.md) + +**Detection** -- `Error (.*) Cannot change type due to existing data` +- RegExp: `Error (.*) Cannot change type due to existing data` -**Resolution tip** +**Resolution** ```shell It is usually not recommended to change types of fields, but if it's really necessary you can: @@ -67,11 +75,13 @@ It is usually not recommended to change types of fields, but if it's really nece ``` --- -## Can not change field type with picklist +## [Can not change field type with 
picklist](sf-deployment-assistant/Can-not-change-field-type-with-picklist.md) -- `Error (.*) Cannot change which global value set this picklist uses` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) Cannot change which global value set this picklist uses` + +**Resolution** ```shell You probably updated the type of field {1}, and Salesforce does not allows that with deployments. You can: @@ -81,12 +91,14 @@ You probably updated the type of field {1}, and Salesforce does not allows that ``` --- -## Can not delete custom field +## [Can not delete custom field](sf-deployment-assistant/Can-not-delete-custom-field.md) + +**Detection** -- `This (.*) is referenced elsewhere in salesforce.com` -- `Le champ personnalisé (.*) est utilisé dans (.*)` +- RegExp: `This (.*) is referenced elsewhere in salesforce.com` +- RegExp: `Le champ personnalisé (.*) est utilisé dans (.*)` -**Resolution tip** +**Resolution** ```shell Custom field {1} can not be deleted because it is used elsewhere. Remove its references ans try again @@ -94,11 +106,13 @@ THIS MAY BE A FALSE POSITIVE if you are just testing the deployment, as destruct ``` --- -## Can not delete record type +## [Can not delete record type](sf-deployment-assistant/Can-not-delete-record-type.md) + +**Detection** -- `Error (.*) Cannot delete record type through API` +- RegExp: `Error (.*) Cannot delete record type through API` -**Resolution tip** +**Resolution** ```shell You need to manually delete record type {1} in target org @@ -107,26 +121,30 @@ You need to manually delete record type {1} in target org ``` --- -## Can not find folder +## [Can not find folder](sf-deployment-assistant/Can-not-find-folder.md) -- `Error (.*) Cannot find folder:(.*)` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) Cannot find folder:(.*)` + +**Resolution** ```shell Folder {2} is missing. 
- If folder {2} is existing in sources, add it in related package.xml -- If folder {2} is not existing in DX sources, please use sfdx hardis:project:clean:retrievefolders to retrieve it +- If folder {2} is not existing in DX sources, please use sf hardis:project:clean:retrievefolders to retrieve it - If both previous solutions did not work, go create manually folder {2} in target org ``` --- -## Can not find user +## [Can not find user](sf-deployment-assistant/Can-not-find-user.md) + +**Detection** -- `Error (.*) Cannot find a user that matches any of the following usernames` +- RegExp: `Error (.*) Cannot find a user that matches any of the following usernames` -**Resolution tip** +**Resolution** ```shell You made reference to username(s) in {1}, and those users probably do not exist in target org. @@ -144,11 +162,13 @@ Example of XML you have to remove in {1}: ``` --- -## Can not find user (2) +## [Can not find user (2)](sf-deployment-assistant/Can-not-find-user--2-.md) -- `Error (.*) In field: (.*) - no User named (.*) found` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) In field: (.*) - no User named (.*) found` + +**Resolution** ```shell You made reference to username {3} in {1}, and it probably does not exist in the target org. @@ -158,11 +178,13 @@ You made reference to username {3} in {1}, and it probably does not exist in the ``` --- -## Cannot update a field to a Summary from something else +## [Cannot update a field to a Summary from something else](sf-deployment-assistant/Cannot-update-a-field-to-a-Summary-from-something-else.md) + +**Detection** -- `Error (.*) Cannot update a field to a (.*) from something else` +- RegExp: `Error (.*) Cannot update a field to a (.*) from something else` -**Resolution tip** +**Resolution** ```shell You probably updated the type of field {1} to type {2}, and Salesforce does not allows that with deployments. 
You can: @@ -172,11 +194,26 @@ You probably updated the type of field {1} to type {2}, and Salesforce does not ``` --- -## Condition missing reference +## [Change Matching Rule](sf-deployment-assistant/Change-Matching-Rule.md) + +**Detection** + +- RegExp: `Error (.*) Before you change a matching rule, you must deactivate it` + +**Resolution** + +```shell +To be able to deploy, you must go in target org setup to manually deactivate matching rule {1} +``` + +--- +## [Condition missing reference](sf-deployment-assistant/Condition-missing-reference.md) + +**Detection** -- `Error (.*) field integrity exception: unknown \(A condition has a reference to (.*), which doesn't exist.\)` +- RegExp: `Error (.*) field integrity exception: unknown \(A condition has a reference to (.*), which doesn't exist.\)` -**Resolution tip** +**Resolution** ```shell There is a reference to {2} in {1}, and {2} is not found. You can either: @@ -186,27 +223,51 @@ There is a reference to {2} in {1}, and {2} is not found. You can either: ``` --- -## Custom object not found +## [Couldn't retrieve or load information on the field](sf-deployment-assistant/Couldn-t-retrieve-or-load-information-on-the-field.md) + +**Detection** -- `Error (.*) In field: field - no CustomObject named (.*) found` +- RegExp: `Error (.*) Something went wrong. We couldn't retrieve or load the information on the field: (.*)\.` -**Resolution tip** +**Resolution** + +```shell +There is a reference to {2} in {1}, and {2} is not found. 
You can either: +- Commit {2} in your deployment sources and make sure it is named in package.xml +- Remove the reference to {2} in {1} + +``` + +--- +## [Custom object not found](sf-deployment-assistant/Custom-object-not-found.md) + +**Detection** + +- RegExp: `Error (.*) In field: field - no CustomObject named (.*) found` + +**Resolution** ```shell A reference to a custom object {2} is not found in {1}: - If you renamed the custom object, do a search/replace in sources with previous object name and new object name - If you deleted the custom object, or if you don't want to deploy it, do a search on the custom object name, and remove XML elements referencing it - If the object should exist, make sure it is in force-app/main/default/objects and that the object name is in manifest/package.xml in CustomObject section -You may also have a look to command sfdx hardis:project:clean:references +You may also have a look to command sf hardis:project:clean:references ``` --- -## Custom field not found +## [Custom field not found](sf-deployment-assistant/Custom-field-not-found.md) + +**Detection** + +- RegExp: `Error (.*) In field: (.*) - no CustomField named (.*)\.(.*) found` + +**Examples** -- `Error (.*) In field: (.*) - no CustomField named (.*)\.(.*) found` +- `Error PS_Admin In field: field - no CustomField named User.expcloud__Portal_Username__c found` -**Resolution tip** +**Resolution** ```shell A reference to a custom field {3}.{4} is not found in {1}: @@ -214,17 +275,19 @@ A reference to a custom field {3}.{4} is not found in {1}: - If you deleted {3}.{4}, or if you don't want to deploy it, do a search on {4} in all sources, and remove all XML elements referring to {3}.{4} (except in destructiveChanges.xml) - If {3}.{4} should exist, make sure it is in force-app/main/default/objects/{3}/fields and that {3}.{4} is in manifest/package.xml in CustomField section - If {3}.{4} is standard, the error is because {3}.{4} is not available in the org you are trying to deploy 
to. You can: - - Remove the reference to {4} in the XML of {1} ( maybe sfdx hardis:project:clean:references can clean automatically for you ! ) + - Remove the reference to {4} in the XML of {1} ( maybe sf hardis:project:clean:references can clean automatically for you ! ) - Activate the required features/license in the target org ``` --- -## Mandatory custom field can not be in a profile/permission set +## [Mandatory custom field can not be in a profile/permission set](sf-deployment-assistant/Mandatory-custom-field-can-not-be-in-a-profile-permission-set.md) -- `Error (.*) You cannot deploy to a required field: (.*)` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) You cannot deploy to a required field: (.*)` + +**Resolution** ```shell @@ -239,11 +302,13 @@ Example of element to delete: ``` --- -## Custom metadata entry not found +## [Custom metadata entry not found](sf-deployment-assistant/Custom-metadata-entry-not-found.md) + +**Detection** -- `Error (.*) In field: (.*) - no CustomMetadata named (.*) found` +- RegExp: `Error (.*) In field: (.*) - no CustomMetadata named (.*) found` -**Resolution tip** +**Resolution** ```shell A reference to a custom metadata {3} of type {2} is not found in {1}: @@ -253,11 +318,26 @@ A reference to a custom metadata {3} of type {2} is not found in {1}: ``` --- -## Missing Data Category Group +## [Expired Access / Refresh Token](sf-deployment-assistant/Expired-Access---Refresh-Token.md) -- `Error (.*) In field: DeveloperName - no DataCategoryGroup named (.*) found` +**Detection** -**Resolution tip** +- String: `expired access/refresh token` + +**Resolution** + +```shell +Run command "Select another org" from Status panel (or sf hardis:org:select) to authenticate again to your org +``` + +--- +## [Missing Data Category Group](sf-deployment-assistant/Missing-Data-Category-Group.md) + +**Detection** + +- RegExp: `Error (.*) In field: DeveloperName - no DataCategoryGroup named (.*) found` + +**Resolution** ```shell If Data 
Category Group {2} is not existing yet in target org, you might need to: @@ -267,11 +347,13 @@ If Data Category Group {2} is not existing yet in target org, you might need to: ``` --- -## Dependent class is invalid and needs recompilation +## [Dependent class is invalid and needs recompilation](sf-deployment-assistant/Dependent-class-is-invalid-and-needs-recompilation.md) -- `Error (.*) Dependent class is invalid and needs recompilation` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) Dependent class is invalid and needs recompilation` + +**Resolution** ```shell Solve the other errors and this one will disappear ! @@ -279,47 +361,55 @@ Solve the other errors and this one will disappear ! ``` --- -## Duplicate value Platform Action Id List +## [Duplicate value Platform Action Id List](sf-deployment-assistant/Duplicate-value-Platform-Action-Id-List.md) + +**Detection** -- `duplicate value found: PlatformActionListId duplicates value on record with id` +- String: `duplicate value found: PlatformActionListId duplicates value on record with id` -**Resolution tip** +**Resolution** ```shell There are probably issue with conflict management. Open the XML of the source item, and replace all numbers to make an ascending order, starting with 0 ``` --- -## Duplicate label +## [Duplicate label](sf-deployment-assistant/Duplicate-label.md) + +**Detection** -- `Error (.*) Duplicate label: (.*)` +- RegExp: `Error (.*) Duplicate label: (.*)` -**Resolution tip** +**Resolution** ```shell You probably renamed the picklist API name for {2}. 
Please update manually the picklist {1} in the target org to avoid to have a duplicate label ``` --- -## Missing e-mail template +## [Missing e-mail template](sf-deployment-assistant/Missing-e-mail-template.md) -- `In field: template - no EmailTemplate named (.*) found` +**Detection** -**Resolution tip** +- RegExp: `In field: template - no EmailTemplate named (.*) found` + +**Resolution** ```shell An email template should be present in the sources. To retrieve it, you can run: -sfdx force:source:retrieve -m EmailTemplate:{1} -u YOUR_ORG_USERNAME +sf project retrieve start -m EmailTemplate:{1} -o YOUR_ORG_USERNAME ``` --- -## Empty source items +## [Empty source items](sf-deployment-assistant/Empty-source-items.md) + +**Detection** -- `Required field is missing: sharingOwnerRules` -- `Required field is missing: standardValue` -- `Required field is missing: valueTranslation` +- String: `Required field is missing: sharingOwnerRules` +- String: `Required field is missing: standardValue` +- String: `Required field is missing: valueTranslation` -**Resolution tip** +**Resolution** ```shell You probably retrieved empty items, that must not be included within the SFDX project @@ -327,11 +417,13 @@ To remove them, please run sfdx:hardis:project:clean:emptyitems ``` --- -## Enable CRM Analytics +## [Enable CRM Analytics](sf-deployment-assistant/Enable-CRM-Analytics.md) + +**Detection** -- `It should be created by enabling the CRM Analytics Cloud preference` +- String: `It should be created by enabling the CRM Analytics Cloud preference` -**Resolution tip** +**Resolution** ```shell You must enable CRM Analytics (ex Wave, Einstein Analytics & Tableau CRM) in the target org. 
@@ -339,11 +431,13 @@ You probably also need to add CRM Analytics Admin Permission Set assignment to t ``` --- -## Error parsing file +## [Error parsing file](sf-deployment-assistant/Error-parsing-file.md) -- `Error (.*) Error parsing file: (.*) ` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) Error parsing file: (.*)` + +**Resolution** ```shell There has been an error parsing the XML file of {1}: {2} @@ -351,11 +445,13 @@ There has been an error parsing the XML file of {1}: {2} ``` --- -## Formula picklist field issue +## [Formula picklist field issue](sf-deployment-assistant/Formula-picklist-field-issue.md) + +**Detection** -- `Field:(.*) must not be Required` +- RegExp: `Field:(.*) must not be Required` -**Resolution tip** +**Resolution** ```shell You probably made read only field {1} that was required before. @@ -363,11 +459,13 @@ Find field {1} in the layout source XML, then replace Required by Readonly ``` --- -## Field not available for element +## [Field not available for element](sf-deployment-assistant/Field-not-available-for-element.md) + +**Detection** -- `Field (.*) is not available for` +- RegExp: `Field (.*) is not available for` -**Resolution tip** +**Resolution** ```shell You probably changed the type of field {1}. @@ -375,11 +473,13 @@ Find field {1} in the source XML, and remove the section using it ``` --- -## Formula picklist field issue +## [Formula picklist field issue](sf-deployment-assistant/Formula-picklist-field-issue.md) -- `Les champs de liste de sélection sont pris en charge uniquement dans certaines fonctions.` +**Detection** -**Resolution tip** +- String: `Les champs de liste de sélection sont pris en charge uniquement dans certaines fonctions.` + +**Resolution** ```shell You probably changed the type of a field that is used in a formula. 
@@ -388,22 +488,26 @@ More details at https://help.salesforce.com/articleView?id=sf.tips_on_building_f ``` --- -## Flow must be deleted manually +## [Flow must be deleted manually](sf-deployment-assistant/Flow-must-be-deleted-manually.md) + +**Detection** -- `.flow (.*) insufficient access rights on cross-reference id` +- RegExp: `.flow (.*) insufficient access rights on cross-reference id` -**Resolution tip** +**Resolution** ```shell Flow {1} can not be deleted using deployments, please delete it manually in the target org using menu Setup -> Flows , context menu on {1} -> View details and versions -> Deactivate all versions -> Delete flow ``` --- -## Insufficient access rights on cross-reference id +## [Insufficient access rights on cross-reference id](sf-deployment-assistant/Insufficient-access-rights-on-cross-reference-id.md) -- `Error (.*) insufficient access rights on cross-reference id` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) insufficient access rights on cross-reference id` + +**Resolution** ```shell - If {1} is a Flow, it can not be deleted using deployments, please delete it manually in the target org using menu Setup -> Flows , context menu on {1} -> View details and versions -> Deactivate all versions -> Delete flow @@ -411,22 +515,26 @@ Flow {1} can not be deleted using deployments, please delete it manually in the ``` --- -## Invalid formula grouping context +## [Invalid formula grouping context](sf-deployment-assistant/Invalid-formula-grouping-context.md) + +**Detection** -- `Invalid custom summary formula definition: You must select a grouping context to use any report summary function` +- String: `Invalid custom summary formula definition: You must select a grouping context to use any report summary function` -**Resolution tip** +**Resolution** ```shell You need to update your Report definition. 
See workaround here -> https://salesforce.stackexchange.com/questions/294850/grouping-error-with-prevgroupval-function ``` --- -## Invalid report type +## [Invalid report type](sf-deployment-assistant/Invalid-report-type.md) -- `Error (.*) invalid report type` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) invalid report type` + +**Resolution** ```shell Report type is missing for report {1} @@ -435,25 +543,29 @@ Report type is missing for report {1} ``` --- -## Invalid scope:Mine, not allowed +## [Invalid scope:Mine, not allowed](sf-deployment-assistant/Invalid-scope-Mine--not-allowed.md) + +**Detection** -- `Invalid scope:Mine, not allowed` +- String: `Invalid scope:Mine, not allowed` -**Resolution tip** +**Resolution** ```shell Replace Mine by Everything in the list view SFDX source XML. Have a look at this command to manage that automatically :) -https://sfdx-hardis.cloudity.com/hardis/org/fix/listviewmine/ +https://sfdx-hardis.cloudity.com/hardis/org/fix/listviewmine/ ``` --- -## Invalid field in related list +## [Invalid field in related list](sf-deployment-assistant/Invalid-field-in-related-list.md) + +**Detection** -- `Error (.*) Invalid field:(.*) in related list:(.*)` +- RegExp: `Error (.*) Invalid field:(.*) in related list:(.*)` -**Resolution tip** +**Resolution** ```shell Field {2} is unknown. You can: @@ -472,11 +584,13 @@ Example of XML to remove: ``` --- -## Invalid field for upsert +## [Invalid field for upsert](sf-deployment-assistant/Invalid-field-for-upsert.md) -- `Error (.*) Invalid field for upsert, must be an External Id custom or standard indexed field: (.*) \((.*)\)` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) Invalid field for upsert, must be an External Id custom or standard indexed field: (.*) \((.*)\)` + +**Resolution** ```shell You tried to use field {2} for an upsert call in {1}. @@ -487,22 +601,26 @@ You tried to use field {2} for an upsert call in {1}. 
``` --- -## Invalid type +## [Invalid type](sf-deployment-assistant/Invalid-type.md) + +**Detection** -- `Error (.*) Invalid type: (.*) \((.*)\)` +- RegExp: `Error (.*) Invalid type: (.*) \((.*)\)` -**Resolution tip** +**Resolution** ```shell Apex error in {1} with unknown type {2} at position {3}. If {2} is a class name, try to fix it, or maybe it is missing in the files or in package.xml ! ``` --- -## Campaign can not be updated +## [Campaign can not be updated](sf-deployment-assistant/Campaign-can-not-be-updated.md) -- `The object "Campaign" can't be updated` +**Detection** -**Resolution tip** +- String: `The object "Campaign" can't be updated` + +**Resolution** ```shell Add "MarketingUser" in project-scratch-def.json features @@ -510,12 +628,14 @@ If it is already done, you may manually check "MarketingUser" field on the scrat ``` --- -## Missing field MiddleName +## [Missing field MiddleName](sf-deployment-assistant/Missing-field-MiddleName.md) + +**Detection** -- `field MiddleName` -- `Variable does not exist: MiddleName` +- String: `field MiddleName` +- String: `Variable does not exist: MiddleName` -**Resolution tip** +**Resolution** ```shell MiddleNames must be activated in the target org. @@ -527,11 +647,13 @@ MiddleNames must be activated in the target org. ``` --- -## Missing field Suffix +## [Missing field Suffix](sf-deployment-assistant/Missing-field-Suffix.md) -- `field Suffix` +**Detection** -**Resolution tip** +- String: `field Suffix` + +**Resolution** ```shell Suffix must be activated in the target org. @@ -543,13 +665,15 @@ Suffix must be activated in the target org. 
``` --- -## Missing field SyncedQuoteId +## [Missing field SyncedQuoteId](sf-deployment-assistant/Missing-field-SyncedQuoteId.md) + +**Detection** -- `field SyncedQuoteId` -- `Error force-app/main/default/objects/Quote/Quote.object-meta.xml` -- `Error force-app/main/default/objects/Opportunity/fields/SyncedQuoteId.field-meta.xml` +- String: `field SyncedQuoteId` +- String: `Error force-app/main/default/objects/Quote/Quote.object-meta.xml` +- String: `Error force-app/main/default/objects/Opportunity/fields/SyncedQuoteId.field-meta.xml` -**Resolution tip** +**Resolution** ```shell Quotes must be activated in the target org. @@ -561,12 +685,14 @@ Quotes must be activated in the target org. ``` --- -## Missing feature ContactToMultipleAccounts +## [Missing feature ContactToMultipleAccounts](sf-deployment-assistant/Missing-feature-ContactToMultipleAccounts.md) + +**Detection** -- `no CustomObject named AccountContactRelation found` -- `Invalid field:ACCOUNT.NAME in related list:RelatedContactAccountRelationList` +- String: `no CustomObject named AccountContactRelation found` +- String: `Invalid field:ACCOUNT.NAME in related list:RelatedContactAccountRelationList` -**Resolution tip** +**Resolution** ```shell Contacts to multiple accounts be activated in the target org. @@ -576,11 +702,13 @@ Contacts to multiple accounts be activated in the target org. ``` --- -## Missing feature Chatter Collaboration Group +## [Missing feature Chatter Collaboration Group](sf-deployment-assistant/Missing-feature-Chatter-Collaboration-Group.md) -- `CollaborationGroup` +**Detection** -**Resolution tip** +- String: `CollaborationGroup` + +**Resolution** ```shell Quotes must be activated in the target org. @@ -592,11 +720,13 @@ Quotes must be activated in the target org. 
``` --- -## Missing feature Enhanced notes +## [Missing feature Enhanced notes](sf-deployment-assistant/Missing-feature-Enhanced-notes.md) + +**Detection** -- `FeedItem.ContentNote` +- String: `FeedItem.ContentNote` -**Resolution tip** +**Resolution** ```shell Enhanced Notes must be activated in the target org. @@ -608,11 +738,13 @@ Enhanced Notes must be activated in the target org. ``` --- -## Missing feature Ideas notes +## [Missing feature Ideas notes](sf-deployment-assistant/Missing-feature-Ideas-notes.md) -- `Idea.InternalIdeasIdeaRecordType` +**Detection** -**Resolution tip** +- String: `Idea.InternalIdeasIdeaRecordType` + +**Resolution** ```shell Ideas must be activated in the target org. @@ -624,11 +756,13 @@ Ideas must be activated in the target org. ``` --- -## Missing feature Live Agent +## [Missing feature Live Agent](sf-deployment-assistant/Missing-feature-Live-Agent.md) + +**Detection** -- `FeedItem.ContentNote` +- String: `FeedItem.ContentNote` -**Resolution tip** +**Resolution** ```shell Live Agent must be activated in the target org. @@ -637,24 +771,28 @@ Live Agent must be activated in the target org. ``` --- -## Missing feature Product Request +## [Missing feature Product Request](sf-deployment-assistant/Missing-feature-Product-Request.md) -- `ProductRequest` +**Detection** -**Resolution tip** +- String: `ProductRequest` + +**Resolution** ```shell ProductRequest object is not available in the target org. Maybe you would like to clean its references within Profiles / PS using the following command ? 
-sfdx hardis:project:clean:references , then select "ProductRequest references" +sf hardis:project:clean:references , then select "ProductRequest references" ``` --- -## Missing feature Social Customer Service +## [Missing feature Social Customer Service](sf-deployment-assistant/Missing-feature-Social-Customer-Service.md) + +**Detection** -- `SocialPersona.AreWeFollowing` +- String: `SocialPersona.AreWeFollowing` -**Resolution tip** +**Resolution** ```shell Social Custom Service must be activated in the target org. @@ -663,11 +801,13 @@ Social Custom Service must be activated in the target org. ``` --- -## Missing feature Translation Workbench +## [Missing feature Translation Workbench](sf-deployment-assistant/Missing-feature-Translation-Workbench.md) + +**Detection** -- `report-meta.xml(.*)filterlanguage` +- RegExp: `report-meta.xml(.*)filterlanguage` -**Resolution tip** +**Resolution** ```shell Translation workbench must be activated in the target org. @@ -680,11 +820,13 @@ Translation workbench must be activated in the target org. ``` --- -## Missing feature Opportunity Teams +## [Missing feature Opportunity Teams](sf-deployment-assistant/Missing-feature-Opportunity-Teams.md) -- `OpportunityTeam` +**Detection** -**Resolution tip** +- String: `OpportunityTeam` + +**Resolution** ```shell Opportunity Teams must be activated in the target org. @@ -696,11 +838,13 @@ Opportunity Teams must be activated in the target org. ``` --- -## Missing Feature Work.Com +## [Missing Feature Work.Com](sf-deployment-assistant/Missing-Feature-Work-Com.md) + +**Detection** -- `WorkBadgeDefinition` +- String: `WorkBadgeDefinition` -**Resolution tip** +**Resolution** ```shell Work.com feature must be activated in the target org. @@ -708,22 +852,26 @@ Work.com feature must be activated in the target org. 
``` --- -## Missing multi-currency field +## [Missing multi-currency field](sf-deployment-assistant/Missing-multi-currency-field.md) + +**Detection** -- `A reference to a custom field (.*)CurrencyIsoCode` +- RegExp: `A reference to a custom field (.*)CurrencyIsoCode` -**Resolution tip** +**Resolution** ```shell You probably need to activate MultiCurrency (from Setup -> Company information) ``` --- -## Missing object referenced in package.xml +## [Missing object referenced in package.xml](sf-deployment-assistant/Missing-object-referenced-in-package-xml.md) -- `An object (.*) of type (.*) was named in package.xml, but was not found in zipped directory` +**Detection** -**Resolution tip** +- RegExp: `An object (.*) of type (.*) was named in package.xml, but was not found in zipped directory` + +**Resolution** ```shell You can either: @@ -732,11 +880,13 @@ You can either: ``` --- -## Missing Quick Action +## [Missing Quick Action](sf-deployment-assistant/Missing-Quick-Action.md) + +**Detection** -- `Error (.*) In field: QuickAction - no QuickAction named (.*) found` +- RegExp: `Error (.*) In field: QuickAction - no QuickAction named (.*) found` -**Resolution tip** +**Resolution** ```shell QuickAction {2} referred in {1} is unknown. You can either: @@ -750,25 +900,29 @@ QuickAction {2} referred in {1} is unknown. You can either: ``` --- -## Missing report +## [Missing report](sf-deployment-assistant/Missing-report.md) + +**Detection** -- `Error (.*) The (.*) report chart has a problem with the "reportName" field` +- RegExp: `Error (.*) The (.*) report chart has a problem with the "reportName" field` -**Resolution tip** +**Resolution** ```shell {1} is referring to unknown report {2}. 
To retrieve it, you can run: -- sfdx force:source:retrieve -m Report:{2} -u YOUR_ORG_USERNAME +- sf project retrieve start -m Report:{2} -o YOUR_ORG_USERNAME - If it fails, looks for the report folder and add it before report name to the retrieve command (ex: MYFOLDER/MYREPORTNAME) ``` --- -## Missing Sales Team +## [Missing Sales Team](sf-deployment-assistant/Missing-Sales-Team.md) -- `related list:RelatedAccountSalesTeam` +**Detection** -**Resolution tip** +- String: `related list:RelatedAccountSalesTeam` + +**Resolution** ```shell Account Teams must be activated in the target org. @@ -781,27 +935,31 @@ Account Teams must be activated in the target org. ``` --- -## sharing operation already in progress +## [sharing operation already in progress](sf-deployment-assistant/sharing-operation-already-in-progress.md) + +**Detection** -- `sharing operation already in progress` +- String: `sharing operation already in progress` -**Resolution tip** +**Resolution** ```shell You can not deploy multiple SharingRules at the same time. You can either: - Remove SharingOwnerRules and SharingRule from package.xml (so it becomes a manual operation) -- Use sfdx hardis:work:save to generate a deploymentPlan in .sfdx-hardis.json, +- Use sf hardis:work:save to generate a deploymentPlan in .sfdx-hardis.json, - If you are trying to create a scratch org, add DeferSharingCalc in features in project-scratch-def.json ``` --- -## Network issue +## [Network issue](sf-deployment-assistant/Network-issue.md) -- `ECONNABORTED` -- `ECONNRESET` +**Detection** -**Resolution tip** +- String: `ECONNABORTED` +- String: `ECONNRESET` + +**Resolution** ```shell The network connection has been aborted, this is a purely technical issue. 
@@ -809,11 +967,13 @@ Try again, and if you still see errors, check the status of Salesforce instance ``` --- -## Not available for deploy for this organization +## [Not available for deploy for this organization](sf-deployment-assistant/Not-available-for-deploy-for-this-organization.md) + +**Detection** -- `Error (.*) Not available for deploy for this organization` +- RegExp: `Error (.*) Not available for deploy for this organization` -**Resolution tip** +**Resolution** ```shell The user you use for deployments probably lacks of the rights (Profiles, Permission sets...) to manage {1}. @@ -821,11 +981,13 @@ The user you use for deployments probably lacks of the rights (Profiles, Permiss ``` --- -## Not valid sharing model +## [Not valid sharing model](sf-deployment-assistant/Not-valid-sharing-model.md) -- `Error (.*) (.*) is not a valid sharing model for (.*) when (.*) sharing model is (.*)` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) (.*) is not a valid sharing model for (.*) when (.*) sharing model is (.*)` + +**Resolution** ```shell It seems that Sharing Models of {1} and {4} are not compatible in target org. @@ -836,11 +998,13 @@ It seems that Sharing Models of {1} and {4} are not compatible in target org. ``` --- -## Picklist sharing is not supported +## [Picklist sharing is not supported](sf-deployment-assistant/Picklist-sharing-is-not-supported.md) + +**Detection** -- `Picklist sharing is not supported` +- String: `Picklist sharing is not supported` -**Resolution tip** +**Resolution** ```shell You probably changed the type of a field. 
@@ -849,25 +1013,29 @@ Go manually make the change in the target org, so the deployment will pass ``` --- -## Picklist value not found +## [Picklist value not found](sf-deployment-assistant/Picklist-value-not-found.md) + +**Detection** -- `Picklist value: (.*) in picklist: (.*) not found` +- RegExp: `Picklist value: (.*) in picklist: (.*) not found` -**Resolution tip** +**Resolution** ```shell Sources have references to value {1} of picklist {2} -- If picklist {2} is standard, add the picklist to sfdx sources by using "sfdx force:source:retrieve -m StandardValueSet:{2}", then save again +- If picklist {2} is standard, add the picklist to sfdx sources by using "sf project retrieve start -m StandardValueSet:{2}", then save again - Else, perform a search in all code of {1}, then remove XML tags referring to {1} (for example in record types metadatas) ``` --- -## Please choose a different name +## [Please choose a different name](sf-deployment-assistant/Please-choose-a-different-name.md) -- `Error (.*) This (.*) already exists or has been previously used(.*)Please choose a different name.` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) This (.*) already exists or has been previously used(.*)Please choose a different name.` + +**Resolution** ```shell - Rename {1} in the target org, then try again the deployment. if it succeeds, delete the renamed item. @@ -876,11 +1044,13 @@ Sources have references to value {1} of picklist {2} ``` --- -## Missing profile default application +## [Missing profile default application](sf-deployment-assistant/Missing-profile-default-application.md) + +**Detection** -- `You can't remove the only default app from the profile.` +- String: `You can't remove the only default app from the profile.` -**Resolution tip** +**Resolution** ```shell You must have a default application for a profile. You can: @@ -895,26 +1065,30 @@ You must have a default application for a profile. 
You can: ``` --- -## CRM Analytics: A Recipe must specify a DataFlow +## [CRM Analytics: A Recipe must specify a DataFlow](sf-deployment-assistant/CRM-Analytics--A-Recipe-must-specify-a-DataFlow.md) -- `Error (.*) A Recipe must specify a Dataflow` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) A Recipe must specify a Dataflow` + +**Resolution** ```shell You must include related WaveDataFlow {1} in sources (and probably in package.xml too). -To retrieve it, run: sfdx force:source:retrieve -m WaveDataFlow:{1} -u SOURCE_ORG_USERNAME -You can also retrieve all analytics sources in one shot using sfdx hardis:org:retrieve:source:analytics -u SOURCE_ORG_USERNAME +To retrieve it, run: sf project retrieve start -m WaveDataFlow:{1} -u SOURCE_ORG_USERNAME +You can also retrieve all analytics sources in one shot using sf hardis:org:retrieve:source:analytics -u SOURCE_ORG_USERNAME - https://salesforce.stackexchange.com/a/365453/33522 - https://help.salesforce.com/s/articleView?id=000319274&type=1 ``` --- -## Record Type not found +## [Record Type not found](sf-deployment-assistant/Record-Type-not-found.md) + +**Detection** -- `Error (.*) In field: recordType - no RecordType named (.*) found` +- RegExp: `Error (.*) In field: recordType - no RecordType named (.*) found` -**Resolution tip** +**Resolution** ```shell An unknown record type {2} is referenced in {1} @@ -924,11 +1098,13 @@ An unknown record type {2} is referenced in {1} ``` --- -## Objects rights on a role is below org default +## [Objects rights on a role is below org default](sf-deployment-assistant/Objects-rights-on-a-role-is-below-org-default.md) -- `access level below organization default` +**Detection** -**Resolution tip** +- String: `access level below organization default` + +**Resolution** ```shell Your org wide settings default must be lower than the level defined in roles: @@ -938,11 +1114,13 @@ Your org wide settings default must be lower than the level defined in roles: ``` --- -## Unsupported 
sharing configuration +## [Unsupported sharing configuration](sf-deployment-assistant/Unsupported-sharing-configuration.md) + +**Detection** -- `not supported for (.*) since it's org wide default is` +- RegExp: `not supported for (.*) since it's org wide default is` -**Resolution tip** +**Resolution** ```shell Consistency error between {1} sharing settings and {1} object configuration @@ -951,46 +1129,54 @@ If you already did that, please try again to run the job ``` --- -## A sharing rule may be useless +## [A sharing rule may be useless](sf-deployment-assistant/A-sharing-rule-may-be-useless.md) + +**Detection** -- `Required field is missing: sharingCriteriaRules` +- String: `Required field is missing: sharingCriteriaRules` -**Resolution tip** +**Resolution** ```shell Are you sure you need this sharing rule ? You may remove it from the sfdx project ``` --- -## Sharing recalculation lock +## [Sharing recalculation lock](sf-deployment-assistant/Sharing-recalculation-lock.md) -- `because it interferes with another operation already in progress` -- `Le calcul de partage demandé ne peut être traité maintenant car il interfère avec une autre opération en cours` +**Detection** -**Resolution tip** +- String: `because it interferes with another operation already in progress` +- String: `Le calcul de partage demandé ne peut être traité maintenant car il interfère avec une autre opération en cours` + +**Resolution** ```shell If you changed a field from MasterDetail to Lookup, you must do it manually in the target org before being able to deploy ``` --- -## Send email is disabled +## [Send email is disabled](sf-deployment-assistant/Send-email-is-disabled.md) + +**Detection** -- `Send Email is disabled or activities are not allowed` -- `Unknown user permission: SendExternalEmailAvailable` +- String: `Send Email is disabled or activities are not allowed` +- String: `Unknown user permission: SendExternalEmailAvailable` -**Resolution tip** +**Resolution** ```shell Go to Email -> 
Deliverability -> Select value "All emails" ``` --- -## Sort order must be in sequential order +## [Sort order must be in sequential order](sf-deployment-assistant/Sort-order-must-be-in-sequential-order.md) -- `Error (.*) SortOrder must be in sequential order from` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) SortOrder must be in sequential order from` + +**Resolution** ```shell You probably have a default DuplicateRule in the target org. Retrieve it from target org, or delete it manually in target org, so you can deploy. @@ -998,11 +1184,13 @@ Ref: https://developer.salesforce.com/forums/?id=9060G000000I6SoQAK ``` --- -## Async exception in test class +## [Async exception in test class](sf-deployment-assistant/Async-exception-in-test-class.md) + +**Detection** -- `System.AsyncException: (.*) Apex` +- RegExp: `System.AsyncException: (.*) Apex` -**Resolution tip** +**Resolution** ```shell This may be a test class implementation issue in {1}. @@ -1010,22 +1198,26 @@ Please check https://developer.salesforce.com/forums/?id=9060G0000005kVLQAY ``` --- -## Test classes with 0% coverage +## [Test classes with 0% coverage](sf-deployment-assistant/Test-classes-with-0--coverage.md) -- ` 0%` +**Detection** -**Resolution tip** +- RegExp: ` 0%` + +**Resolution** ```shell Please make sure that none of the test classes are 0% covered ``` --- -## Can not test item deployment in simulation mode +## [Can not test item deployment in simulation mode](sf-deployment-assistant/Can-not-test-item-deployment-in-simulation-mode.md) + +**Detection** -- `Test only deployment cannot update` +- RegExp: `Test only deployment cannot update` -**Resolution tip** +**Resolution** ```shell THIS IS A FALSE POSITIVE @@ -1033,11 +1225,13 @@ When effective deployment will happen, it should pass ``` --- -## Unknown user permission: CreateAuditFields +## [Unknown user permission: CreateAuditFields](sf-deployment-assistant/Unknown-user-permission--CreateAuditFields.md) + +**Detection** -- 
`Unknown user permission: CreateAuditFields` +- String: `Unknown user permission: CreateAuditFields` -**Resolution tip** +**Resolution** ```shell You need to enable the "Create audit field" permission in the target org @@ -1045,11 +1239,13 @@ Please check https://help.salesforce.com/articleView?id=000334139&type=1&mode=1 ``` --- -## Unknown user permission: FieldServiceAccess +## [Unknown user permission: FieldServiceAccess](sf-deployment-assistant/Unknown-user-permission--FieldServiceAccess.md) -- `Unknown user permission: FieldServiceAccess` +**Detection** -**Resolution tip** +- String: `Unknown user permission: FieldServiceAccess` + +**Resolution** ```shell You need to enable the "Field Service Access" permission in the target org @@ -1057,11 +1253,13 @@ Please check https://help.salesforce.com/articleView?id=sf.fs_enable.htm&type=5 ``` --- -## Unknown user permission +## [Unknown user permission](sf-deployment-assistant/Unknown-user-permission.md) + +**Detection** -- `Unknown user permission:` +- String: `Unknown user permission:` -**Resolution tip** +**Resolution** ```shell You can: @@ -1070,44 +1268,52 @@ You can: ``` --- -## Variable does not exist +## [Variable does not exist](sf-deployment-assistant/Variable-does-not-exist.md) + +**Detection** -- `Error (.*) Variable does not exist: (.*) \((.*)\)` +- RegExp: `Error (.*) Variable does not exist: (.*) \((.*)\)` -**Resolution tip** +**Resolution** ```shell Apex error in {1} with unknown variable {2} at position {3}. If {2} is a class name, try to fix it, or maybe it is missing in the files or in package.xml ! 
``` --- -## Visibility is not allowed for type +## [Visibility is not allowed for type](sf-deployment-assistant/Visibility-is-not-allowed-for-type.md) -- `Error (.*) set the visibility for a (.*) to Protected unless you are in a developer` +**Detection** -**Resolution tip** +- RegExp: `Error (.*) set the visibility for a (.*) to Protected unless you are in a developer` + +**Resolution** ```shell Update the visibility of {1} to "Public" ``` --- -## Tableau CRM / Wave digest error +## [Tableau CRM / Wave digest error](sf-deployment-assistant/Tableau-CRM---Wave-digest-error.md) + +**Detection** -- `Fix the sfdcDigest node errors and then upload the file again` +- String: `Fix the sfdcDigest node errors and then upload the file again` -**Resolution tip** +**Resolution** ```shell Go to the target org, open profile "Analytics Cloud Integration User" and add READ rights to the missing object fields ``` --- -## XML item appears more than once +## [XML item appears more than once](sf-deployment-assistant/XML-item-appears-more-than-once.md) + +**Detection** -- `Error (.*) Field:(.*), value:(.*) appears more than once` +- RegExp: `Error (.*) Field:(.*), value:(.*) appears more than once` -**Resolution tip** +**Resolution** ```shell You probably made an error while merging conflicts diff --git a/docs/salesforce-deployment-assistant-home.md b/docs/salesforce-deployment-assistant-home.md index 211402aa9..23f37f65f 100644 --- a/docs/salesforce-deployment-assistant-home.md +++ b/docs/salesforce-deployment-assistant-home.md @@ -6,14 +6,33 @@ description: Learn how to sfdx-hardis deployment assistant can help you ! # sfdx-hardis Deployment Assistant +## Salesforce DevOps AI Integration + Deployment errors are common and quite boring, right ? Sfdx-hardis deployment assistant will help you to solve them, even if you are not using sfdx-hardis CI/CD pipelines ! 
-The assistant contain core rules and can be integrated with AI to provide you the best guidance :) +The assistant contains core rules and can optionally be integrated with AI to provide you the best guidance :) ![](assets/images/AI-Assistant.gif) +## Flow Visual Git Diff + +In addition to deployment tips, comments will be posted on PRs with Visual Git Diff for Flows, that will: + +- Visually show you the differences on a diagram +- Display the update details without having to open any XML ! + +🟩 = added + +🟥 = removed + +🟧 = updated + +![](assets/images/flow-visual-git-diff.jpg) + +![](assets/images/flow-visual-git-diff-2.jpg) + ## Integrations Deployment assistant will provide tips in Pull Request comments (GitHub, Gitlab, Azure, Bitbucket). diff --git a/docs/salesforce-deployment-assistant-setup.md b/docs/salesforce-deployment-assistant-setup.md index e2f67cecd..7cdf6e668 100644 --- a/docs/salesforce-deployment-assistant-setup.md +++ b/docs/salesforce-deployment-assistant-setup.md @@ -6,37 +6,63 @@ description: Learn how to setup Salesforce deployment assistant # Setup Salesforce Deployment Assistant -## Using sfdx-hardis CI/CD - -If you are using [sfdx-hardis CI/CD](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/), you are already all set ! +## Configure integrations -Just make sure to have configured your [GitHub](salesforce-ci-cd-setup-integration-github.md), [Gitlab](salesforce-ci-cd-setup-integration-gitlab.md), [Azure Pipelines](salesforce-ci-cd-setup-integration-azure.md) or [BitBucket](salesforce-ci-cd-setup-integration-bitbucket.md) integration so the deployment assistant can post its help in Pull Request comments. 
+Make sure to have configured your [GitHub](salesforce-ci-cd-setup-integration-github.md), [Gitlab](salesforce-ci-cd-setup-integration-gitlab.md), [Azure Pipelines](salesforce-ci-cd-setup-integration-azure.md) or [BitBucket](salesforce-ci-cd-setup-integration-bitbucket.md) integration so the deployment assistant can post its help in Pull Request comments. If you want to **supercharge Salesforce deployment assistant with AI**, process [sfdx-hardis AI setup](salesforce-ai-setup.md). +You can also receive [Slack](salesforce-ci-cd-setup-integration-slack.md), [Ms Teams](salesforce-ci-cd-setup-integration-ms-teams.md) and [Email](salesforce-ci-cd-setup-integration-email.md) notifications in case of successful deployment. + +If you configure [JIRA](salesforce-ci-cd-setup-integration-jira.md) or [Generic Ticketing](salesforce-ci-cd-setup-integration-generic-ticketing.md) integrations, ticket numbers will be extracted and displayed in the Pull Request comment. + +## Using sfdx-hardis CI/CD + +If you are using [sfdx-hardis CI/CD](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/), you are already all set ! + ## Using custom CI/CD pipeline Replace your calls to Salesforce CLI by calls to sfdx-hardis commands wrapper. 
-| sfdx command | Corresponding sfdx-hardis wrapper command | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------| -| [sfdx force:source:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_deploy) | [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) | -| [sfdx force:source:push](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_push) | [sfdx hardis:source:push](https://sfdx-hardis.cloudity.com/hardis/source/push/) | -| [sfdx force:mdapi:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_mdapi.htm#cli_reference_force_mdapi_beta_deploy) | [sfdx hardis:mdapi:deploy](https://sfdx-hardis.cloudity.com/hardis/mdapi/deploy/) | +| sfdx command | Corresponding sfdx-hardis wrapper command | +|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------| +| [sf project deploy start](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_start_unified) | [sf hardis:project:deploy:start](https://sfdx-hardis.cloudity.com/hardis/project/deploy/start/) | +| [sf project deploy 
validate](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_validate_unified) | [sf hardis:project:deploy:validate](https://sfdx-hardis.cloudity.com/hardis/project/deploy/validate/) | +| [sf project deploy quick](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_quick_unified) | [sf hardis:project:deploy:quick](https://sfdx-hardis.cloudity.com/hardis/project/deploy/quick/) | +| [sfdx force:source:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_deploy) ([**removed on 6 november**](https://github.com/forcedotcom/cli/issues/2974)) | [sf hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) | +| [sfdx force:source:push](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_push) ([**removed on 6 november**](https://github.com/forcedotcom/cli/issues/2974)) | [sf hardis:source:push](https://sfdx-hardis.cloudity.com/hardis/source/push/) | +| [sfdx force:mdapi:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_mdapi.htm#cli_reference_force_mdapi_beta_deploy) ([**removed on 6 november**](https://github.com/forcedotcom/cli/issues/2974)) | [sf hardis:mdapi:deploy](https://sfdx-hardis.cloudity.com/hardis/mdapi/deploy/) | Configure your [GitHub](salesforce-ci-cd-setup-integration-github.md), [Gitlab](salesforce-ci-cd-setup-integration-gitlab.md), [Azure Pipelines](salesforce-ci-cd-setup-integration-azure.md) or [BitBucket](salesforce-ci-cd-setup-integration-bitbucket.md) integration so the deployment assistant can post its help in Pull Request comments. 
_Notes:_ -- _sfdx-hardis deployment assistant currently do not support --json option. If you really need it please request it in sfdx-hardis GitHub issues !_ -- _there is no sfdx-hardis wrapper command yet for `sf project deploy start`. If you really need it please request it in sfdx-hardis GitHub issues !_ +- _sfdx-hardis deployment assistant now works better with **--json** option, please use it :)_ ### Example Replace: -`sfdx force:source:deploy -x manifest/package.xml --checkonly` +`sf project deploy start -x manifest/package.xml --checkonly` + +with: + +`sf hardis:project:deploy:start -x manifest/package.xml --checkonly` + +### Advanced example + +Replace: + +`sf project deploy start --dry-run --source-dir force-app --ignore-warnings --ignore-conflicts --test-level RunLocalTests --coverage-formatters json-summary --verbose --wait 120 --json` + +with: + +`sf hardis project deploy start --dry-run --source-dir force-app --ignore-warnings --ignore-conflicts --test-level RunLocalTests --coverage-formatters json-summary --verbose --wait 120 --json` + +## Not updating custom CI/CD pipeline + +You don't want to update your calls to `sf project deploy start` ? -by +That's ok, you can't benefit from the error management, but you can benefit from the Flows Visual Git Diff and other integrations anyway ! -`sfdx hardis:source:deploy -x manifest/package.xml --checkonly` \ No newline at end of file +Add the [Notify command](hardis/project/deploy/notify.md) to your custom CI/CD pipeline ! diff --git a/docs/salesforce-monitoring-apex-tests.md b/docs/salesforce-monitoring-apex-tests.md index 7fd9e0035..1bec5e377 100644 --- a/docs/salesforce-monitoring-apex-tests.md +++ b/docs/salesforce-monitoring-apex-tests.md @@ -8,7 +8,7 @@ description: Schedule daily apex test runs with sfdx-hardis monitoring Runs all local test classes of the org and calculate coverage. 
-Sfdx-hardis command: [sfdx hardis:org:test:apex](https://sfdx-hardis.cloudity.com/hardis/org/test/apex/) +Sfdx-hardis command: [sf hardis:org:test:apex](https://sfdx-hardis.cloudity.com/hardis/org/test/apex/) ### Grafana example diff --git a/docs/salesforce-monitoring-config-home.md b/docs/salesforce-monitoring-config-home.md index fb1b19631..1cc373d68 100644 --- a/docs/salesforce-monitoring-config-home.md +++ b/docs/salesforce-monitoring-config-home.md @@ -24,7 +24,7 @@ description: Learn how to configure a monitoring repository for a Salesforce Org All you need to configure sfdx-hardis Org Monitoring is a **GitHub** , **Gitlab**, **Azure** or **BitBucket** repository. -- Create and clone a git repository +- Create and clone a git repository (initialize it with README) - Open it with Visual Studio Code, then open [VsCode SFDX Hardis](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) extension menu. - If you need installations instructions, please [visit documentation page](salesforce-ci-cd-use-install.md) @@ -63,7 +63,7 @@ For a better user experience, it is highly recommended to configure notification - [Slack instructions](salesforce-ci-cd-setup-integration-slack.md) - [Microsoft Teams instructions](salesforce-ci-cd-setup-integration-ms-teams.md) - [Email instructions](salesforce-ci-cd-setup-integration-email.md) -- [API (Beta) instructions](salesforce-ci-cd-setup-integration-api.md) (example: for Grafana Loki integration) +- [Grafana instructions](salesforce-ci-cd-setup-integration-api.md) (example: for Grafana Loki integration) You can decide to run commands but not send some notifications by defining either a **notificationsDisable** property in `.sfdx-hardis.yml`, or a comma separated list in env variable **NOTIFICATIONS_DISABLE** @@ -107,6 +107,10 @@ If there are more than 10000 items, your monitoring job will crash. 
In that case, you can: -- Single Branch scope: Manually update file `manifest/package-skip-items.xml` in the branch corresponding to an org, then commit and push +- Single Branch scope: Manually update file `manifest/package-skip-items.xml` in the branch corresponding to an org, then commit and push. It works with: + - Full wildcard (`*`) + - Named metadata (`Account.Name`) + - Partial wildcards names (`pi__*` , `*__dlm` , or `prefix*suffix`) + - All branches scope: Define CI/CD env var **MONITORING_BACKUP_SKIP_METADATA_TYPES** with the list of additional metadata types you want to skip - example: \`MONITORING_BACKUP_SKIP_METADATA_TYPES=CustomLabel,StaticResource,Translation\` \ No newline at end of file diff --git a/docs/salesforce-monitoring-deprecated-api-calls.md b/docs/salesforce-monitoring-deprecated-api-calls.md index d15e83cdb..1ecef7cff 100644 --- a/docs/salesforce-monitoring-deprecated-api-calls.md +++ b/docs/salesforce-monitoring-deprecated-api-calls.md @@ -8,7 +8,7 @@ description: Schedule daily checks of suspect actions in setup with sfdx-hardis Will check if [legacy API versions are called by external tools](https://nicolas.vuillamy.fr/handle-salesforce-api-versions-deprecation-like-a-pro-335065f52238). 
-Sfdx-hardis command: [sfdx hardis:org:diagnose:legacyapi](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/legacyapi/) +Sfdx-hardis command: [sf hardis:org:diagnose:legacyapi](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/legacyapi/) Key: **LEGACY_API** diff --git a/docs/salesforce-monitoring-home.md b/docs/salesforce-monitoring-home.md index 0d054a1f6..872356498 100644 --- a/docs/salesforce-monitoring-home.md +++ b/docs/salesforce-monitoring-home.md @@ -1,6 +1,6 @@ --- title: How to monitor your Salesforce Org -description: Monitor your Salesforce orgs with daily metadata backup and more, with open source only +description: Free Salesforce Metadata BackUp , plus many extra monitoring features like Grafana Dashboards --- @@ -12,40 +12,48 @@ description: Monitor your Salesforce orgs with daily metadata backup and more, w > This feature worked yesterday in production, but today it crashes, what happened ? +_Instead of reading, watch the [presentation at Dreamforce 24](https://reg.salesforce.com/flow/plus/df24/sessioncatalog/page/catalog/session/1718915808069001Q7HH) conference in San Francisco !_ + +
+ +_or if you like reading, here are the slides !_ + +
+ Salesforce provide **Audit Trail** to trace configuration updates in **production** or **sandbox** orgs. You can **know who updated what**, but not with details (before / after). -Sfdx-hardis monitoring provides a **simple way to know the exact state of your orgs metadatas everyday**, or even several times a day, and provides an **exact and detailed comparison with the previous metadata configuration** (using git commits comparison) +Sfdx-hardis monitoring provides a simple way to **Backup your orgs metadatas everyday**, or even several times a day, and provides an **exact and detailed comparison with the previous metadata configuration** (using git commits comparison) -Installation and usage are **admin-friendly**, and **notifications** can be sent via **Slack** or **Microsoft Teams**. - -_Example of a monitoring git repository_ +Extra indicators are also available out of the box, like: -![](assets/images/screenshot-monitoring-git.jpg) +- Run **apex tests** (and soon flow tests) +- Analyze the **quality and the security of your metadatas** with [MegaLinter](https://megalinter.io/latest/) +- Checking org limits +- Be warned of release updates +- Check if you have [**deprecated api versions called**](https://nicolas.vuillamy.fr/handle-salesforce-api-versions-deprecation-like-a-pro-335065f52238) +- **Custom command lines** that you can [define in `.sfdx-hardis.yml`](https://sfdx-hardis.cloudity.com/hardis/org/monitor/all/) -_Example notifications with Slack_ +You don't need to work in CI/CD to use Monitoring, it is **compliant with any API enabled org** :) -![](assets/images/screenshot-slack-monitoring.jpg) +Installation and usage are **admin-friendly**, and **notifications** can be sent via **Slack** or **Microsoft Teams**. 
_Example of visualization in Grafana_ -![](assets/images/grafana-screenshot.png) +![](assets/images/grafana-screenshot.jpg) -![](assets/images/grafana-screenshot-2.png) +![](assets/images/grafana-screenshot-1.jpg) -_See presentation at Wir Sind Ohana conference in Berlin, may 2024_ +![](assets/images/grafana-screenshot-2.png) -
+_Example notifications with Slack_ -Extra features are also available, like: +![](assets/images/screenshot-slack-monitoring.jpg) -- Run **apex tests** (and soon flow tests) -- Analyze the **quality and the security of your metadatas** with [MegaLinter](https://megalinter.io/latest/) -- Check if you have [**deprecated api versions called**](https://nicolas.vuillamy.fr/handle-salesforce-api-versions-deprecation-like-a-pro-335065f52238) -- **Custom command lines** that you can [define in `.sfdx-hardis.yml`](https://sfdx-hardis.cloudity.com/hardis/org/monitor/all/) +_Example of a monitoring git repository_ -You don't need to work in CI/CD to use Monitoring, it is **compliant with any API enabled org** :) +![](assets/images/screenshot-monitoring-git.jpg) ## How does it work ? @@ -85,7 +93,10 @@ You can force the daily run of all commands by defining env var `MONITORING_IGNO | [Quality Checks with MegaLinter](salesforce-monitoring-quality-checks.md) | Daily | | [Detect limits issues](salesforce-monitoring-org-limits.md) | Daily | | [Detect calls to deprecated API versions](salesforce-monitoring-deprecated-api-calls.md) | Daily | +| [Check Release Updates](salesforce-monitoring-release-updates.md) | Weekly | | [Detect inactive users](salesforce-monitoring-inactive-users.md) | Weekly | +| [Detect not used Apex Classes (Batch,Schedulable,Queueable)](salesforce-monitoring-unused-apex-classes.md) | Weekly | +| [Detect not used Connected Apps](salesforce-monitoring-unused-connected-apps.md) | Weekly | | [Detect unused licenses](salesforce-monitoring-unused-licenses.md) | Weekly | | [Detect custom elements with no access rights defined in permission sets](salesforce-monitoring-missing-access.md) | Weekly | | [Detect custom labels and custom permissions that are not in use](salesforce-monitoring-unused-metadata.md) | Weekly | diff --git a/docs/salesforce-monitoring-inactive-metadata.md b/docs/salesforce-monitoring-inactive-metadata.md index d690938ba..9fb91cf3c 100644 --- 
a/docs/salesforce-monitoring-inactive-metadata.md +++ b/docs/salesforce-monitoring-inactive-metadata.md @@ -12,7 +12,19 @@ And what about this **deactivated Validation** Rule ? Maybe it's time to remove them ! -Sfdx-hardis command: [sfdx hardis:lint:metadatastatus](https://sfdx-hardis.cloudity.com/hardis/lint/metadatastatus/) +Full list of metadata types that are checked: + +- Approval Processes +- Assignment Rules +- Auto Response Rules +- Escalation Rules +- Flows +- Forecasting Types +- Record Types +- Validation Rules +- Workflow Rules + +Sfdx-hardis command: [sf hardis:lint:metadatastatus](https://sfdx-hardis.cloudity.com/hardis/lint/metadatastatus/) Key: **METADATA_STATUS** @@ -23,3 +35,7 @@ Key: **METADATA_STATUS** ### Slack example ![](assets/images/screenshot-monitoring-inactive-metadata.jpg) + +### Local example + +![](assets/images/detect-inactive-metadata.gif) \ No newline at end of file diff --git a/docs/salesforce-monitoring-inactive-users.md b/docs/salesforce-monitoring-inactive-users.md index bf655621a..6cdc5223c 100644 --- a/docs/salesforce-monitoring-inactive-users.md +++ b/docs/salesforce-monitoring-inactive-users.md @@ -8,7 +8,7 @@ description: Schedule daily checks of inactive users (not connected for 6 months Detect if you are paying licenses for users that did not login for more than 6 months ! 
-Sfdx-hardis command: [sfdx hardis:org:diagnose:unusedusers](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedusers/) +Sfdx-hardis command: [sf hardis:org:diagnose:unusedusers](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedusers/) Key: **UNUSED_USERS** diff --git a/docs/salesforce-monitoring-metadata-backup.md b/docs/salesforce-monitoring-metadata-backup.md index fa87e9bf8..5fa87ca6d 100644 --- a/docs/salesforce-monitoring-metadata-backup.md +++ b/docs/salesforce-monitoring-metadata-backup.md @@ -8,7 +8,7 @@ description: Schedule daily metadata backups with sfdx-hardis Monitoring Adds a new commit in the git branch with the newest updates since latest monitoring run. -Sfdx-hardis command: [sfdx hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/) +Sfdx-hardis command: [sf hardis:org:monitor:backup](https://sfdx-hardis.cloudity.com/hardis/org/monitor/backup/) ### Grafana example diff --git a/docs/salesforce-monitoring-missing-access.md b/docs/salesforce-monitoring-missing-access.md index 9a2b1927d..42da9ac50 100644 --- a/docs/salesforce-monitoring-missing-access.md +++ b/docs/salesforce-monitoring-missing-access.md @@ -8,7 +8,7 @@ description: Schedule daily checks of metadata without access with sfdx-hardis M If there are elements that nobody has access to (not existing on any Profile or Permission Set), maybe they should be removed ! 
-Sfdx-hardis command: [sfdx hardis:lint:access](https://sfdx-hardis.cloudity.com/hardis/lint/access/) +Sfdx-hardis command: [sf hardis:lint:access](https://sfdx-hardis.cloudity.com/hardis/lint/access/) Key: **LINT_ACCESS** diff --git a/docs/salesforce-monitoring-missing-metadata-attributes.md b/docs/salesforce-monitoring-missing-metadata-attributes.md index 269a6aaad..ac062c41a 100644 --- a/docs/salesforce-monitoring-missing-metadata-attributes.md +++ b/docs/salesforce-monitoring-missing-metadata-attributes.md @@ -8,7 +8,7 @@ description: Schedule daily check for missing metadata attributes with sfdx-hard Follow best practices by documenting your data model ! -Sfdx-hardis command: [sfdx hardis:lint:missingattributes](https://sfdx-hardis.cloudity.com/hardis/lint/missingattributes/) +Sfdx-hardis command: [sf hardis:lint:missingattributes](https://sfdx-hardis.cloudity.com/hardis/lint/missingattributes/) Key: **MISSING_ATTRIBUTES** diff --git a/docs/salesforce-monitoring-org-limits.md b/docs/salesforce-monitoring-org-limits.md index b5704825e..d15603144 100644 --- a/docs/salesforce-monitoring-org-limits.md +++ b/docs/salesforce-monitoring-org-limits.md @@ -21,7 +21,7 @@ This feature controls that they are not reached, and will send notifications: - Warning > 75% - Error > 100% -Sfdx-hardis command: [sfdx hardis:org:monitor:limits](https://sfdx-hardis.cloudity.com/hardis/org/monitor/limits/) +Sfdx-hardis command: [sf hardis:org:monitor:limits](https://sfdx-hardis.cloudity.com/hardis/org/monitor/limits/) Key: **ORG_LIMITS** diff --git a/docs/salesforce-monitoring-release-updates.md b/docs/salesforce-monitoring-release-updates.md new file mode 100644 index 000000000..b43fcd2f8 --- /dev/null +++ b/docs/salesforce-monitoring-release-updates.md @@ -0,0 +1,25 @@ +--- +title: Check Release Updates (Salesforce monitoring) +description: Schedule weekly checks of Setup Release Updates with sfdx-hardis Monitoring +--- + + +## Check Release Updates + +Before publishing **Breaking 
Changes** ❌, Salesforce announces them in the setup menu [**Release Updates**](https://help.salesforce.com/s/articleView?id=sf.release_updates.htm&type=5) + +⚠️ Some of them are very important, because if you don't make the related upgrades in time (ex: before Winter 25), your production org can crash ! + +This command will extract the Release Updates that need to be checked in your org ! + +Sfdx-hardis command: [sf hardis:org:diagnose:releaseupdates](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/releaseupdates/) + +Key: **RELEASE_UPDATES** + +### Grafana example + +![](assets/images/screenshot-monitoring-release-updates-grafana.jpg) + +### Slack example + +![](assets/images/screenshot-monitoring-release-updates.jpg) \ No newline at end of file diff --git a/docs/salesforce-monitoring-suspect-audit-trail.md b/docs/salesforce-monitoring-suspect-audit-trail.md index 3a22e47af..51aceda8f 100644 --- a/docs/salesforce-monitoring-suspect-audit-trail.md +++ b/docs/salesforce-monitoring-suspect-audit-trail.md @@ -8,7 +8,7 @@ description: Schedule daily checks of suspect actions in setup with sfdx-hardis Will extract from audit trail all actions that are considered as suspect, except the ones related to the deployment user and a given list of users, like the release manager. 
-Sfdx-hardis command: [sfdx hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/) +Sfdx-hardis command: [sf hardis:org:diagnose:audittrail](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/audittrail/) Key: **AUDIT_TRAIL** @@ -18,4 +18,12 @@ Key: **AUDIT_TRAIL** ### Slack example -![](assets/images/screenshot-monitoring-audittrail.jpg) \ No newline at end of file +![](assets/images/screenshot-monitoring-audittrail.jpg) + +## Excel output example + +![](assets/images/screenshot-monitoring-audittrail-excel.jpg) + +## Local output example + +![](assets/images/screenshot-monitoring-audittrail-local.jpg) diff --git a/docs/salesforce-monitoring-unused-apex-classes.md b/docs/salesforce-monitoring-unused-apex-classes.md new file mode 100644 index 000000000..bc44c76cb --- /dev/null +++ b/docs/salesforce-monitoring-unused-apex-classes.md @@ -0,0 +1,25 @@ +--- +title: Detect unused Apex Classes (Salesforce monitoring) +description: Schedule weekly checks of which Batch, Schedulable and Queueable classes are never called and could be deleted to improve Apex Test Classes performance +--- + + +## Detect unused Apex Classes + +List all async Apex classes (Batch,Queueable,Schedulable) that have not been called for more than 365 days. + +The resulting class list probably can be removed from the project, and that will improve your test classes' performance :) + +The command uses queries on AsyncApexJob and CronTrigger technical tables to build the result. 
+ +Sfdx-hardis command: [sf hardis:org:diagnose:unused-apex-classes](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-apex-classes/) + +Key: **UNUSED_APEX_CLASSES** + +### Grafana example + +![](assets/images/screenshot-monitoring-unused-apex-grafana.jpg) + +### Slack example + +![](assets/images/screenshot-monitoring-unused-apex.jpg) \ No newline at end of file diff --git a/docs/salesforce-monitoring-unused-connected-apps.md b/docs/salesforce-monitoring-unused-connected-apps.md new file mode 100644 index 000000000..8f63a68de --- /dev/null +++ b/docs/salesforce-monitoring-unused-connected-apps.md @@ -0,0 +1,25 @@ +--- +title: Detect unused Connected Apps (Salesforce monitoring) +description: Schedule weekly checks of Connected Apps that are not used anymore, but still accessible +--- + + +## Detect unused Connected Apps + +List all Connected Apps that might not be used anymore. + +You might disable them or even delete them to clean your org from technical debt and avoid security risks. 
+ +Sfdx-hardis command: [sf hardis:org:diagnose:unused-connected-apps](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unused-connected-apps/) + +Key: **CONNECTED_APPS** + +### Grafana example + +TODO + + +### Slack example + +TODO + \ No newline at end of file diff --git a/docs/salesforce-monitoring-unused-licenses.md b/docs/salesforce-monitoring-unused-licenses.md index 6e1bf7076..a27aa7233 100644 --- a/docs/salesforce-monitoring-unused-licenses.md +++ b/docs/salesforce-monitoring-unused-licenses.md @@ -17,7 +17,7 @@ This command detects such useless Permission Set Licenses Assignments and sugges Many thanks to [Vincent Finet](https://www.linkedin.com/in/vincentfinet/) for the inspiration during his great speaker session at [French Touch Dreamin '23](https://frenchtouchdreamin.com/), and his kind agreement for reusing such inspiration in this command :) -Sfdx-hardis command: [sfdx hardis:org:diagnose:unusedlicenses](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedlicenses/) +Sfdx-hardis command: [sf hardis:org:diagnose:unusedlicenses](https://sfdx-hardis.cloudity.com/hardis/org/diagnose/unusedlicenses/) Key: **UNUSED_LICENSES** diff --git a/docs/salesforce-monitoring-unused-metadata.md b/docs/salesforce-monitoring-unused-metadata.md index 375e55727..2cfdd90a7 100644 --- a/docs/salesforce-monitoring-unused-metadata.md +++ b/docs/salesforce-monitoring-unused-metadata.md @@ -13,7 +13,7 @@ Today working with: - Custom Labels - Custom Permissions -Sfdx-hardis command: [sfdx hardis:lint:unusedmetadatas](https://sfdx-hardis.cloudity.com/hardis/lint/unusedmetadatas/) +Sfdx-hardis command: [sf hardis:lint:unusedmetadatas](https://sfdx-hardis.cloudity.com/hardis/lint/unusedmetadatas/) Key: **UNUSED_METADATAS** diff --git a/docs/salesforce-project-doc-ai.md b/docs/salesforce-project-doc-ai.md new file mode 100644 index 000000000..10d8737d0 --- /dev/null +++ b/docs/salesforce-project-doc-ai.md @@ -0,0 +1,29 @@ +--- +title: Enhance with AI your Salesforce 
Project Documentation +description: Learn how to enhance your Salesforce documentation with AI +--- + + +## Improve with AI + +If AI Integration is configured, the following parts of the documentation will be generated / enhanced + +- Objects (with fields, validation rules, relationships and dependencies) +- Automations + - Approval Processes + - Assignment Rules + - AutoResponse Rules + - Escalation Rules + - Flows +- Authorizations + - Profiles + - Permission Set Groups + - Permission Sets +- Code + - Apex + - Lightning Web Components +- Lightning Pages + +Configure AI integration following the [related documentation](salesforce-ai-setup.md) + +See the [list of prompts used by sfdx-hardis](salesforce-ai-prompts.md) to enhance documentation with AI, and how to override them. diff --git a/docs/salesforce-project-doc-cloudflare.md b/docs/salesforce-project-doc-cloudflare.md new file mode 100644 index 000000000..a15fbaf9c --- /dev/null +++ b/docs/salesforce-project-doc-cloudflare.md @@ -0,0 +1,85 @@ +--- +title: Host your AI-enhanced Salesforce Project Documentation on Cloudflare free tier +description: Learn how to host your project doc pages on Cloudflare free tier up to 50 users +--- + + +## Host on Cloudflare + +You can also host your HTML documentation on Cloudflare free tier ! + +![](assets/images/screenshot-cloudflare-doc.jpg) + +### Manually + +- Run command **Documentation Generation -> Upload HTML Doc to Cloudflare** + - Corresponding command line: [`sf hardis:doc:mkdocs-to-cf`](hardis/doc/mkdocs-to-cf.md) + +### From CI/CD + +If using sfdx-hardis monitoring, just set the variable **SFDX_HARDIS_DOC_DEPLOY_TO_CLOUDFLARE=true** (or the .sfdx-hardis.yml variable `docDeployToCloudflare: true`) + +If using custom pipelines, add `sf hardis:doc:project2markdown --with-history` then `sf hardis:doc:mkdocs-to-cf` in your workflow. 
+ +Make sure the following variables are defined and available from the backup command script + +- SFDX_HARDIS_DOC_DEPLOY_TO_CLOUDFLARE=true +- CLOUDFLARE_EMAIL +- CLOUDFLARE_API_TOKEN +- CLOUDFLARE_ACCOUNT_ID +- CLOUDFLARE_PROJECT_NAME +- CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE (optional, `onetimepin` by default) +- CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN (optional, `@cloudity.com` by default) +- CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST (optional) + +### Multilingual documentation + +#### Using sfdx-hardis monitoring + +If you want to generate the documentation in multiple languages, define variable SFDX_DOC_LANGUAGES (ex: `SFDX_DOC_LANGUAGES=en,fr,de`) + +You can define one Cloudflare site by language, for example with the following variables: + +- `CLOUDFLARE_PROJECT_NAME_EN=cloudity-demo-english` +- `CLOUDFLARE_PROJECT_NAME_FR=cloudity-demo-french` +- `CLOUDFLARE_PROJECT_NAME_DE=cloudity-demo-german` + +#### Using custom pipelines + +Use variables when you call the commands, for example: + +```bash +PROMPTS_LANGUAGE=fr sf hardis:doc:project2markdown --with-history +CLOUDFLARE_PROJECT_NAME=cloudity-demo-french sf hardis:doc:mkdocs-to-cf + +PROMPTS_LANGUAGE=en sf hardis:doc:project2markdown --with-history +CLOUDFLARE_PROJECT_NAME=cloudity-demo-english sf hardis:doc:mkdocs-to-cf +``` + +## Create & Configure Cloudflare free tier + +Follow the video tutorial to see all described steps click by click :) + +
+ +- Create a new Cloudflare account on cloudflare.com + +- Create a Cloudflare API Token with the following scopes, and set it in CI/CD variable **CLOUDFLARE_API_TOKEN** + - Account.Cloudflare Pages + - Account.Access: Organizations, Identity Providers, and Groups + - Account.Access: Apps and Policies + - User.User Details + +- Set CI/CD variable **CLOUDFLARE_PROJECT_NAME**, it will be used to build the pages site (uses the repo name by default) + +- Set CI/CD variable **CLOUDFLARE_EMAIL**, with the email you used to create the Cloudflare account + +- Set CI/CD variable **CLOUDFLARE_ACCOUNT_ID** + - To get its value, run `npm install wrangler -g && wrangler login && wrangler whoami` + +- Set CI/CD variable **CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN**, using your company email domain. + +- Configure a new **Cloudflare Zero Trust Team** + - Select free tier (you need a credit card that won't be charged) + +- If you want additional access policies, create them manually in Cloudflare Zero trust config, and set their ids in variable **CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST** \ No newline at end of file diff --git a/docs/salesforce-project-doc-complete-manually.md b/docs/salesforce-project-doc-complete-manually.md new file mode 100644 index 000000000..d1ad4eff4 --- /dev/null +++ b/docs/salesforce-project-doc-complete-manually.md @@ -0,0 +1,90 @@ +--- +title: Complete manually sfdx-hardis generated doc +description: Learn how to define your own documentation from sfdx-hardis generated one +--- + + +## Manually complete documentation + +sfdx-hardis generated a brand new doc using Metadata and AI enhancements. + +But maybe you would like to complete it manually ? + +We got you covered, with 3 ways to manually update generated doc and even create your own pages ! 
+ +### Overwrite AI-generated parts + +In the generated markdown files in `docs` folder and sub folders, you will see HTML comments around AI-generated text, like below + +```markdown + + + + +## Describe flow Opportunity_AfterInsert + +Some summary generated by calls to LLMs +... + + +``` + +It tells you which file you have to rename and how. + +Just rename the file `fr-PROMPT_DESCRIBE_FLOW-Opportunity_AfterInsert-2527848841.md` as `fr-PROMPT_DESCRIBE_FLOW-Opportunity_AfterInsert.md`, update the content, generate doc again, and you're all set ! + +Example result: + +```markdown + + + + +## Describe flow Opportunity_AfterInsert + +Your own text here ! +... + + +``` + +### Overwrite the whole file + +At the beginning of generated markdown files, you will see comments like below. + +```markdown + + + +Some sfdx-hardis generated content text +... +``` + +Update the comment to define ``, then update the documentation with your own text, then you're all set ! + +```markdown + + + +My own documentation ! +... +``` + +Note: AI-generated parts won't be calculated anymore if you define ``. + +### Add your own pages + +Create markdown files in **docs** folder. + +Example: `docs/my-custom-doc-page.md` + +To add a link to your markdown pages in the menu, update **nav** section of file **mkdocs.yml**. 
+ +Example: + +```yaml + - Branches & Orgs: sfdx-hardis-branches-and-orgs.md + - Installed Packages: installed-packages.md + - My Custom menu: + My Custom Page: my-custom-doc-page.md +``` diff --git a/docs/salesforce-project-doc-generate.md b/docs/salesforce-project-doc-generate.md new file mode 100644 index 000000000..a38359f65 --- /dev/null +++ b/docs/salesforce-project-doc-generate.md @@ -0,0 +1,27 @@ +--- +title: Generate Salesforce Project Documentation +description: Learn how to generate Salesforce project documentation, including Flows Visual Differences in History +--- + + +## How To generate + +- Use the Git repository containing your SFDX project, or create it easily using [sfdx-hardis Monitoring](salesforce-monitoring-home.md), or simply calling [BackUp command](hardis/org/monitor/backup.md) + +- [Activate AI Integration](salesforce-ai-setup.md) (Optional but recommended) + +- Call VsCode SFDX-Hardis command [**Documentation Generation > Generate Project Documentation (with history)**](hardis/doc/project2markdown.md) + - Corresponding command line: `sf hardis:doc:project2markdown --with-history` + +Here is a click by click tutorial to generate your documentation locally (but it's best to use sfdx-hardis monitoring once you are convinced ^^) + +
+ +## Run Locally + +- Run command **Documentation Generation > Run local HTML Doc Pages** (Note: you need [Python](https://www.python.org/downloads/) on your computer) + - Corresponding command lines: `pip install mkdocs-material mdx_truly_sane_lists`, then `mkdocs serve -v` + - Alternative 1: `python -m pip install mkdocs-material mdx_truly_sane_lists`, then `python -m mkdocs serve -v` + - Alternative 2: `py -m pip install mkdocs-material mdx_truly_sane_lists`, then `py -m mkdocs serve -v` + +- Open in your Web Browser \ No newline at end of file diff --git a/docs/salesforce-project-doc-host-on-salesforce.md b/docs/salesforce-project-doc-host-on-salesforce.md new file mode 100644 index 000000000..86cd39c1b --- /dev/null +++ b/docs/salesforce-project-doc-host-on-salesforce.md @@ -0,0 +1,30 @@ +--- +title: Host your AI-enhanced Salesforce Project Documentation on Salesforce +description: Learn how to host your project doc pages on Salesforce +--- + + +## Host on Salesforce org + +You can also host the HTML documentation directly in your Salesforce org ! + +> If you have too many metadatas, the pages will be too big for a single static resource. In that case, consider self-hosting your doc website using Cloudflare or your own secured HTTP server. 
+ +![](assets/images/screenshot-html-doc.jpg) + +### Manually + +- Run command **Documentation Generation -> Upload HTML Doc to Salesforce** + - Corresponding command line: [`sf hardis:doc:mkdocs-to-salesforce`](hardis/doc/mkdocs-to-salesforce.md) + +- Set generated Custom Tab as `Default On` on your Profile if necessary + +- Assign generated Permission Set to the users you want to access the SFDX Doc tab + +- Add the tab in a Lightning Application (optional) + +### From CI/CD + +If using sfdx-hardis monitoring, just set the variable **SFDX_HARDIS_DOC_DEPLOY_TO_ORG=true** (or the .sfdx-hardis.yml variable `docDeployToOrg: true`) + +If using custom pipelines, add `sf hardis:doc:project2markdown --with-history` then `sf hardis:doc:mkdocs-to-salesforce` in your workflow. diff --git a/docs/salesforce-project-documentation.md b/docs/salesforce-project-documentation.md new file mode 100644 index 000000000..45b12b91c --- /dev/null +++ b/docs/salesforce-project-documentation.md @@ -0,0 +1,44 @@ +--- +title: Your AI-enhanced Salesforce Project Documentation +description: Learn how to generate Salesforce project documentation, including Flows Visual Differences in History +--- + + +## Salesforce Project Documentation + +With a single command, you can generate a Web Site documenting your Salesforce metadatas + +- Objects (with fields, validation rules, relationships and dependencies) +- Automations + - Approval Processes + - Assignment Rules + - AutoResponse Rules + - Escalation Rules + - Flows +- Authorizations + - Profiles + - Permission Set Groups + - Permission Sets +- Code + - Apex + - Lightning Web Components +- Lightning Pages +- Packages +- SFDX-Hardis Config +- Branches & Orgs +- Manifests + +The documentation will be [**supercharged with AI**](salesforce-ai-setup.md) if activated; + +![](assets/images/project-documentation.gif) + +![](assets/images/screenshot-object-diagram.jpg) + +![](assets/images/screenshot-doc-apex.png) + 
+![](assets/images/screenshot-project-doc-profile.gif) + +If it is a sfdx-hardis CI/CD project, a diagram of the branches and orgs strategy will be generated. + +![](assets/images/screenshot-doc-branches-strategy.jpg) + diff --git a/docs/schema/sfdx-hardis-json-schema-parameters.html b/docs/schema/sfdx-hardis-json-schema-parameters.html index f53ec6456..848f18d7f 100644 --- a/docs/schema/sfdx-hardis-json-schema-parameters.html +++ b/docs/schema/sfdx-hardis-json-schema-parameters.html @@ -48,7 +48,7 @@

/> allowedOrgTypes

Allowed org types

Type: array of enum (of string)
-

Types of orgs allowed for config & development. If not set, sandbox and scratch are allowed by default

+

Types of Salesforce Orgs allowed for config & development. If not set, Sandbox Orgs and Scratch Orgs are allowed by default

@@ -119,7 +119,8 @@

/> autoCleanTypes

Auto-Cleaning types

Type: array of enum (of string)
-

When saving a sfdx-hardis task, the list of cleanings will be automatically applied to sfdx sources

+

When saving/publishing a User Story, sfdx-hardis can automatically clean the sources before submitting a Pull Request.
+Select the cleanings that will be automatically applied on your project.

@@ -148,7 +149,7 @@

autoCleanTypes itemsType: enum (of string)

Must be one of:

-
  • "caseentitlement"
  • "checkPermissions"
  • "dashboards"
  • "datadotcom"
  • "destructivechanges"
  • "flowPositions"
  • "localfields"
  • "listViewsMine"
  • "minimizeProfiles"
  • "productrequest"
  • "systemDebug"
  • "v60"
+
  • "caseentitlement"
  • "checkPermissions"
  • "dashboards"
  • "datadotcom"
  • "destructivechanges"
  • "flowPositions"
  • "localfields"
  • "listViewsMine"
  • "minimizeProfiles"
  • "productrequest"
  • "sensitiveMetadatas"
  • "systemDebug"
  • "v60"
@@ -192,8 +193,19 @@

/> autoRemoveUserPermissions

Auto-Remove User Permissions on profiles

Type: array of string
-

When saving a sfdx-hardis task, these permissions will be removed from profiles

-
+
+

When your dev sandbox is using the next SF Platform version, sometimes some permissions on Profiles exist on the next version but not the current one.
+To avoid issues, you can force the removal of such permissions when Saving/Publishing a User Story
+Example:
+- EnableCommunityAppLauncher
+- OmnichannelInventorySync

+ +
+
+ +
@@ -262,8 +274,20 @@

/> autoRetrieveWhenPull

Auto-Retrieve when pull

Type: array of string
-

When calling hardis:scratch:pull, if you define metadatas (named or not), they will also be retrieved using force:source:retrieve

-
+
+

Sometimes, SF Cli forgets to pull some metadata updates, like Custom Application for example.
+sfdx-hardis can automatically retrieve named metadatas to avoid issues.
+Example:
+- CustomApplication:MyApp1
+- CustomApplication:MyApp2
+Works also with full metadata types (ex: CustomApplication)

+ +
+
+ +
@@ -334,8 +358,9 @@

d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z" /> - apexTestsMinCoverageOrgWide

Minimum apex test coverage %

Type: number Default: 75.0
-

Minimum percentage of apex code coverage accepted

+ apexTestsMinCoverageOrgWide

Minimum Apex Tests coverage % accepted for a deployment

Type: number Default: 75.0
+

Minimum percentage of apex code coverage accepted.
+75.0% by default, but if you are on a clean project, it's better to define 80.0, 95.0 or 90.0 😎

@@ -375,7 +400,9 @@

/> availableProjects

Available projects

Type: array of string
-

List of business projects that are managed in the same repository. Will be used to build git branch name when using hardis:work:new

+

List of business projects that are managed in the same repository.
+If defined, when creating a new User Story, it will be prompted to the contributor then used to create the git branch name.
+If a value contains a comma, the left part will be used for key and the right part as label for the users.

@@ -444,9 +471,19 @@

d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z" /> - availableTargetBranches

Available target branches

Type: array of string
-

List of git branches that can be used as target for merge requests

-
+ availableTargetBranches

Available PR/MR target branches

Type: array of string
+
+

List of git branches that can be used as target for Pull Requests.
+Contributors will be prompt to select one of these target branch when creating a new User Story
+A classic example on a project with BUILD & RUN in parallel is to have preprod and integration as available target branches.
+If defined, makes obsolete the parameter Default Pull Request target branch.

+ +
+
+ +
@@ -483,8 +520,8 @@


Example:

[
-    "develop",
-    "develop_next"
+    "preprod",
+    "integration"
 ]
 
@@ -1191,6 +1228,65 @@

+ + + + +
+
+
+

+ +

+
+ +
+
+ +

Context

Type: enum (of string) Default: "all"
+

Context when the command must be run

+
+

Must be one of:

+
  • "all"
  • "check-deployment-only"
  • "process-deployment-only"
+
+ + + + + +
+
Examples:
+
"all"
+
+
"check-deployment-only"
+
+
"process-deployment-only"
+
+
@@ -1893,14 +1989,14 @@

"label": "Generate manifest", "icon": "file.svg", "tooltip": "Generates a manifest package.xml using local sfdx source files", - "command": "sfdx force:source:manifest:create --sourcepath force-app --manifestname myNewManifest" + "command": "sf project generate manifest --source-path force-app --name myNewManifest" }, { "id": "list-all-orgs", "label": "List all orgs", "icon": "salesforce.svg", "tooltip": "List all orgs that has already been authenticated using sfdx", - "command": "sfdx force:org:list --all" + "command": "sf org list --all" } ] }, @@ -2405,7 +2501,7 @@

d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z" /> - defaultPackageInstallationKey

Defaut package installation key

Type: string
+ defaultPackageInstallationKey

Default package installation key

Type: string

When generating a new package version protected with password, use this value as default package installation key

@@ -2447,8 +2543,8 @@

d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z" /> - developmentBranch

Default pull/merge request target org

Type: string Default: "developpement"
-

When creating a new sfdx-hardis task, this git branch is used as base to create the feature/debug sub branch. The merge request will later have this branch as target.

+ developmentBranch

Default Pull Request/Merge Request target branch when you create a new User Story.

Type: string Default: "developpement"
+

When creating a new sfdx-hardis User Story, this git branch is used as base to create the feature/debug sub branch. The merge request will later have this branch as target.

@@ -2960,6 +3056,44 @@

+
+
+
+

+ +

+
+ +
+
+ +

Dev Hub Instance URL

Type: string Default: "https://login.salesforce.com"
+

Dev Hub instance URL used for authenticating to DevHub from CI jobs

+
+ + + + + +
+
Example:
+
"DevHub_MyClientMyProject"
+
+
+
+
+
+
@@ -3002,18 +3136,18 @@

-
+
-
+

- +

-
+

Extends remote configuration URL

Type: string
-

You can base your local sfdx-hardis configuration on a remote config file. That allows you to have the same config base for all your projects

+ docDeployToCloudflare

Doc: Deploy to Cloudflare

Type: boolean Default: false
+

Automatically deploy MkDocs HTML documentation from CI/CD Workflows to Cloudflare

@@ -3033,25 +3167,25 @@


Example:
-
"https://raw.githubusercontent.com/worldcompany/shared-config/main/.sfdx-hardis.yml"
+
true
 

-
+
-
+

- +

-
+

Initial Permission Sets

Type: array
-

When creating a scratch org, Admin user will be automatically assigned to those permission sets

+ docDeployToOrg

Doc: Deploy to Salesforce Org

Type: boolean Default: false
+

Automatically deploy MkDocs HTML documentation from CI/CD Workflows to Salesforce org as static resource

- No Additional Items

Each item of this array must be:

+
+
Example:
+
true
+
+
+
+
+
+
+
-
- +
+

+ +

+
+ +
+
Type: object or string
-

Permission Set or Permission Set Group

-
No Additional Properties + extends

Extends remote configuration URL

Type: string
+

You can base your local sfdx-hardis configuration on a remote config file. That allows you to have the same config base for all your projects

+
- -
+
+
Example:
+
"https://raw.githubusercontent.com/worldcompany/shared-config/main/.sfdx-hardis.yml"
+
+
+
+
+
+
+
-
+

- +

-
+

Initial Permission Sets

Type: array of string
+

When creating a scratch org, Admin user will be automatically assigned to those permission sets. Example: PS_Admin

+
+ + + + + + No Additional Items

Each item of this array must be:

+
+
+ + +

Name

Type: string
-

Permission Set or Permission Set Group name

-
+ initPermissionSets items
Type: string
+ -
-
Example:
-
[
-    "MyPermissionSet",
-    "MyPermissionSetGroup",
-    "MyPermissionSetGroup2"
-]
-
-
-
-
-
-
+

Example:
@@ -3797,8 +3950,8 @@

d="M4 8a.5.5 0 0 1 .5-.5h5.793L8.146 5.354a.5.5 0 1 1 .708-.708l3 3a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708-.708L10.293 8.5H4.5A.5.5 0 0 1 4 8z" /> - installPackagesDuringCheckDeploy

Install packages during deployment checks

Type: boolean Default: false
-

When calling deployment check command, installs any package referred within installedPackages property

+ installPackagesDuringCheckDeploy

Install packages during deployment checks workflow

Type: boolean Default: false
+

If your configuration contains an installedPackages property, activating this option allows you to make sfdx-hardis automatically install packages during the Deployments Check workflow, and not to wait after the merge of the Pull Request.

@@ -3878,7 +4031,7 @@

/> listViewsToSetToMine

List views to set to Mine

Type: array of string Default: []
-

List of ListView items to set to Mine after a delivery (that does not accept value 'Everything')

+

List of ListView items to set to Mine after a delivery (that does not accept value 'Everything')

@@ -3988,18 +4141,18 @@

-
+
-
+

- +

-
+

Monitoring commands

Type: array of object
-

List of monitoring commands to run with command hardis:org:monitor:all

+ manualActionsFileUrl

Manual Actions File URL

Type: string Default: ""
+

URL of the XLS file that contains manual actions to perform before or after a deployment

- No Additional Items

Each item of this array must be:

+
+
Example:
+
"https://some.sharepoint.com/file.xlsx"
+
+
+
+
+
+
+
-
- +
+

+ +

+
+ +
+
Type: object
-

Monitoring command

-
No Additional Properties + mergeTargets

Merge target branches

Type: array of string
+

In branch-scoped config file, declares the list of branches that the current one can have as merge target. For example, integration will have mergeTargets [uat]

+
- -
+ No Additional Items

Each item of this array must be:

-
-

- -

-
- -
-
+
+

Key

Type: string
-

Unique identifier of the monitoring command. Can be used with monitoringDisable

-
+ mergeTargets items
Type: string
+ +
+

+
Examples:
+
[
+    "preprod"
+]
+
+
[
+    "integration"
+]
+
+
-
+
-
+

- +

+
+ +
+
+ +

Monitoring commands

Type: array of object
+

List of monitoring commands to run with command hardis:org:monitor:all

+
+ + + + + + No Additional Items

Each item of this array must be:

+
+
+ + + Type: object
+

Monitoring command

+
No Additional Properties + + + + + + +
+
+
+

+ +

+
+ +
+
+ +

Key

Type: string
+

Unique identifier of the monitoring command. Can be used with monitoringDisable

+
+ + + + + + +
+
+
+
+
+
+
+

+

@@ -4244,13 +4507,13 @@

{ "title": "Detect calls to deprecated API versions", "key": "LEGACYAPI", - "command": "sfdx hardis:org:diagnose:legacyapi", + "command": "sf hardis:org:diagnose:legacyapi", "frequency": "weekly" }, { "title": "My custom command", "key": "MY_CUSTOM_KEY", - "command": "sfdx my:custom:command", + "command": "sf my:custom:command", "frequency": "daily" } ] @@ -4282,7 +4545,7 @@

/> monitoringAllowedSectionsActions

Monitoring Allowed Sections Actions

Type: object
-

Override list of Setup Audit Trail elements that won't be considered as suspect by monitoring tools

+

Override list of Setup Audit Trail elements that won't be considered as suspect by monitoring tools

@@ -4387,7 +4650,7 @@

Must be one of:

-
  • "AUDIT_TRAIL"
  • "LEGACY_API"
  • "LINT_ACCESS"
  • "UNUSED_METADATAS"
  • "METADATA_STATUS"
  • "MISSING_ATTRIBUTES"
  • "UNUSED_LICENSES"
+
  • "AUDIT_TRAIL"
  • "LEGACY_API"
  • "LINT_ACCESS"
  • "UNUSED_METADATAS"
  • "METADATA_STATUS"
  • "MISSING_ATTRIBUTES"
  • "UNUSED_LICENSES"
  • "RELEASE_UPDATES"
@@ -4667,6 +4930,88 @@

+
+
+
+

+ +

+
+ +
+
+ +

User Story name validation regex

Type: string Default: ""
+

If you define a regular expression, it will be used to validate the name of new User Stories.
+For example, you can enforce a Jira number in the name with regex '^MYPROJECT-[0-9]+ .*'

+
+ + + + + +
+
Examples:
+
"^[A-Z]+-[0-9]+ .*"
+
+
"^CLOUDITY-[0-9]+ .*"
+
+
"^MYPROJECT-[0-9]+ .*"
+
+
+
+
+
+
+
+
+
+

+ +

+
+ +
+
+ +

Example string for User Story name validation regex

Type: string Default: ""
+

If you activated User Story name validation via RegEx, define an example value that will be displayed to users.
+Example: 'MYPROJECT-168 Update account status validation rule'

+
+ + + + + +
+
Example:
+
"MYPROJECT-123 Update account status validation rule"
+
+
+
+
+
+
@@ -4720,7 +5065,7 @@

Must be one of:

-
  • "AUDIT_TRAIL"
  • "APEX_TESTS"
  • "BACKUP"
  • "DEPLOYMENT"
  • "LEGACY_API"
  • "LINT_ACCESS"
  • "UNUSED_METADATAS"
  • "METADATA_STATUS"
  • "MISSING_ATTRIBUTES"
  • "UNUSED_LICENSES"
+
  • "AUDIT_TRAIL"
  • "APEX_TESTS"
  • "BACKUP"
  • "DEPLOYMENT"
  • "LEGACY_API"
  • "LINT_ACCESS"
  • "UNUSED_METADATAS"
  • "METADATA_STATUS"
  • "MISSING_ATTRIBUTES"
  • "UNUSED_LICENSES"
  • "RELEASE_UPDATES"
@@ -4741,6 +5086,47 @@

Must be one of:

+
+
+
+

+ +

+
+ +
+
+ +

Branch-scoped custom Package-No-Overwrite path

Type: string Default: ""
+

By default, manifest/package-no-overwrite.xml is used, but you could decide to use a different file for specific major branches.
+In that case, set the path to a custom package-no-overwrite XML file in a branch-scoped sfdx-hardis configuration file.

+
+ + + + + +
+
Examples:
+
"manifest/package-no-overwrite-main.xml"
+
+
"manifest/package-no-overwrite-custom.xml"
+
+
+
+
+
+
@@ -5030,6 +5416,290 @@

+
+
+
+

+ +

+
+ +
+
+ +

Refresh Sandbox Configuration

Type: object
+

Configuration for sandbox refresh. Will be used by command hardis:org:refresh:before-refresh and hardis:org:refresh:after-refresh

+
No Additional Properties + + + + + + +
+
+
+

+ +

+
+ +
+
+ +

Connected Apps

Type: array of string
+

List of connected apps to download before refresh and to upload after refresh

+
+ + + + + + No Additional Items

Each item of this array must be:

+
+
+ + + Type: string
+ + + + + + + +
+

+
Example:
+
[
+    "My_Connected_App_1",
+    "My_Connected_App_2"
+]
+
+
+
+
+
+
+
+
+
+

+ +

+
+ +
+
+ +

Custom Settings

Type: array of string
+

List of Custom Settings to download before refresh and to upload after refresh

+
+ + + + + + No Additional Items

Each item of this array must be:

+
+
+ + + Type: string
+ + + + + + + +
+

+
Example:
+
[
+    "MyCustomSetting1__c",
+    "MyCustomSetting2__c"
+]
+
+
+
+
+
+
+
+
+
+

+ +

+
+ +
+
+ +

Data Workspaces

Type: array of string
+

List of data workspaces to download before refresh and to upload after refresh

+
+ + + + + + No Additional Items

Each item of this array must be:

+
+
+ + + Type: string
+ + + + + + + +
+

+
Example:
+
[
+    "scripts/data/AnonymizeAccounts",
+    "scripts/data/AnonymizeContacts",
+    "scripts/data/AnonymizeLeads"
+]
+
+
+
+
+
+
+
+
+
+
@@ -5203,7 +5873,7 @@

/> scratchOrgInitApexScripts

Scratch org init apex scripts

Type: array of string
-

Apex scripts to call after scratch org initialization

+

Apex scripts to call after scratch org initialization. Example: scripts/apex/init-scratch.apex

@@ -5489,6 +6159,41 @@

+

+
+
+
+
+
+
+

+ +

+
+ +
+
+ +

Contributors can share Dev Sandboxes

Type: boolean Default: false
+

Set to true if contributors can share dev sandboxes
+If active, contributors will never be asked to refresh their sandbox metadata, to avoid accidentally overwriting their colleagues' work :)

+
+ + + + + +
@@ -5634,7 +6339,79 @@

/> useDeltaDeployment

Use Delta Deployment

Type: boolean Default: false
-

Defines if sfdx-hardis will deploy in delta from minor to major branches

+

Defines if sfdx-hardis will deploy in delta from minor to major branches.
+When active, Delta Deployments allow deploying only the metadata in the branch / User Story, and not the full sources of the SFDX project.
+Note: Even if activated, Delta Deployments will be applied only for Pull Requests from minor (features,hotfixes) to major branches (integration,preprod).

+
+ + + + + + +
+
+
+
+
+
+
+

+ +

+
+ +
+
+ +

Use Delta Deployment with dependencies

Type: boolean Default: false
+

Also deploy dependencies of the metadata identified by delta deployment, to avoid broken deployments due to missing dependencies.
+ Example: a removed picklist value in a field that is still used in a record type.

+
+ + + + + + +
+
+
+
+
+
+
+

+ +

+
+ +
+
+ +

Use Smart Deployment Tests

Type: boolean Default: false
+

Defines if Smart Deployment Tests will be activated and run Apex test classes only if metadata that can impact them is present in the branch / User Story.
+Note: Smart Deployment Tests will be applied only for Pull Requests from minor (features,hotfixes) to major branches (integration,preprod).

@@ -5692,6 +6469,6 @@

\ No newline at end of file diff --git a/docs/sf-deployment-assistant/A-sharing-rule-may-be-useless.md b/docs/sf-deployment-assistant/A-sharing-rule-may-be-useless.md new file mode 100644 index 000000000..c06d4adac --- /dev/null +++ b/docs/sf-deployment-assistant/A-sharing-rule-may-be-useless.md @@ -0,0 +1,16 @@ +--- +title: "A sharing rule may be useless (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Required field is missing: sharingCriteriaRules\"" +--- + +# A sharing rule may be useless + +## Detection + +- String: `Required field is missing: sharingCriteriaRules` + +## Resolution + +```shell +Are you sure you need this sharing rule ? You may remove it from the sfdx project +``` diff --git a/docs/sf-deployment-assistant/API-Version-error.md b/docs/sf-deployment-assistant/API-Version-error.md new file mode 100644 index 000000000..d60650da5 --- /dev/null +++ b/docs/sf-deployment-assistant/API-Version-error.md @@ -0,0 +1,19 @@ +--- +title: "API Version error (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) The (.*) apiVersion can't be \"([0-9]+)\"\"" +--- + +# API Version error + +## Detection + +- RegExp: `Error (.*) The (.*) apiVersion can't be "([0-9]+)"` + +## Resolution + +```shell +{1} metadata has probably been created/updated in a sandbox already upgraded to next platform version (ex: Sandbox in Summer'23 and Production in Spring'23) +- First, try to update the api version in the XML of {1} metadata file (decrement the number in {3}.0) +- If it still doesn't work because the metadata structure has changed between version, you may try a sf project:retrieve:start of the metadata by forcing --api-version at the end of the command. 
+ +``` diff --git a/docs/sf-deployment-assistant/Allow-deployment-with-pending-Apex-Jobs.md b/docs/sf-deployment-assistant/Allow-deployment-with-pending-Apex-Jobs.md new file mode 100644 index 000000000..35b1a72b8 --- /dev/null +++ b/docs/sf-deployment-assistant/Allow-deployment-with-pending-Apex-Jobs.md @@ -0,0 +1,17 @@ +--- +title: "Allow deployment with pending Apex Jobs (Deployment assistant)" +description: "How to solve Salesforce deployment error \"You can bypass this error by allowing deployments with Apex jobs in the Deployment Settings page in Setup.\"" +--- + +# Allow deployment with pending Apex Jobs + +## Detection + +- String: `You can bypass this error by allowing deployments with Apex jobs in the Deployment Settings page in Setup.` + +## Resolution + +```shell +Go to target org, in Setup -> Deployment Settings -> Activate option "Allow deployments of components when corresponding Apex jobs are pending or in progress." + +``` diff --git a/docs/sf-deployment-assistant/Async-exception-in-test-class.md b/docs/sf-deployment-assistant/Async-exception-in-test-class.md new file mode 100644 index 000000000..86addb707 --- /dev/null +++ b/docs/sf-deployment-assistant/Async-exception-in-test-class.md @@ -0,0 +1,17 @@ +--- +title: "Async exception in test class (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/System.AsyncException: (.*) Apex\"" +--- + +# Async exception in test class + +## Detection + +- RegExp: `System.AsyncException: (.*) Apex` + +## Resolution + +```shell +This may be a test class implementation issue in {1}. 
+Please check https://developer.salesforce.com/forums/?id=9060G0000005kVLQAY +``` diff --git a/docs/sf-deployment-assistant/CRM-Analytics--A-Recipe-must-specify-a-DataFlow.md b/docs/sf-deployment-assistant/CRM-Analytics--A-Recipe-must-specify-a-DataFlow.md new file mode 100644 index 000000000..72bdb62c9 --- /dev/null +++ b/docs/sf-deployment-assistant/CRM-Analytics--A-Recipe-must-specify-a-DataFlow.md @@ -0,0 +1,20 @@ +--- +title: "CRM Analytics: A Recipe must specify a DataFlow (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) A Recipe must specify a Dataflow\"" +--- + +# CRM Analytics: A Recipe must specify a DataFlow + +## Detection + +- RegExp: `Error (.*) A Recipe must specify a Dataflow` + +## Resolution + +```shell +You must include related WaveDataFlow {1} in sources (and probably in package.xml too). +To retrieve it, run: sf project retrieve start -m WaveDataFlow:{1} -u SOURCE_ORG_USERNAME +You can also retrieve all analytics sources in one shot using sf hardis:org:retrieve:source:analytics -u SOURCE_ORG_USERNAME + - https://salesforce.stackexchange.com/a/365453/33522 + - https://help.salesforce.com/s/articleView?id=000319274&type=1 +``` diff --git a/docs/sf-deployment-assistant/Campaign-can-not-be-updated.md b/docs/sf-deployment-assistant/Campaign-can-not-be-updated.md new file mode 100644 index 000000000..8944c4cd3 --- /dev/null +++ b/docs/sf-deployment-assistant/Campaign-can-not-be-updated.md @@ -0,0 +1,17 @@ +--- +title: "Campaign can not be updated (Deployment assistant)" +description: "How to solve Salesforce deployment error \"The object "Campaign" can't be updated\"" +--- + +# Campaign can not be updated + +## Detection + +- String: `The object "Campaign" can't be updated` + +## Resolution + +```shell +Add "MarketingUser" in project-scratch-def.json features +If it is already done, you may manually check "MarketingUser" field on the scratch org user +``` diff --git 
a/docs/sf-deployment-assistant/Can-not-change-field-type-to-a-formula-field.md b/docs/sf-deployment-assistant/Can-not-change-field-type-to-a-formula-field.md new file mode 100644 index 000000000..1b4964304 --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-change-field-type-to-a-formula-field.md @@ -0,0 +1,18 @@ +--- +title: "Can not change field type to a formula field (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot update a field from a Formula to something else\"" +--- + +# Can not change field type to a formula field + +## Detection + +- RegExp: `Error (.*) Cannot update a field from a Formula to something else` + +## Resolution + +```shell +You need to manually delete or rename the field in the target org to allow the deployment to pass +- First, try to manually delete field {1} in the target org +- if you can't delete {1}, rename it into {1}_ToDel, then once the deployment done, delete {1}_ToDel +``` diff --git a/docs/sf-deployment-assistant/Can-not-change-field-type-with-picklist.md b/docs/sf-deployment-assistant/Can-not-change-field-type-with-picklist.md new file mode 100644 index 000000000..9cc33db9c --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-change-field-type-with-picklist.md @@ -0,0 +1,19 @@ +--- +title: "Can not change field type with picklist (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot change which global value set this picklist uses\"" +--- + +# Can not change field type with picklist + +## Detection + +- RegExp: `Error (.*) Cannot change which global value set this picklist uses` + +## Resolution + +```shell +You probably updated the type of field {1}, and Salesforce does not allows that with deployments. 
You can: +- Try to manually change the type of {1} directly in target org, but it may not be technically possible +- Delete field {1} in target org: it will be recreated after deployment (but you will loose data on existing records, so be careful if your target is a production org) +- Create another field with desired type and manage data recovery if the target is a production org +``` diff --git a/docs/sf-deployment-assistant/Can-not-change-type-due-to-existing-data.md b/docs/sf-deployment-assistant/Can-not-change-type-due-to-existing-data.md new file mode 100644 index 000000000..a5087fbdc --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-change-type-due-to-existing-data.md @@ -0,0 +1,21 @@ +--- +title: "Can not change type due to existing data (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot change type due to existing data\"" +--- + +# Can not change type due to existing data + +## Detection + +- RegExp: `Error (.*) Cannot change type due to existing data` + +## Resolution + +```shell +It is usually not recommended to change types of fields, but if it's really necessary you can: +- Manually change the type of {1} in the target org +- If you can't manually change the type: + - you may modify the dependencies (Formulas, Flows...) using {1}, so they don't use this field + - you can also delete dependencies (Formulas, Flows...) 
using {1}, but make sure they are deployed again later +- More help: https://help.salesforce.com/s/articleView?id=000327186&type=1 +``` diff --git a/docs/sf-deployment-assistant/Can-not-delete-custom-field.md b/docs/sf-deployment-assistant/Can-not-delete-custom-field.md new file mode 100644 index 000000000..536543ce2 --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-delete-custom-field.md @@ -0,0 +1,18 @@ +--- +title: "Can not delete custom field (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/This (.*) is referenced elsewhere in salesforce.com\"" +--- + +# Can not delete custom field + +## Detection + +- RegExp: `This (.*) is referenced elsewhere in salesforce.com` +- RegExp: `Le champ personnalisé (.*) est utilisé dans (.*)` + +## Resolution + +```shell +Custom field {1} can not be deleted because it is used elsewhere. Remove its references ans try again +THIS MAY BE A FALSE POSITIVE if you are just testing the deployment, as destructiveChanges are deployed separately from updated items deployment check +``` diff --git a/docs/sf-deployment-assistant/Can-not-delete-record-type.md b/docs/sf-deployment-assistant/Can-not-delete-record-type.md new file mode 100644 index 000000000..421dff7fa --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-delete-record-type.md @@ -0,0 +1,18 @@ +--- +title: "Can not delete record type (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot delete record type through API\"" +--- + +# Can not delete record type + +## Detection + +- RegExp: `Error (.*) Cannot delete record type through API` + +## Resolution + +```shell +You need to manually delete record type {1} in target org +- Edit record type {1}, uncheck "Active" +- Delete record type {1} +``` diff --git a/docs/sf-deployment-assistant/Can-not-find-folder.md b/docs/sf-deployment-assistant/Can-not-find-folder.md new file mode 100644 index 000000000..cf5019b8f --- /dev/null +++ 
b/docs/sf-deployment-assistant/Can-not-find-folder.md @@ -0,0 +1,20 @@ +--- +title: "Can not find folder (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot find folder:(.*)\"" +--- + +# Can not find folder + +## Detection + +- RegExp: `Error (.*) Cannot find folder:(.*)` + +## Resolution + +```shell +Folder {2} is missing. +- If folder {2} is existing in sources, add it in related package.xml +- If folder {2} is not existing in DX sources, please use sf hardis:project:clean:retrievefolders to retrieve it +- If both previous solutions did not work, go create manually folder {2} in target org + +``` diff --git a/docs/sf-deployment-assistant/Can-not-find-user--2-.md b/docs/sf-deployment-assistant/Can-not-find-user--2-.md new file mode 100644 index 000000000..5e6570528 --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-find-user--2-.md @@ -0,0 +1,19 @@ +--- +title: "Can not find user (2) (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) In field: (.*) - no User named (.*) found\"" +--- + +# Can not find user (2) + +## Detection + +- RegExp: `Error (.*) In field: (.*) - no User named (.*) found` + +## Resolution + +```shell +You made reference to username {3} in {1}, and it probably does not exist in the target org. 
+- Do not use named users, but user public groups for assignments -> https://help.salesforce.com/s/articleView?id=sf.creating_and_editing_groups.htm&type=5 +- or Create matching user {3} in the target deployment org +- or open {1} metadata and remove the XML part referring to hardcoded username {3} +``` diff --git a/docs/sf-deployment-assistant/Can-not-find-user.md b/docs/sf-deployment-assistant/Can-not-find-user.md new file mode 100644 index 000000000..586a7b1e9 --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-find-user.md @@ -0,0 +1,27 @@ +--- +title: "Can not find user (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot find a user that matches any of the following usernames\"" +--- + +# Can not find user + +## Detection + +- RegExp: `Error (.*) Cannot find a user that matches any of the following usernames` + +## Resolution + +```shell +You made reference to username(s) in {1}, and those users probably do not exist in target org. 
+- Do not use named users, but user public groups for assignments -> https://help.salesforce.com/s/articleView?id=sf.creating_and_editing_groups.htm&type=5 +- or Create matching user(s) in the target deployment org +- or Remove the XML part referring to hardcoded usernames + +Example of XML you have to remove in {1}: + + + Manage + nicolas.vuillamy@hardis-scratch-po-tgci-root-develop_20220412_0604.com + User + +``` diff --git a/docs/sf-deployment-assistant/Can-not-test-item-deployment-in-simulation-mode.md b/docs/sf-deployment-assistant/Can-not-test-item-deployment-in-simulation-mode.md new file mode 100644 index 000000000..2266c60a0 --- /dev/null +++ b/docs/sf-deployment-assistant/Can-not-test-item-deployment-in-simulation-mode.md @@ -0,0 +1,17 @@ +--- +title: "Can not test item deployment in simulation mode (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/Test only deployment cannot update\"" +--- + +# Can not test item deployment in simulation mode + +## Detection + +- RegExp: `Test only deployment cannot update` + +## Resolution + +```shell +THIS IS A FALSE POSITIVE +When effective deployment will happen, it should pass +``` diff --git a/docs/sf-deployment-assistant/Cannot-update-a-field-to-a-Summary-from-something-else.md b/docs/sf-deployment-assistant/Cannot-update-a-field-to-a-Summary-from-something-else.md new file mode 100644 index 000000000..1d1f29e2e --- /dev/null +++ b/docs/sf-deployment-assistant/Cannot-update-a-field-to-a-Summary-from-something-else.md @@ -0,0 +1,19 @@ +--- +title: "Cannot update a field to a Summary from something else (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Cannot update a field to a (.*) from something else\"" +--- + +# Cannot update a field to a Summary from something else + +## Detection + +- RegExp: `Error (.*) Cannot update a field to a (.*) from something else` + +## Resolution + +```shell +You probably updated the type of field {1} to type 
{2}, and Salesforce does not allows that with deployments. You can: +- Try to manually change the type of {1} directly in target org, but it may not be technically possible +- Delete field {1} in target org: it will be recreated after deployment (but you will loose data on existing records, so be careful if your target is a production org) +- Create another field with desired type and manage data recovery if the target is a production org +``` diff --git a/docs/sf-deployment-assistant/Change-Matching-Rule.md b/docs/sf-deployment-assistant/Change-Matching-Rule.md new file mode 100644 index 000000000..f6668d358 --- /dev/null +++ b/docs/sf-deployment-assistant/Change-Matching-Rule.md @@ -0,0 +1,16 @@ +--- +title: "Change Matching Rule (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Before you change a matching rule, you must deactivate it\"" +--- + +# Change Matching Rule + +## Detection + +- RegExp: `Error (.*) Before you change a matching rule, you must deactivate it` + +## Resolution + +```shell +To be able to deploy, you must go in target org setup to manually deactivate matching rule {1} +``` diff --git a/docs/sf-deployment-assistant/Condition-missing-reference.md b/docs/sf-deployment-assistant/Condition-missing-reference.md new file mode 100644 index 000000000..90995c117 --- /dev/null +++ b/docs/sf-deployment-assistant/Condition-missing-reference.md @@ -0,0 +1,19 @@ +--- +title: "Condition missing reference (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) field integrity exception: unknown \(A condition has a reference to (.*), which doesn't exist.\)\"" +--- + +# Condition missing reference + +## Detection + +- RegExp: `Error (.*) field integrity exception: unknown \(A condition has a reference to (.*), which doesn't exist.\)` + +## Resolution + +```shell +There is a reference to {2} in {1}, and {2} is not found. 
You can either: +- Add {2} in your deployment sources and make sure it is named in package.xml +- Remove the reference to {2} in {1} + +``` diff --git a/docs/sf-deployment-assistant/Couldn-t-retrieve-or-load-information-on-the-field.md b/docs/sf-deployment-assistant/Couldn-t-retrieve-or-load-information-on-the-field.md new file mode 100644 index 000000000..8a0dd9eed --- /dev/null +++ b/docs/sf-deployment-assistant/Couldn-t-retrieve-or-load-information-on-the-field.md @@ -0,0 +1,19 @@ +--- +title: "Couldn't retrieve or load information on the field (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Something went wrong. We couldn't retrieve or load the information on the field: (.*)\.\"" +--- + +# Couldn't retrieve or load information on the field + +## Detection + +- RegExp: `Error (.*) Something went wrong. We couldn't retrieve or load the information on the field: (.*)\.` + +## Resolution + +```shell +There is a reference to {2} in {1}, and {2} is not found. 
You can either: +- Commit {2} in your deployment sources and make sure it is named in package.xml +- Remove the reference to {2} in {1} + +``` diff --git a/docs/sf-deployment-assistant/Custom-field-not-found.md b/docs/sf-deployment-assistant/Custom-field-not-found.md new file mode 100644 index 000000000..929647d61 --- /dev/null +++ b/docs/sf-deployment-assistant/Custom-field-not-found.md @@ -0,0 +1,27 @@ +--- +title: "Custom field not found (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error PS_Admin In field: field - no CustomField named User.expcloud__Portal_Username__c found\"" +--- + +# Custom field not found + +## Detection + +- RegExp: `Error (.*) In field: (.*) - no CustomField named (.*)\.(.*) found` + +## Examples + +- `Error PS_Admin In field: field - no CustomField named User.expcloud__Portal_Username__c found` + +## Resolution + +```shell +A reference to a custom field {3}.{4} is not found in {1}: +- If you renamed {3}.{4}, do a search/replace in {1} with previous field name and {4} +- If you deleted {3}.{4}, or if you don't want to deploy it, do a search on {4} in all sources, and remove all XML elements referring to {3}.{4} (except in destructiveChanges.xml) +- If {3}.{4} should exist, make sure it is in force-app/main/default/objects/{3}/fields and that {3}.{4} is in manifest/package.xml in CustomField section +- If {3}.{4} is standard, the error is because {3}.{4} is not available in the org you are trying to deploy to. You can: + - Remove the reference to {4} in the XML of {1} ( maybe sf hardis:project:clean:references can clean automatically for you ! 
) + - Activate the required features/license in the target org + +``` diff --git a/docs/sf-deployment-assistant/Custom-metadata-entry-not-found.md b/docs/sf-deployment-assistant/Custom-metadata-entry-not-found.md new file mode 100644 index 000000000..fee6f4db1 --- /dev/null +++ b/docs/sf-deployment-assistant/Custom-metadata-entry-not-found.md @@ -0,0 +1,19 @@ +--- +title: "Custom metadata entry not found (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) In field: (.*) - no CustomMetadata named (.*) found\"" +--- + +# Custom metadata entry not found + +## Detection + +- RegExp: `Error (.*) In field: (.*) - no CustomMetadata named (.*) found` + +## Resolution + +```shell +A reference to a custom metadata {3} of type {2} is not found in {1}: +- Are you sure you deployed {3} ? +- If you use a package.xml, is {3} present within type CustomMetadata ? + +``` diff --git a/docs/sf-deployment-assistant/Custom-object-not-found.md b/docs/sf-deployment-assistant/Custom-object-not-found.md new file mode 100644 index 000000000..b74d25ef0 --- /dev/null +++ b/docs/sf-deployment-assistant/Custom-object-not-found.md @@ -0,0 +1,21 @@ +--- +title: "Custom object not found (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) In field: field - no CustomObject named (.*) found\"" +--- + +# Custom object not found + +## Detection + +- RegExp: `Error (.*) In field: field - no CustomObject named (.*) found` + +## Resolution + +```shell +A reference to a custom object {2} is not found in {1}: +- If you renamed the custom object, do a search/replace in sources with previous object name and new object name +- If you deleted the custom object, or if you don't want to deploy it, do a search on the custom object name, and remove XML elements referencing it +- If the object should exist, make sure it is in force-app/main/default/objects and that the object name is in manifest/package.xml in CustomObject section +You 
may also have a look to command sf hardis:project:clean:references + +``` diff --git a/docs/sf-deployment-assistant/Dependent-class-is-invalid-and-needs-recompilation.md b/docs/sf-deployment-assistant/Dependent-class-is-invalid-and-needs-recompilation.md new file mode 100644 index 000000000..6f7512c6a --- /dev/null +++ b/docs/sf-deployment-assistant/Dependent-class-is-invalid-and-needs-recompilation.md @@ -0,0 +1,17 @@ +--- +title: "Dependent class is invalid and needs recompilation (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Dependent class is invalid and needs recompilation\"" +--- + +# Dependent class is invalid and needs recompilation + +## Detection + +- RegExp: `Error (.*) Dependent class is invalid and needs recompilation` + +## Resolution + +```shell +Solve the other errors and this one will disappear ! + +``` diff --git a/docs/sf-deployment-assistant/Duplicate-label.md b/docs/sf-deployment-assistant/Duplicate-label.md new file mode 100644 index 000000000..356740faa --- /dev/null +++ b/docs/sf-deployment-assistant/Duplicate-label.md @@ -0,0 +1,16 @@ +--- +title: "Duplicate label (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Duplicate label: (.*)\"" +--- + +# Duplicate label + +## Detection + +- RegExp: `Error (.*) Duplicate label: (.*)` + +## Resolution + +```shell +You probably renamed the picklist API name for {2}. 
Please update manually the picklist {1} in the target org to avoid to have a duplicate label +``` diff --git a/docs/sf-deployment-assistant/Duplicate-value-Platform-Action-Id-List.md b/docs/sf-deployment-assistant/Duplicate-value-Platform-Action-Id-List.md new file mode 100644 index 000000000..6027638a5 --- /dev/null +++ b/docs/sf-deployment-assistant/Duplicate-value-Platform-Action-Id-List.md @@ -0,0 +1,16 @@ +--- +title: "Duplicate value Platform Action Id List (Deployment assistant)" +description: "How to solve Salesforce deployment error \"duplicate value found: PlatformActionListId duplicates value on record with id\"" +--- + +# Duplicate value Platform Action Id List + +## Detection + +- String: `duplicate value found: PlatformActionListId duplicates value on record with id` + +## Resolution + +```shell +There are probably issue with conflict management. Open the XML of the source item, and replace all numbers to make an ascending order, starting with 0 +``` diff --git a/docs/sf-deployment-assistant/Empty-source-items.md b/docs/sf-deployment-assistant/Empty-source-items.md new file mode 100644 index 000000000..bf485720e --- /dev/null +++ b/docs/sf-deployment-assistant/Empty-source-items.md @@ -0,0 +1,19 @@ +--- +title: "Empty source items (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Required field is missing: sharingOwnerRules\"" +--- + +# Empty source items + +## Detection + +- String: `Required field is missing: sharingOwnerRules` +- String: `Required field is missing: standardValue` +- String: `Required field is missing: valueTranslation` + +## Resolution + +```shell +You probably retrieved empty items, that must not be included within the SFDX project +To remove them, please run sfdx:hardis:project:clean:emptyitems +``` diff --git a/docs/sf-deployment-assistant/Enable-CRM-Analytics.md b/docs/sf-deployment-assistant/Enable-CRM-Analytics.md new file mode 100644 index 000000000..e3179fe86 --- /dev/null +++ 
b/docs/sf-deployment-assistant/Enable-CRM-Analytics.md @@ -0,0 +1,17 @@ +--- +title: "Enable CRM Analytics (Deployment assistant)" +description: "How to solve Salesforce deployment error \"It should be created by enabling the CRM Analytics Cloud preference\"" +--- + +# Enable CRM Analytics + +## Detection + +- String: `It should be created by enabling the CRM Analytics Cloud preference` + +## Resolution + +```shell +You must enable CRM Analytics (ex Wave, Einstein Analytics & Tableau CRM) in the target org. +You probably also need to add CRM Analytics Admin Permission Set assignment to the deployment user +``` diff --git a/docs/sf-deployment-assistant/Error-parsing-file.md b/docs/sf-deployment-assistant/Error-parsing-file.md new file mode 100644 index 000000000..ab13aed28 --- /dev/null +++ b/docs/sf-deployment-assistant/Error-parsing-file.md @@ -0,0 +1,17 @@ +--- +title: "Error parsing file (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Error parsing file: (.*)\"" +--- + +# Error parsing file + +## Detection + +- RegExp: `Error (.*) Error parsing file: (.*)` + +## Resolution + +```shell +There has been an error parsing the XML file of {1}: {2} +- Open file {1} and look where the error can be ! (merge issue, typo, XML tag not closed...) 
+``` diff --git a/docs/sf-deployment-assistant/Expired-Access---Refresh-Token.md b/docs/sf-deployment-assistant/Expired-Access---Refresh-Token.md new file mode 100644 index 000000000..43b9571a3 --- /dev/null +++ b/docs/sf-deployment-assistant/Expired-Access---Refresh-Token.md @@ -0,0 +1,16 @@ +--- +title: "Expired Access / Refresh Token (Deployment assistant)" +description: "How to solve Salesforce deployment error \"expired access/refresh token\"" +--- + +# Expired Access / Refresh Token + +## Detection + +- String: `expired access/refresh token` + +## Resolution + +```shell +Run command "Select another org" from Status panel (or sf hardis:org:select) to authenticate again to your org +``` diff --git a/docs/sf-deployment-assistant/Field-not-available-for-element.md b/docs/sf-deployment-assistant/Field-not-available-for-element.md new file mode 100644 index 000000000..35db8227b --- /dev/null +++ b/docs/sf-deployment-assistant/Field-not-available-for-element.md @@ -0,0 +1,17 @@ +--- +title: "Field not available for element (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/Field (.*) is not available for\"" +--- + +# Field not available for element + +## Detection + +- RegExp: `Field (.*) is not available for` + +## Resolution + +```shell +You probably changed the type of field {1}. 
+Find field {1} in the source XML, and remove the section using it +``` diff --git a/docs/sf-deployment-assistant/Flow-must-be-deleted-manually.md b/docs/sf-deployment-assistant/Flow-must-be-deleted-manually.md new file mode 100644 index 000000000..0d000f2dc --- /dev/null +++ b/docs/sf-deployment-assistant/Flow-must-be-deleted-manually.md @@ -0,0 +1,16 @@ +--- +title: "Flow must be deleted manually (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/.flow (.*) insufficient access rights on cross-reference id\"" +--- + +# Flow must be deleted manually + +## Detection + +- RegExp: `.flow (.*) insufficient access rights on cross-reference id` + +## Resolution + +```shell +Flow {1} can not be deleted using deployments, please delete it manually in the target org using menu Setup -> Flows , context menu on {1} -> View details and versions -> Deactivate all versions -> Delete flow +``` diff --git a/docs/sf-deployment-assistant/Formula-picklist-field-issue.md b/docs/sf-deployment-assistant/Formula-picklist-field-issue.md new file mode 100644 index 000000000..8c2318002 --- /dev/null +++ b/docs/sf-deployment-assistant/Formula-picklist-field-issue.md @@ -0,0 +1,18 @@ +--- +title: "Formula picklist field issue (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Les champs de liste de sélection sont pris en charge uniquement dans certaines fonctions.\"" +--- + +# Formula picklist field issue + +## Detection + +- String: `Les champs de liste de sélection sont pris en charge uniquement dans certaines fonctions.` + +## Resolution + +```shell +You probably changed the type of a field that is used in a formula. +Update the formula to use a field compliant with formulas. 
+More details at https://help.salesforce.com/articleView?id=sf.tips_on_building_formulas.htm&type=5 +``` diff --git a/docs/sf-deployment-assistant/Insufficient-access-rights-on-cross-reference-id.md b/docs/sf-deployment-assistant/Insufficient-access-rights-on-cross-reference-id.md new file mode 100644 index 000000000..df3df89e6 --- /dev/null +++ b/docs/sf-deployment-assistant/Insufficient-access-rights-on-cross-reference-id.md @@ -0,0 +1,17 @@ +--- +title: "Insufficient access rights on cross-reference id (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) insufficient access rights on cross-reference id\"" +--- + +# Insufficient access rights on cross-reference id + +## Detection + +- RegExp: `Error (.*) insufficient access rights on cross-reference id` + +## Resolution + +```shell +- If {1} is a Flow, it can not be deleted using deployments, please delete it manually in the target org using menu Setup -> Flows , context menu on {1} -> View details and versions -> Deactivate all versions -> Delete flow +- If you changed a custom field from unique to not unique, you need to manually make the change in the target org +``` diff --git a/docs/sf-deployment-assistant/Invalid-field-for-upsert.md b/docs/sf-deployment-assistant/Invalid-field-for-upsert.md new file mode 100644 index 000000000..c2df3a98f --- /dev/null +++ b/docs/sf-deployment-assistant/Invalid-field-for-upsert.md @@ -0,0 +1,20 @@ +--- +title: "Invalid field for upsert (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Invalid field for upsert, must be an External Id custom or standard indexed field: (.*) \((.*)\)\"" +--- + +# Invalid field for upsert + +## Detection + +- RegExp: `Error (.*) Invalid field for upsert, must be an External Id custom or standard indexed field: (.*) \((.*)\)` + +## Resolution + +```shell +You tried to use field {2} for an upsert call in {1}. +- Is it declared as externalId ? 
+- Is the customIndex source file present in the deployment ? +- If it is declared as externalId and customIndex is present, you may have to go manually define the field as externalId in the target org + +``` diff --git a/docs/sf-deployment-assistant/Invalid-field-in-related-list.md b/docs/sf-deployment-assistant/Invalid-field-in-related-list.md new file mode 100644 index 000000000..42e60abba --- /dev/null +++ b/docs/sf-deployment-assistant/Invalid-field-in-related-list.md @@ -0,0 +1,28 @@ +--- +title: "Invalid field in related list (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Invalid field:(.*) in related list:(.*)\"" +--- + +# Invalid field in related list + +## Detection + +- RegExp: `Error (.*) Invalid field:(.*) in related list:(.*)` + +## Resolution + +```shell +Field {2} is unknown. You can: +- Activate the related feature license or option to make {2} existing in target org +- Update XML of {1} to remove reference to field {2} in the related list {3} +- Update XML of {1} to remove the whole related list {3} +Example of XML to remove: + + SOLUTION.ISSUE + SOLUTION.SOLUTION_NUMBER + SOLUTION.STATUS + CORE.USERS.ALIAS + RelatedSolutionList + + +``` diff --git a/docs/sf-deployment-assistant/Invalid-formula-grouping-context.md b/docs/sf-deployment-assistant/Invalid-formula-grouping-context.md new file mode 100644 index 000000000..d10a67b87 --- /dev/null +++ b/docs/sf-deployment-assistant/Invalid-formula-grouping-context.md @@ -0,0 +1,16 @@ +--- +title: "Invalid formula grouping context (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Invalid custom summary formula definition: You must select a grouping context to use any report summary function\"" +--- + +# Invalid formula grouping context + +## Detection + +- String: `Invalid custom summary formula definition: You must select a grouping context to use any report summary function` + +## Resolution + +```shell +You need to update 
your Report definition. See workaround here -> https://salesforce.stackexchange.com/questions/294850/grouping-error-with-prevgroupval-function +``` diff --git a/docs/sf-deployment-assistant/Invalid-report-type.md b/docs/sf-deployment-assistant/Invalid-report-type.md new file mode 100644 index 000000000..79d81a0e4 --- /dev/null +++ b/docs/sf-deployment-assistant/Invalid-report-type.md @@ -0,0 +1,18 @@ +--- +title: "Invalid report type (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) invalid report type\"" +--- + +# Invalid report type + +## Detection + +- RegExp: `Error (.*) invalid report type` + +## Resolution + +```shell +Report type is missing for report {1} +- Open report {1} to see what report type is used +- Retrieve the report type from an org and add it to the sfdx sources +``` diff --git a/docs/sf-deployment-assistant/Invalid-scope-Mine--not-allowed.md b/docs/sf-deployment-assistant/Invalid-scope-Mine--not-allowed.md new file mode 100644 index 000000000..a561d7f31 --- /dev/null +++ b/docs/sf-deployment-assistant/Invalid-scope-Mine--not-allowed.md @@ -0,0 +1,19 @@ +--- +title: "Invalid scope:Mine, not allowed (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Invalid scope:Mine, not allowed\"" +--- + +# Invalid scope:Mine, not allowed + +## Detection + +- String: `Invalid scope:Mine, not allowed` + +## Resolution + +```shell +Replace Mine by Everything in the list view SFDX source XML. 
+Have a look at this command to manage that automatically :) +https://sfdx-hardis.cloudity.com/hardis/org/fix/listviewmine/ + +``` diff --git a/docs/sf-deployment-assistant/Invalid-type.md b/docs/sf-deployment-assistant/Invalid-type.md new file mode 100644 index 000000000..c068055c5 --- /dev/null +++ b/docs/sf-deployment-assistant/Invalid-type.md @@ -0,0 +1,16 @@ +--- +title: "Invalid type (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Invalid type: (.*) \((.*)\)\"" +--- + +# Invalid type + +## Detection + +- RegExp: `Error (.*) Invalid type: (.*) \((.*)\)` + +## Resolution + +```shell +Apex error in {1} with unknown type {2} at position {3}. If {2} is a class name, try to fix it, or maybe it is missing in the files or in package.xml ! +``` diff --git a/docs/sf-deployment-assistant/Mandatory-custom-field-can-not-be-in-a-profile-permission-set.md b/docs/sf-deployment-assistant/Mandatory-custom-field-can-not-be-in-a-profile-permission-set.md new file mode 100644 index 000000000..92ced61e8 --- /dev/null +++ b/docs/sf-deployment-assistant/Mandatory-custom-field-can-not-be-in-a-profile-permission-set.md @@ -0,0 +1,24 @@ +--- +title: "Mandatory custom field can not be in a profile/permission set (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) You cannot deploy to a required field: (.*)\"" +--- + +# Mandatory custom field can not be in a profile/permission set + +## Detection + +- RegExp: `Error (.*) You cannot deploy to a required field: (.*)` + +## Resolution + +```shell + +- Search for {2} in source file XML of {1}, then remove the entries matching the results +Example of element to delete: + + true + {2} + true + + +``` diff --git a/docs/sf-deployment-assistant/Missing-Data-Category-Group.md b/docs/sf-deployment-assistant/Missing-Data-Category-Group.md new file mode 100644 index 000000000..5ac0d2aa1 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-Data-Category-Group.md 
@@ -0,0 +1,19 @@ +--- +title: "Missing Data Category Group (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) In field: DeveloperName - no DataCategoryGroup named (.*) found\"" +--- + +# Missing Data Category Group + +## Detection + +- RegExp: `Error (.*) In field: DeveloperName - no DataCategoryGroup named (.*) found` + +## Resolution + +```shell +If Data Category Group {2} is not existing yet in target org, you might need to: +- create it manually in target org before deployment +- comment DataCategoryGroup in {1} XML + +``` diff --git a/docs/sf-deployment-assistant/Missing-Feature-Work-Com.md b/docs/sf-deployment-assistant/Missing-Feature-Work-Com.md new file mode 100644 index 000000000..a24b59399 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-Feature-Work-Com.md @@ -0,0 +1,17 @@ +--- +title: "Missing Feature Work.Com (Deployment assistant)" +description: "How to solve Salesforce deployment error \"WorkBadgeDefinition\"" +--- + +# Missing Feature Work.Com + +## Detection + +- String: `WorkBadgeDefinition` + +## Resolution + +```shell +Work.com feature must be activated in the target org. +- Org & Scratch: https://developer.salesforce.com/docs/atlas.en-us.workdotcom_dev_guide.meta/workdotcom_dev_guide/wdc_cc_setup_dev_org.htm +``` diff --git a/docs/sf-deployment-assistant/Missing-Quick-Action.md b/docs/sf-deployment-assistant/Missing-Quick-Action.md new file mode 100644 index 000000000..fa5399fbf --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-Quick-Action.md @@ -0,0 +1,23 @@ +--- +title: "Missing Quick Action (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) In field: QuickAction - no QuickAction named (.*) found\"" +--- + +# Missing Quick Action + +## Detection + +- RegExp: `Error (.*) In field: QuickAction - no QuickAction named (.*) found` + +## Resolution + +```shell +QuickAction {2} referred in {1} is unknown. 
You can either: +- Make sure your QuickAction {2} is present in source files and in package.xml +- If {2} is a standard QuickAction, activate related feature in target org +- Solve other errors that could impact QuickAction {2} +- Remove QuickAction {2} in the source XML of {1}. Example of XML to remove below: + + FeedItem.RypplePost + +``` diff --git a/docs/sf-deployment-assistant/Missing-Sales-Team.md b/docs/sf-deployment-assistant/Missing-Sales-Team.md new file mode 100644 index 000000000..629989117 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-Sales-Team.md @@ -0,0 +1,22 @@ +--- +title: "Missing Sales Team (Deployment assistant)" +description: "How to solve Salesforce deployment error \"related list:RelatedAccountSalesTeam\"" +--- + +# Missing Sales Team + +## Detection + +- String: `related list:RelatedAccountSalesTeam` + +## Resolution + +```shell +Account Teams must be activated in the target org. +- Org: Setup -> Account Teams -> Enable +- Scratch org setting: +"accountSettings": { + "enableAccountTeams": true +} +} +``` diff --git a/docs/sf-deployment-assistant/Missing-e-mail-template.md b/docs/sf-deployment-assistant/Missing-e-mail-template.md new file mode 100644 index 000000000..9981cce05 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-e-mail-template.md @@ -0,0 +1,17 @@ +--- +title: "Missing e-mail template (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/In field: template - no EmailTemplate named (.*) found\"" +--- + +# Missing e-mail template + +## Detection + +- RegExp: `In field: template - no EmailTemplate named (.*) found` + +## Resolution + +```shell +An email template should be present in the sources. 
To retrieve it, you can run: +sf project retrieve start -m EmailTemplate:{1} -o YOUR_ORG_USERNAME +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Chatter-Collaboration-Group.md b/docs/sf-deployment-assistant/Missing-feature-Chatter-Collaboration-Group.md new file mode 100644 index 000000000..3dae486d3 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Chatter-Collaboration-Group.md @@ -0,0 +1,21 @@ +--- +title: "Missing feature Chatter Collaboration Group (Deployment assistant)" +description: "How to solve Salesforce deployment error \"CollaborationGroup\"" +--- + +# Missing feature Chatter Collaboration Group + +## Detection + +- String: `CollaborationGroup` + +## Resolution + +```shell +Chatter Collaboration Groups must be activated in the target org. +- Org: Setup -> Chatter settings -> Allow Records in Groups +- Scratch org setting: +"chatterSettings": { + "allowRecordsInChatterGroup": true +}, +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-ContactToMultipleAccounts.md b/docs/sf-deployment-assistant/Missing-feature-ContactToMultipleAccounts.md new file mode 100644 index 000000000..59a012ecd --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-ContactToMultipleAccounts.md @@ -0,0 +1,20 @@ +--- +title: "Missing feature ContactToMultipleAccounts (Deployment assistant)" +description: "How to solve Salesforce deployment error \"no CustomObject named AccountContactRelation found\"" +--- + +# Missing feature ContactToMultipleAccounts + +## Detection + +- String: `no CustomObject named AccountContactRelation found` +- String: `Invalid field:ACCOUNT.NAME in related list:RelatedContactAccountRelationList` + +## Resolution + +```shell +Contacts to multiple accounts must be activated in the target org. 
+- Help: https://help.salesforce.com/articleView?id=sf.shared_contacts_set_up.htm&type=5 +- Scratch org setting: +"features": ["ContactsToMultipleAccounts"] +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Enhanced-notes.md b/docs/sf-deployment-assistant/Missing-feature-Enhanced-notes.md new file mode 100644 index 000000000..722f0a1c2 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Enhanced-notes.md @@ -0,0 +1,21 @@ +--- +title: "Missing feature Enhanced notes (Deployment assistant)" +description: "How to solve Salesforce deployment error \"FeedItem.ContentNote\"" +--- + +# Missing feature Enhanced notes + +## Detection + +- String: `FeedItem.ContentNote` + +## Resolution + +```shell +Enhanced Notes must be activated in the target org. +- Org: Setup -> Notes settings -> Enable Notes +- Scratch org setting: +"enhancedNotesSettings": { + "enableEnhancedNotes": true +}, +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Ideas-notes.md b/docs/sf-deployment-assistant/Missing-feature-Ideas-notes.md new file mode 100644 index 000000000..7b5e39ab6 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Ideas-notes.md @@ -0,0 +1,21 @@ +--- +title: "Missing feature Ideas notes (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Idea.InternalIdeasIdeaRecordType\"" +--- + +# Missing feature Ideas notes + +## Detection + +- String: `Idea.InternalIdeasIdeaRecordType` + +## Resolution + +```shell +Ideas must be activated in the target org. 
+- Org: https://help.salesforce.com/articleView?id=networks_enable_ideas.htm&type=0 +- Scratch org setting: +"ideasSettings": { + "enableIdeas": true +} +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Live-Agent.md b/docs/sf-deployment-assistant/Missing-feature-Live-Agent.md new file mode 100644 index 000000000..058c438aa --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Live-Agent.md @@ -0,0 +1,18 @@ +--- +title: "Missing feature Live Agent (Deployment assistant)" +description: "How to solve Salesforce deployment error \"FeedItem.ContentNote\"" +--- + +# Missing feature Live Agent + +## Detection + +- String: `FeedItem.ContentNote` + +## Resolution + +```shell +Live Agent must be activated in the target org. +- Org: Setup -> Live Agent Settings -> Enable Live Agent +- Scratch org feature: LiveAgent +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Opportunity-Teams.md b/docs/sf-deployment-assistant/Missing-feature-Opportunity-Teams.md new file mode 100644 index 000000000..7432d0811 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Opportunity-Teams.md @@ -0,0 +1,21 @@ +--- +title: "Missing feature Opportunity Teams (Deployment assistant)" +description: "How to solve Salesforce deployment error \"OpportunityTeam\"" +--- + +# Missing feature Opportunity Teams + +## Detection + +- String: `OpportunityTeam` + +## Resolution + +```shell +Opportunity Teams must be activated in the target org. 
+- Org: Setup -> Opportunity Team Settings -> Enable Team Selling +- Scratch org: +"opportunitySettings": { + "enableOpportunityTeam": true +} +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Product-Request.md b/docs/sf-deployment-assistant/Missing-feature-Product-Request.md new file mode 100644 index 000000000..f2ada7264 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Product-Request.md @@ -0,0 +1,18 @@ +--- +title: "Missing feature Product Request (Deployment assistant)" +description: "How to solve Salesforce deployment error \"ProductRequest\"" +--- + +# Missing feature Product Request + +## Detection + +- String: `ProductRequest` + +## Resolution + +```shell +ProductRequest object is not available in the target org. +Maybe you would like to clean its references within Profiles / PS using the following command ? +sf hardis:project:clean:references , then select "ProductRequest references" +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Social-Customer-Service.md b/docs/sf-deployment-assistant/Missing-feature-Social-Customer-Service.md new file mode 100644 index 000000000..7cedb201f --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Social-Customer-Service.md @@ -0,0 +1,18 @@ +--- +title: "Missing feature Social Customer Service (Deployment assistant)" +description: "How to solve Salesforce deployment error \"SocialPersona.AreWeFollowing\"" +--- + +# Missing feature Social Customer Service + +## Detection + +- String: `SocialPersona.AreWeFollowing` + +## Resolution + +```shell +Social Customer Service must be activated in the target org. 
+- Org: Setup -> https://help.salesforce.com/articleView?id=sf.social_customer_service_setup_enable.htm&type=5 +- Scratch org feature: SocialCustomerService +``` diff --git a/docs/sf-deployment-assistant/Missing-feature-Translation-Workbench.md b/docs/sf-deployment-assistant/Missing-feature-Translation-Workbench.md new file mode 100644 index 000000000..796b01106 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-feature-Translation-Workbench.md @@ -0,0 +1,22 @@ +--- +title: "Missing feature Translation Workbench (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/report-meta.xml(.*)filterlanguage\"" +--- + +# Missing feature Translation Workbench + +## Detection + +- RegExp: `report-meta.xml(.*)filterlanguage` + +## Resolution + +```shell +Translation workbench must be activated in the target org. +- Org: Setup -> https://help.salesforce.com/articleView?id=sf.customize_wbench.htm&type=5 +- Scratch org: +"languageSettings": { + "enableTranslationWorkbench": true, + "enableEndUserLanguages": true +} +``` diff --git a/docs/sf-deployment-assistant/Missing-field-MiddleName.md b/docs/sf-deployment-assistant/Missing-field-MiddleName.md new file mode 100644 index 000000000..b1a8da176 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-field-MiddleName.md @@ -0,0 +1,22 @@ +--- +title: "Missing field MiddleName (Deployment assistant)" +description: "How to solve Salesforce deployment error \"field MiddleName\"" +--- + +# Missing field MiddleName + +## Detection + +- String: `field MiddleName` +- String: `Variable does not exist: MiddleName` + +## Resolution + +```shell +MiddleNames must be activated in the target org. 
+- Help: https://help.salesforce.com/articleView?id=000332623&type=1&mode=1 +- Scratch org setting: +"nameSettings": { + "enableMiddleName": true +} +``` diff --git a/docs/sf-deployment-assistant/Missing-field-Suffix.md b/docs/sf-deployment-assistant/Missing-field-Suffix.md new file mode 100644 index 000000000..57368e390 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-field-Suffix.md @@ -0,0 +1,21 @@ +--- +title: "Missing field Suffix (Deployment assistant)" +description: "How to solve Salesforce deployment error \"field Suffix\"" +--- + +# Missing field Suffix + +## Detection + +- String: `field Suffix` + +## Resolution + +```shell +Suffix must be activated in the target org. +- Help: https://help.salesforce.com/articleView?id=000332623&type=1&mode=1 +- Scratch org setting: +"nameSettings": { + "enableNameSuffix": true +}, +``` diff --git a/docs/sf-deployment-assistant/Missing-field-SyncedQuoteId.md b/docs/sf-deployment-assistant/Missing-field-SyncedQuoteId.md new file mode 100644 index 000000000..0aeef2d44 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-field-SyncedQuoteId.md @@ -0,0 +1,23 @@ +--- +title: "Missing field SyncedQuoteId (Deployment assistant)" +description: "How to solve Salesforce deployment error \"field SyncedQuoteId\"" +--- + +# Missing field SyncedQuoteId + +## Detection + +- String: `field SyncedQuoteId` +- String: `Error force-app/main/default/objects/Quote/Quote.object-meta.xml` +- String: `Error force-app/main/default/objects/Opportunity/fields/SyncedQuoteId.field-meta.xml` + +## Resolution + +```shell +Quotes must be activated in the target org. 
+- Help: https://help.salesforce.com/articleView?id=sf.quotes_enable.htm&type=5 +- Scratch org setting: +"quoteSettings": { + "enableQuote": true +} +``` diff --git a/docs/sf-deployment-assistant/Missing-multi-currency-field.md b/docs/sf-deployment-assistant/Missing-multi-currency-field.md new file mode 100644 index 000000000..445834728 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-multi-currency-field.md @@ -0,0 +1,16 @@ +--- +title: "Missing multi-currency field (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/A reference to a custom field (.*)CurrencyIsoCode\"" +--- + +# Missing multi-currency field + +## Detection + +- RegExp: `A reference to a custom field (.*)CurrencyIsoCode` + +## Resolution + +```shell +You probably need to activate MultiCurrency (from Setup -> Company information) +``` diff --git a/docs/sf-deployment-assistant/Missing-object-referenced-in-package-xml.md b/docs/sf-deployment-assistant/Missing-object-referenced-in-package-xml.md new file mode 100644 index 000000000..05d942462 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-object-referenced-in-package-xml.md @@ -0,0 +1,18 @@ +--- +title: "Missing object referenced in package.xml (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/An object (.*) of type (.*) was named in package.xml, but was not found in zipped directory\"" +--- + +# Missing object referenced in package.xml + +## Detection + +- RegExp: `An object (.*) of type (.*) was named in package.xml, but was not found in zipped directory` + +## Resolution + +```shell +You can either: +- Update the package.xml to remove the reference to the missing {2} {1} +- Add the missing {2} {1} in your project source files +``` diff --git a/docs/sf-deployment-assistant/Missing-profile-default-application.md b/docs/sf-deployment-assistant/Missing-profile-default-application.md new file mode 100644 index 000000000..050e9e694 --- /dev/null +++ 
b/docs/sf-deployment-assistant/Missing-profile-default-application.md @@ -0,0 +1,24 @@ +--- +title: "Missing profile default application (Deployment assistant)" +description: "How to solve Salesforce deployment error \"You can't remove the only default app from the profile.\"" +--- + +# Missing profile default application + +## Detection + +- String: `You can't remove the only default app from the profile.` + +## Resolution + +```shell +You must have a default application for a profile. You can: + - Update it in UI + - Update the XML of the profile to set "true" in the tag of one of the applicationVisibilities item. + Ex: + + standard__LightningSales + true + true + +``` diff --git a/docs/sf-deployment-assistant/Missing-report.md b/docs/sf-deployment-assistant/Missing-report.md new file mode 100644 index 000000000..02c08ad76 --- /dev/null +++ b/docs/sf-deployment-assistant/Missing-report.md @@ -0,0 +1,19 @@ +--- +title: "Missing report (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) The (.*) report chart has a problem with the \"reportName\" field\"" +--- + +# Missing report + +## Detection + +- RegExp: `Error (.*) The (.*) report chart has a problem with the "reportName" field` + +## Resolution + +```shell +{1} is referring to unknown report {2}. 
To retrieve it, you can run: +- sf project retrieve start -m Report:{2} -o YOUR_ORG_USERNAME +- If it fails, looks for the report folder and add it before report name to the retrieve command (ex: MYFOLDER/MYREPORTNAME) + +``` diff --git a/docs/sf-deployment-assistant/Network-issue.md b/docs/sf-deployment-assistant/Network-issue.md new file mode 100644 index 000000000..50b37dcc5 --- /dev/null +++ b/docs/sf-deployment-assistant/Network-issue.md @@ -0,0 +1,18 @@ +--- +title: "Network issue (Deployment assistant)" +description: "How to solve Salesforce deployment error \"ECONNABORTED\"" +--- + +# Network issue + +## Detection + +- String: `ECONNABORTED` +- String: `ECONNRESET` + +## Resolution + +```shell +The network connection has been aborted, this is a purely technical issue. +Try again, and if you still see errors, check the status of Salesforce instance on https://status.salesforce.com +``` diff --git a/docs/sf-deployment-assistant/Not-available-for-deploy-for-this-organization.md b/docs/sf-deployment-assistant/Not-available-for-deploy-for-this-organization.md new file mode 100644 index 000000000..5f461643a --- /dev/null +++ b/docs/sf-deployment-assistant/Not-available-for-deploy-for-this-organization.md @@ -0,0 +1,17 @@ +--- +title: "Not available for deploy for this organization (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Not available for deploy for this organization\"" +--- + +# Not available for deploy for this organization + +## Detection + +- RegExp: `Error (.*) Not available for deploy for this organization` + +## Resolution + +```shell +The user you use for deployments probably lacks of the rights (Profiles, Permission sets...) to manage {1}. 
+- Assign the deployment user to the good Permission Sets, or modify its profile rights, then try again +``` diff --git a/docs/sf-deployment-assistant/Not-valid-sharing-model.md b/docs/sf-deployment-assistant/Not-valid-sharing-model.md new file mode 100644 index 000000000..0b9db91c4 --- /dev/null +++ b/docs/sf-deployment-assistant/Not-valid-sharing-model.md @@ -0,0 +1,20 @@ +--- +title: "Not valid sharing model (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) (.*) is not a valid sharing model for (.*) when (.*) sharing model is (.*)\"" +--- + +# Not valid sharing model + +## Detection + +- RegExp: `Error (.*) (.*) is not a valid sharing model for (.*) when (.*) sharing model is (.*)` + +## Resolution + +```shell +It seems that Sharing Models of {1} and {4} are not compatible in target org. +- Use compatible sharing models between {1} and {4} by updating Sharing model of {1} or {4} +- Make sure that sfdx sources {1}.object-meta.xml and {4}.object-meta.xml and in the files, and that {1} and {4} are in package.xml in CustomObject block +- You may directly update sharingModel in XML. 
For example, replace ReadWrite by Private in {3}.object-meta.xml + +``` diff --git a/docs/sf-deployment-assistant/Objects-rights-on-a-role-is-below-org-default.md b/docs/sf-deployment-assistant/Objects-rights-on-a-role-is-below-org-default.md new file mode 100644 index 000000000..82793adc8 --- /dev/null +++ b/docs/sf-deployment-assistant/Objects-rights-on-a-role-is-below-org-default.md @@ -0,0 +1,19 @@ +--- +title: "Objects rights on a role is below org default (Deployment assistant)" +description: "How to solve Salesforce deployment error \"access level below organization default\"" +--- + +# Objects rights on a role is below org default + +## Detection + +- String: `access level below organization default` + +## Resolution + +```shell +Your org wide settings default must be lower than the level defined in roles: +- If you are in a scratch org, it can be fixable using "objectProperties" in project-scratch-def.json (see "Set Object-Level Sharing Settings" paragraph in page https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_scratch_orgs_def_file.htm) +- If you are in a sandbox/dev/prod org, you need to update default org wide settings before deployment. See https://www.sfdcpoint.com/salesforce/organization-wide-defaults-owd-in-salesforce/ + +``` diff --git a/docs/sf-deployment-assistant/Picklist-sharing-is-not-supported.md b/docs/sf-deployment-assistant/Picklist-sharing-is-not-supported.md new file mode 100644 index 000000000..316701d39 --- /dev/null +++ b/docs/sf-deployment-assistant/Picklist-sharing-is-not-supported.md @@ -0,0 +1,18 @@ +--- +title: "Picklist sharing is not supported (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Picklist sharing is not supported\"" +--- + +# Picklist sharing is not supported + +## Detection + +- String: `Picklist sharing is not supported` + +## Resolution + +```shell +You probably changed the type of a field. 
+Go manually make the change in the target org, so the deployment will pass + +``` diff --git a/docs/sf-deployment-assistant/Picklist-value-not-found.md b/docs/sf-deployment-assistant/Picklist-value-not-found.md new file mode 100644 index 000000000..9275e40cc --- /dev/null +++ b/docs/sf-deployment-assistant/Picklist-value-not-found.md @@ -0,0 +1,19 @@ +--- +title: "Picklist value not found (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/Picklist value: (.*) in picklist: (.*) not found\"" +--- + +# Picklist value not found + +## Detection + +- RegExp: `Picklist value: (.*) in picklist: (.*) not found` + +## Resolution + +```shell +Sources have references to value {1} of picklist {2} +- If picklist {2} is standard, add the picklist to sfdx sources by using "sf project retrieve start -m StandardValueSet:{2}", then save again +- Else, perform a search in all code of {1}, then remove XML tags referring to {1} (for example in record types metadatas) + +``` diff --git a/docs/sf-deployment-assistant/Please-choose-a-different-name.md b/docs/sf-deployment-assistant/Please-choose-a-different-name.md new file mode 100644 index 000000000..a993e141d --- /dev/null +++ b/docs/sf-deployment-assistant/Please-choose-a-different-name.md @@ -0,0 +1,18 @@ +--- +title: "Please choose a different name (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) This (.*) already exists or has been previously used(.*)Please choose a different name.\"" +--- + +# Please choose a different name + +## Detection + +- RegExp: `Error (.*) This (.*) already exists or has been previously used(.*)Please choose a different name.` + +## Resolution + +```shell +- Rename {1} in the target org, then try again the deployment. if it succeeds, delete the renamed item. 
+- or Delete {1} in the target org, then try again the deployment + +``` diff --git a/docs/sf-deployment-assistant/Record-Type-not-found.md b/docs/sf-deployment-assistant/Record-Type-not-found.md new file mode 100644 index 000000000..7444e1548 --- /dev/null +++ b/docs/sf-deployment-assistant/Record-Type-not-found.md @@ -0,0 +1,19 @@ +--- +title: "Record Type not found (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) In field: recordType - no RecordType named (.*) found\"" +--- + +# Record Type not found + +## Detection + +- RegExp: `Error (.*) In field: recordType - no RecordType named (.*) found` + +## Resolution + +```shell +An unknown record type {2} is referenced in {1} +- If record type {2} is not supposed to exist, perform a search in all files of {1}, then remove matching XML elements referring to this record type +- If record type {2} is supposed to exist, you may have to create it manually in the target org to make the deployment pass + +``` diff --git a/docs/sf-deployment-assistant/Send-email-is-disabled.md b/docs/sf-deployment-assistant/Send-email-is-disabled.md new file mode 100644 index 000000000..a22df561c --- /dev/null +++ b/docs/sf-deployment-assistant/Send-email-is-disabled.md @@ -0,0 +1,17 @@ +--- +title: "Send email is disabled (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Send Email is disabled or activities are not allowed\"" +--- + +# Send email is disabled + +## Detection + +- String: `Send Email is disabled or activities are not allowed` +- String: `Unknown user permission: SendExternalEmailAvailable` + +## Resolution + +```shell +Go to Email -> Deliverability -> Select value "All emails" +``` diff --git a/docs/sf-deployment-assistant/Sharing-recalculation-lock.md b/docs/sf-deployment-assistant/Sharing-recalculation-lock.md new file mode 100644 index 000000000..308b28571 --- /dev/null +++ b/docs/sf-deployment-assistant/Sharing-recalculation-lock.md @@ -0,0 +1,17 
@@ +--- +title: "Sharing recalculation lock (Deployment assistant)" +description: "How to solve Salesforce deployment error \"because it interferes with another operation already in progress\"" +--- + +# Sharing recalculation lock + +## Detection + +- String: `because it interferes with another operation already in progress` +- String: `Le calcul de partage demandé ne peut être traité maintenant car il interfère avec une autre opération en cours` + +## Resolution + +```shell +If you changed a field from MasterDetail to Lookup, you must do it manually in the target org before being able to deploy +``` diff --git a/docs/sf-deployment-assistant/Sort-order-must-be-in-sequential-order.md b/docs/sf-deployment-assistant/Sort-order-must-be-in-sequential-order.md new file mode 100644 index 000000000..6f2ef2e2e --- /dev/null +++ b/docs/sf-deployment-assistant/Sort-order-must-be-in-sequential-order.md @@ -0,0 +1,17 @@ +--- +title: "Sort order must be in sequential order (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) SortOrder must be in sequential order from\"" +--- + +# Sort order must be in sequential order + +## Detection + +- RegExp: `Error (.*) SortOrder must be in sequential order from` + +## Resolution + +```shell +You probably have a default DuplicateRule in the target org. Retrieve it from target org, or delete it manually in target org, so you can deploy. 
+Ref: https://developer.salesforce.com/forums/?id=9060G000000I6SoQAK +``` diff --git a/docs/sf-deployment-assistant/Tableau-CRM---Wave-digest-error.md b/docs/sf-deployment-assistant/Tableau-CRM---Wave-digest-error.md new file mode 100644 index 000000000..e2e01d0f7 --- /dev/null +++ b/docs/sf-deployment-assistant/Tableau-CRM---Wave-digest-error.md @@ -0,0 +1,16 @@ +--- +title: "Tableau CRM / Wave digest error (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Fix the sfdcDigest node errors and then upload the file again\"" +--- + +# Tableau CRM / Wave digest error + +## Detection + +- String: `Fix the sfdcDigest node errors and then upload the file again` + +## Resolution + +```shell +Go to the target org, open profile "Analytics Cloud Integration User" and add READ rights to the missing object fields +``` diff --git a/docs/sf-deployment-assistant/Test-classes-with-0--coverage.md b/docs/sf-deployment-assistant/Test-classes-with-0--coverage.md new file mode 100644 index 000000000..47cf5d7b7 --- /dev/null +++ b/docs/sf-deployment-assistant/Test-classes-with-0--coverage.md @@ -0,0 +1,16 @@ +--- +title: "Test classes with 0% coverage (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/ 0%\"" +--- + +# Test classes with 0% coverage + +## Detection + +- RegExp: ` 0%` + +## Resolution + +```shell +Please make sure that none of the test classes are 0% covered +``` diff --git a/docs/sf-deployment-assistant/Unknown-user-permission--CreateAuditFields.md b/docs/sf-deployment-assistant/Unknown-user-permission--CreateAuditFields.md new file mode 100644 index 000000000..894fc57d3 --- /dev/null +++ b/docs/sf-deployment-assistant/Unknown-user-permission--CreateAuditFields.md @@ -0,0 +1,17 @@ +--- +title: "Unknown user permission: CreateAuditFields (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Unknown user permission: CreateAuditFields\"" +--- + +# Unknown user permission: 
CreateAuditFields + +## Detection + +- String: `Unknown user permission: CreateAuditFields` + +## Resolution + +```shell +You need to enable the "Create audit field" permission in the target org +Please check https://help.salesforce.com/articleView?id=000334139&type=1&mode=1 +``` diff --git a/docs/sf-deployment-assistant/Unknown-user-permission--FieldServiceAccess.md b/docs/sf-deployment-assistant/Unknown-user-permission--FieldServiceAccess.md new file mode 100644 index 000000000..0cf6c14e7 --- /dev/null +++ b/docs/sf-deployment-assistant/Unknown-user-permission--FieldServiceAccess.md @@ -0,0 +1,17 @@ +--- +title: "Unknown user permission: FieldServiceAccess (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Unknown user permission: FieldServiceAccess\"" +--- + +# Unknown user permission: FieldServiceAccess + +## Detection + +- String: `Unknown user permission: FieldServiceAccess` + +## Resolution + +```shell +You need to enable the "Field Service Access" permission in the target org +Please check https://help.salesforce.com/articleView?id=sf.fs_enable.htm&type=5 +``` diff --git a/docs/sf-deployment-assistant/Unknown-user-permission.md b/docs/sf-deployment-assistant/Unknown-user-permission.md new file mode 100644 index 000000000..435e46e54 --- /dev/null +++ b/docs/sf-deployment-assistant/Unknown-user-permission.md @@ -0,0 +1,18 @@ +--- +title: "Unknown user permission (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Unknown user permission:\"" +--- + +# Unknown user permission + +## Detection + +- String: `Unknown user permission:` + +## Resolution + +```shell +You can: +- enable the related permission in the target org +- or remove references to the permission in source XML files (Probably a Profile or a Permission set) +``` diff --git a/docs/sf-deployment-assistant/Unsupported-sharing-configuration.md b/docs/sf-deployment-assistant/Unsupported-sharing-configuration.md new file mode 100644 index 
000000000..1f1ce5b1b --- /dev/null +++ b/docs/sf-deployment-assistant/Unsupported-sharing-configuration.md @@ -0,0 +1,18 @@ +--- +title: "Unsupported sharing configuration (Deployment assistant)" +description: "How to solve Salesforce deployment error \"/not supported for (.*) since it's org wide default is\"" +--- + +# Unsupported sharing configuration + +## Detection + +- RegExp: `not supported for (.*) since it's org wide default is` + +## Resolution + +```shell +Consistency error between {1} sharing settings and {1} object configuration +Please check https://salesforce.stackexchange.com/questions/260923/sfdx-deploying-contact-sharing-rules-on-a-fresh-deployment +If you already did that, please try again to run the job +``` diff --git a/docs/sf-deployment-assistant/Variable-does-not-exist.md b/docs/sf-deployment-assistant/Variable-does-not-exist.md new file mode 100644 index 000000000..173dedd6e --- /dev/null +++ b/docs/sf-deployment-assistant/Variable-does-not-exist.md @@ -0,0 +1,16 @@ +--- +title: "Variable does not exist (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Variable does not exist: (.*) \((.*)\)\"" +--- + +# Variable does not exist + +## Detection + +- RegExp: `Error (.*) Variable does not exist: (.*) \((.*)\)` + +## Resolution + +```shell +Apex error in {1} with unknown variable {2} at position {3}. If {2} is a class name, try to fix it, or maybe it is missing in the files or in package.xml ! 
+``` diff --git a/docs/sf-deployment-assistant/Visibility-is-not-allowed-for-type.md b/docs/sf-deployment-assistant/Visibility-is-not-allowed-for-type.md new file mode 100644 index 000000000..e1f258628 --- /dev/null +++ b/docs/sf-deployment-assistant/Visibility-is-not-allowed-for-type.md @@ -0,0 +1,16 @@ +--- +title: "Visibility is not allowed for type (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) set the visibility for a (.*) to Protected unless you are in a developer\"" +--- + +# Visibility is not allowed for type + +## Detection + +- RegExp: `Error (.*) set the visibility for a (.*) to Protected unless you are in a developer` + +## Resolution + +```shell +Update the visibility of {1} to "Public" +``` diff --git a/docs/sf-deployment-assistant/XML-item-appears-more-than-once.md b/docs/sf-deployment-assistant/XML-item-appears-more-than-once.md new file mode 100644 index 000000000..3ed7b889c --- /dev/null +++ b/docs/sf-deployment-assistant/XML-item-appears-more-than-once.md @@ -0,0 +1,18 @@ +--- +title: "XML item appears more than once (Deployment assistant)" +description: "How to solve Salesforce deployment error \"Error (.*) Field:(.*), value:(.*) appears more than once\"" +--- + +# XML item appears more than once + +## Detection + +- RegExp: `Error (.*) Field:(.*), value:(.*) appears more than once` + +## Resolution + +```shell +You probably made an error while merging conflicts +Look for {3} in the XML of {1} +If you see two {2} XML blocks with {3}, please decide which one you keep and remove the other one +``` diff --git a/docs/sf-deployment-assistant/sharing-operation-already-in-progress.md b/docs/sf-deployment-assistant/sharing-operation-already-in-progress.md new file mode 100644 index 000000000..984d89c31 --- /dev/null +++ b/docs/sf-deployment-assistant/sharing-operation-already-in-progress.md @@ -0,0 +1,20 @@ +--- +title: "sharing operation already in progress (Deployment assistant)" +description: "How to 
solve Salesforce deployment error \"sharing operation already in progress\"" +--- + +# sharing operation already in progress + +## Detection + +- String: `sharing operation already in progress` + +## Resolution + +```shell +You can not deploy multiple SharingRules at the same time. You can either: +- Remove SharingOwnerRules and SharingRule from package.xml (so it becomes a manual operation) +- Use sf hardis:work:save to generate a deploymentPlan in .sfdx-hardis.json, +- If you are trying to create a scratch org, add DeferSharingCalc in features in project-scratch-def.json + +``` diff --git a/docs/sfdx-hardis-config-file.md b/docs/sfdx-hardis-config-file.md index 304f23edc..f28ed20da 100644 --- a/docs/sfdx-hardis-config-file.md +++ b/docs/sfdx-hardis-config-file.md @@ -10,6 +10,8 @@ Many of these properties are automatically set by CI/CD [setup](salesforce-ci-cd You can see the [**list of all configuration properties**](schema/sfdx-hardis-json-schema-parameters.html). +Additional environment variables are also used by sfdx-hardis, see the [complete list of environment variables](all-env-variables.md). + Here is an example of a .sfdx-hardis.yml config file: ```yaml @@ -70,3 +72,4 @@ listViewsToSetToMine: - force-app/main/default/objects/Operation__c/listViews/MyCurrentOperations.listView-meta.xml - force-app/main/default/objects/Operation__c/listViews/MyFinalizedOperations.listView-meta.xml ``` + diff --git a/docs/sfdx-hardis-help.md b/docs/sfdx-hardis-help.md index 7db48be36..51a8e7bf9 100644 --- a/docs/sfdx-hardis-help.md +++ b/docs/sfdx-hardis-help.md @@ -4,16 +4,6 @@ description: Get help about sfdx-hardis ! --- -## Documentation +You have a question or need assistance ? -You can use the search bar at the top of this documentation to find help pages. - -Many pages contain video tutorials ! 
- -## Ask questions - -Ask any technical question directly in [sfdx-hardis repository GitHub issues](https://github.com/hardisgroupcom/sfdx-hardis/issues){target=blank} - -## Assistance / Consulting - -If you need assistance from Cloudity to use any of sfdx-hardis features, in the context of a larger project or not, please [**contact us**](https://cloudity.com/#form){target=blank} :) +Feel free to [**contact us 🙂**](https://cloudity.com/#form){target=blank} :) diff --git a/docs/special-thanks.md b/docs/special-thanks.md index b80508a2c..242693981 100644 --- a/docs/special-thanks.md +++ b/docs/special-thanks.md @@ -11,6 +11,8 @@ description: Thank all people making sfdx-hardis great ! ## Contributors +![](https://contrib.rocks/image?repo=hardisgroupcom/sfdx-hardis) + ![](assets/images/special-thanks-1.png) ## Inspirations diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css index 69a79bff8..54c50f44c 100644 --- a/docs/stylesheets/extra.css +++ b/docs/stylesheets/extra.css @@ -1,3 +1,14 @@ +/* Force all sidebar/menu links in dark mode to be white for consistency */ +[data-md-color-scheme="slate"] .md-nav__link, +[data-md-color-scheme="slate"] .md-nav__link--active, +[data-md-color-scheme="slate"] .md-nav__item--active>.md-nav__link, +[data-md-color-scheme="slate"] .md-nav__item--nested>.md-nav__link, +[data-md-color-scheme="slate"] .md-nav__link:visited, +[data-md-color-scheme="slate"] .md-nav__link:focus, +[data-md-color-scheme="slate"] .md-nav__link:hover { + color: #fff !important; +} + /* stylelint-disable SelectorFormat */ .md-typeset__table { min-width: 100%; @@ -7,7 +18,55 @@ display: table; } -:root>* { - --md-primary-fg-color: #1B1464; +/* light mode table header bgcolor */ +.md-typeset__table th { + background-color: #f2edfe; +} + +/* dark mode table header bgcolor */ +[data-md-color-scheme="slate"] .md-typeset__table th { + background-color: hsla(var(--md-hue), 25%, 25%, 1) +} + +/* light mode alternating table bg colors */ 
+.md-typeset__table tr:nth-child(2n) { + background-color: #f8f8f8; +} + +/* dark mode alternating table bg colors */ +[data-md-color-scheme="slate"] .md-typeset__table tr:nth-child(2n) { + background-color: hsla(var(--md-hue), 25%, 25%, 1) +} + +:root { + --md-primary-fg-color: #0057B8; + --md-primary-bg-color: #F5F7FA; + --md-accent-fg-color: #FFB81C; --md-footer-bg-color: #1B1464; + --md-footer-fg-color: #FFB81C; + --md-header-bg-color: #1B1464; + --md-header-fg-color: #FFB81C; +} + +[data-md-color-scheme="slate"] { + --md-primary-fg-color: #FFB81C; + /* Use accent yellow for primary text for better contrast */ + --md-primary-bg-color: #181B2A; + --md-accent-fg-color: #FFB81C; + --md-footer-bg-color: #181B2A; + --md-footer-fg-color: #FFB81C; + --md-header-bg-color: #181B2A; + --md-header-fg-color: #FFB81C; +} + +/* Improve link readability in dark mode */ +[data-md-color-scheme="slate"] a, +[data-md-color-scheme="slate"] .md-typeset a { + color: #FFB81C; +} + +/* Optional: Make code blocks and inline code more readable in dark mode */ +[data-md-color-scheme="slate"] code, +[data-md-color-scheme="slate"] pre { + color: #FFB81C; } \ No newline at end of file diff --git a/messages/hello.world.md b/messages/hello.world.md new file mode 100644 index 000000000..804f848de --- /dev/null +++ b/messages/hello.world.md @@ -0,0 +1,29 @@ +# summary + +Say hello. + +# description + +Say hello either to the world or someone you know. + +# flags.name.summary + +The name of the person you'd like to say hello to. + +# flags.name.description + +This person can be anyone in the world! + +# examples + +- Say hello to the world: + + <%= config.bin %> <%= command.id %> + +- Say hello to someone you know: + + <%= config.bin %> <%= command.id %> --name Astro + +# info.hello + +Hello %s at %s. 
diff --git a/messages/org.json b/messages/org.json deleted file mode 100644 index de413d705..000000000 --- a/messages/org.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "allowPurgeFailure": "Allows purges to fail without exiting with 1. Use --no-allowpurgefailure to disable", - "apexTests": "Run apex test cases on org", - "auditApiVersion": "Audit API version", - "auditCallInCallOut": "Generate list of callIn and callouts from sfdx project", - "auditRemoteSites": "Generate list of remote sites", - "checkOnly": "Only checks the deployment, there is no impact on target org", - "createOrgShape": "Updates project-scratch-def.json from org shape", - "completeWorkTask": "When a work task is completed, guide user to create a merge request", - "dataTreeExport": "Assisted export data defined in .sfdx-hardis.yml", - "debugMode": "Activate debug mode (more logs)", - "deployMetadatas": "Deploy metadatas to source org", - "exceptFilter": "Allow to take all item except these criteria", - "failIfError": "Fails (exit code 1) if an error is found", - "filteredMetadatas": "Comma separated list of Metadatas keys to remove from PackageXml file", - "folder": "Folder", - "forceNewScratch": "If an existing scratch org exists, do not reuse it but create a new one", - "installFFLib": "Install FFLib in current project", - "instanceUrl": "URL of org instance", - "loginToOrg": "Login to salesforce org", - "minimumApiVersion": "Minimum allowed API version", - "nameFilter": "Filter according to Name criteria", - "newWorkTask": "New work task", - "orgFreezeUser": "Freeze mass users in org for maintenance or go live purpose", - "orgDataExport": "Export data from org using sfdmu", - "orgDataImport": "Import data in org using sfdmu", - "orgDataDelete": "Delete data in org using sfdmu", - "orgPurgeFlow": "Purge Obsolete flow versions to avoid the 50 max versions limit. 
Filters on Status and Name", - "orgfreezeUser": "Mass freeze users in org before a maintenance or go live\n\nSee user guide in the following article\n\n\n\n[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3)", - "orgUnfreezeUser": "Mass unfreeze users in org after a maintenance or go live\n\nSee user guide in the following article\n\n\n\n[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3)", - "packageCreate": "Create a new package", - "packageInstall": "Install a package", - "packageInstallationKey": "installation key for key-protected package (default: null)", - "packageVersionCreate": "Create a new version of an unlocked package", - "packageVersionList": "List versions of unlocked package", - "packageXml": "Path to package.xml manifest file", - "prompt": "Prompt for confirmation (true by default, use --no-prompt to skip)", - "refreshWorkTask": "Make my local branch and my scratch org up to date with the most recent sources", - "retrieveDx": "Retrieve Salesforce DX project from org", - "retrieveMetadatas": "Retrieve metadatas from an org with package.xml manifest", - "sandboxLogin": "Use if the environment is a sandbox", - "scratch": "Scratch org", - "scratchCreate": "Create and initialize a scratch org so it is ready to use", - "rebuildSelection": "Process again the selection of the items that you want to publish to upper level", - "selectOrg": "Interactive org selection for user", - "statusFilter": "Filter according to Status criteria", - "tempFolder": "Temporary folder", - "testLevel": "Level of tests to apply to validate deployment", - 
"testLevelExtended": "Level of tests to validate deployment. RunRepositoryTests auto-detect and run all repository test classes", - "websocket": "Websocket host:port for VsCode SFDX Hardis UI integration", - "withDevHub": "Also connect associated DevHub", - "runtests": "Apex test classes to run if --testlevel is RunSpecifiedTests" -} diff --git a/messages/org.md b/messages/org.md new file mode 100644 index 000000000..acfa6f0d2 --- /dev/null +++ b/messages/org.md @@ -0,0 +1,222 @@ +# allowPurgeFailure + +Allows purges to fail without exiting with 1. Use --no-allowpurgefailure to disable + +# apexTests + +Run apex test cases on org + +# auditApiVersion + +Audit API version + +# auditCallInCallOut + +Generate list of callIn and callouts from sfdx project + +# auditRemoteSites + +Generate list of remote sites + +# checkOnly + +Only checks the deployment, there is no impact on target org + +# createOrgShape + +Updates project-scratch-def.json from org shape + +# completeWorkTask + +When a User Story is completed, guide user to create a merge request + +# dataTreeExport + +Assisted export data defined in .sfdx-hardis.yml + +# debugMode + +Activate debug mode (more logs) + +# deployMetadatas + +Deploy metadatas to source org + +# exceptFilter + +Allow to take all item except these criteria + +# failIfError + +Fails (exit code 1) if an error is found + +# filteredMetadatas + +Comma separated list of Metadatas keys to remove from PackageXml file + +# folder + +Folder + +# forceNewScratch + +If an existing scratch org exists, do not reuse it but create a new one + +# installFFLib + +Install FFLib in current project + +# instanceUrl + +URL of org instance + +# loginToOrg + +Login to salesforce org + +# minimumApiVersion + +Minimum allowed API version + +# nameFilter + +Filter according to Name criteria + +# newWorkTask + +New User Story + +# orgFreezeUser + +Freeze mass users in org for maintenance or go live purpose + +# orgCommunityUpdate + +Update a community status. 
+ +# orgCommunityUpdateDesc + +Activate or deactivate a community by changing its status: + +- Live +- DownForMaintenance + +# orgDataExport + +Export data from org using sfdmu + +# orgDataImport + +Import data in org using sfdmu + +# orgPurgeFlow + +Purge Obsolete flow versions to avoid the 50 max versions limit. Filters on Status and Name + +# orgfreezeUser + +Mass freeze users in org before a maintenance or go live + +See user guide in the following article + + + +[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) + +# orgUnfreezeUser + +Mass unfreeze users in org after a maintenance or go live + +See user guide in the following article + + + +[![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://medium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) + +# packageCreate + +Create a new package + +# packageInstall + +Install a package + +# packageInstallationKey + +installation key for key-protected package (default: null) + +# packageVersionCreate + +Create a new version of an unlocked package + +# packageVersionList + +List versions of unlocked package + +# packageXml + +Path to package.xml manifest file + +# prompt + +Prompt for confirmation (true by default, use --no-prompt to skip) + +# refreshWorkTask + +Make my local branch and my scratch org up to date with the most recent sources + +# retrieveDx + +Retrieve Salesforce DX project from org + +# retrieveMetadatas + +Retrieve metadatas from an org with package.xml manifest + +# sandboxLogin + +Use if the environment is a sandbox + +# scratch + +Scratch org + +# scratchCreate + +Create and initialize a scratch org so it is ready to use + +# rebuildSelection
+ +Process again the selection of the items that you want to publish to upper level + +# selectOrg + +Interactive org selection for user + +# statusFilter + +Filter according to Status criteria + +# tempFolder + +Temporary folder + +# testLevel + +Level of tests to apply to validate deployment + +# testLevelExtended + +Level of tests to validate deployment. RunRepositoryTests auto-detect and run all repository test classes + +# websocket + +Websocket host:port for VsCode SFDX Hardis UI integration + +# withDevHub + +Also connect associated DevHub + +# runtests + +Apex test classes to run if --testlevel is RunSpecifiedTests \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 9b478204d..033dd2b06 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -4,32 +4,66 @@ repo_url: https://github.com/hardisgroupcom/sfdx-hardis edit_uri: tree/master/docs site_author: Nicolas Vuillamy site_description: Salesforce DX Plugin, by Cloudity. -copyright: >- - Doc generated by sfdx-hardis +copyright: Doc generated by sfdx-hardis theme: name: material custom_dir: docs/overrides features: + - content.code.copy - navigation.instant - navigation.footer logo: assets/images/cloudity-text-logo.svg favicon: assets/images/cloudity-logo.svg palette: - primary: indigo + - scheme: default + primary: custom + accent: custom + toggle: + icon: material/brightness-7 + name: Switch to dark mode + background: '#F5F7FA' + foreground: '#1B1464' + colors: + primary: '#0057B8' + accent: '#FFB81C' + link: '#0057B8' + header: '#1B1464' + nav: '#F5F7FA' + - scheme: slate + primary: custom + accent: custom + toggle: + icon: material/brightness-4 + name: Switch to light mode + background: '#181B2A' + foreground: '#FFB81C' + colors: + primary: '#0057B8' + accent: '#FFB81C' + link: '#FFB81C' + header: '#181B2A' + nav: '#23263A' plugins: - glightbox - search markdown_extensions: - pymdownx.emoji: - emoji_index: !!python/name:materialx.emoji.twemoji - emoji_generator: !!python/name:materialx.emoji.to_svg 
+ emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg - pymdownx.snippets: base_path: docs check_paths: true + restrict_base_path: false - mdx_truly_sane_lists - attr_list + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true + - md_in_html extra_javascript: - https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js - javascripts/tables.js @@ -50,7 +84,140 @@ extra: generator: false nav: - Home: index.md - - Installation: https://sfdx-hardis.cloudity.com/#installation + - Installation: installation.md + - Salesforce CI/CD: + - CI/CD Home: salesforce-ci-cd-home.md + - Contributor Guide: + - Contributor Guide Home: salesforce-ci-cd-use-home.md + - Prepare your computer: salesforce-ci-cd-use-install.md + - Clone the repository: salesforce-ci-cd-clone-repository.md + - Create new User Story: salesforce-ci-cd-create-new-task.md + - Work on your dev org: + - Work home: salesforce-ci-cd-work-on-task.md + - Open your org: salesforce-ci-cd-work-on-task-open-org.md + - Configure Salesforce: salesforce-ci-cd-work-on-task-configuration.md + - Handle Profiles: salesforce-ci-cd-work-on-task-profiles.md + - Install packages: salesforce-ci-cd-work-on-task-install-packages.md + - Develop in Salesforce: salesforce-ci-cd-work-on-task-development.md + - Publish your User Story: salesforce-ci-cd-publish-task.md + - Create Merge Request: + - On Gitlab: salesforce-ci-cd-merge-request-gitlab.md + - On Azure: salesforce-ci-cd-pull-request-azure.md + - On Github: salesforce-ci-cd-pull-request-github.md + - Check merge request results: + - Merge Request results Home: salesforce-ci-cd-handle-merge-request-results.md + - Solve deployment errors: salesforce-ci-cd-solve-deployment-errors.md + - Solve MegaLinter errors: salesforce-ci-cd-solve-megalinter-errors.md + - Release 
Manager Guide: + - Release Manager Home: salesforce-ci-cd-release-home.md + - Validate a merge request: salesforce-ci-cd-validate-merge-request.md + - Deploy to major orgs: salesforce-ci-cd-deploy-major-branches.md + - Handle RUN / Hotfix to Production: salesforce-ci-cd-hotfixes.md + - CI/CD Configuration: + - CI/CD Config Home: salesforce-ci-cd-config-home.md + - Overwrite Management: salesforce-ci-cd-config-overwrite.md + - Delta deployments: salesforce-ci-cd-config-delta-deployment.md + - Automated cleaning: salesforce-ci-cd-config-cleaning.md + - Source retrieve issues: salesforce-ci-cd-retrieve.md + - sfdx-hardis for packaging: salesforce-ci-cd-packaging.md + - sfdx-hardis for conga: salesforce-ci-cd-conga.md + - Setup Guide: + - CI/CD Setup Home: salesforce-ci-cd-setup-home.md + - Init Git repository: salesforce-ci-cd-setup-git.md + - Configure Orgs: salesforce-ci-cd-setup-activate-org.md + - Init SFDX Project: salesforce-ci-cd-setup-init-project.md + - CI Server Authentication: salesforce-ci-cd-setup-auth.md + - Init from Existing Org: salesforce-ci-cd-setup-existing-org.md + - Integrations: + - Integrations Home: salesforce-ci-cd-setup-integrations-home.md + - GitHub: salesforce-ci-cd-setup-integration-github.md + - Gitlab: salesforce-ci-cd-setup-integration-gitlab.md + - Azure DevOps: salesforce-ci-cd-setup-integration-azure.md + - BitBucket: salesforce-ci-cd-setup-integration-bitbucket.md + - Slack: salesforce-ci-cd-setup-integration-slack.md + - Microsoft Teams: salesforce-ci-cd-setup-integration-ms-teams.md + - Email: salesforce-ci-cd-setup-integration-email.md + - Grafana: salesforce-ci-cd-setup-integration-api.md + - Jira: salesforce-ci-cd-setup-integration-jira.md + - Azure Boards: salesforce-ci-cd-setup-integration-azure-boards.md + - Generic Ticketing: salesforce-ci-cd-setup-integration-generic-ticketing.md + - First merge request: salesforce-ci-cd-setup-merge-request.md + - Org Monitoring: + - Monitoring home: salesforce-monitoring-home.md + - 
List of checks: + - Metadata Backup: salesforce-monitoring-metadata-backup.md + - Suspect Setup Actions: salesforce-monitoring-suspect-audit-trail.md + - Apex tests: salesforce-monitoring-apex-tests.md + - Quality Checks with MegaLinter: salesforce-monitoring-quality-checks.md + - Limits issues: salesforce-monitoring-org-limits.md + - Calls to deprecated API versions: salesforce-monitoring-deprecated-api-calls.md + - Inactive users: salesforce-monitoring-inactive-users.md + - Unused licenses: salesforce-monitoring-unused-licenses.md + - Unused Apex Classes: salesforce-monitoring-unused-apex-classes.md + - Unused Connected Apps: salesforce-monitoring-unused-connected-apps.md + - Metadatas without access: salesforce-monitoring-missing-access.md + - Unused Custom Labels: salesforce-monitoring-unused-metadata.md + - Inactive metadata: salesforce-monitoring-inactive-metadata.md + - Missing metadata attributes: salesforce-monitoring-missing-metadata-attributes.md + - Configuration: + - Configuration guide: salesforce-monitoring-config-home.md + - GitHub: salesforce-monitoring-config-github.md + - Gitlab: salesforce-monitoring-config-gitlab.md + - Azure: salesforce-monitoring-config-azure.md + - Bitbucket: salesforce-monitoring-config-bitbucket.md + - Slack: salesforce-ci-cd-setup-integration-slack.md + - Microsoft Teams: salesforce-ci-cd-setup-integration-ms-teams.md + - Generate Org Documentation: + - Doc Gen Home: salesforce-project-documentation.md + - Generate: salesforce-project-doc-generate.md + - Improve with AI: salesforce-project-doc-ai.md + - Complete manually: salesforce-project-doc-complete-manually.md + - Host on Salesforce: salesforce-project-doc-host-on-salesforce.md + - Host on Cloudflare: salesforce-project-doc-cloudflare.md + - AI Deployment Assistant: + - Deployment Assistant home: salesforce-deployment-assistant-home.md + - Configuration: + - Setup Deployment Assistant: salesforce-deployment-assistant-setup.md + - Setup AI integration: 
salesforce-ai-setup.md + - Deployment errors list: salesforce-deployment-assistant-error-list.md + - Integrations: + - Integrations Home: salesforce-ci-cd-setup-integrations-home.md + - GitHub: salesforce-ci-cd-setup-integration-github.md + - Gitlab: salesforce-ci-cd-setup-integration-gitlab.md + - Azure DevOps: salesforce-ci-cd-setup-integration-azure.md + - BitBucket: salesforce-ci-cd-setup-integration-bitbucket.md + - Slack: salesforce-ci-cd-setup-integration-slack.md + - Microsoft Teams: salesforce-ci-cd-setup-integration-ms-teams.md + - Email: salesforce-ci-cd-setup-integration-email.md + - Grafana: salesforce-ci-cd-setup-integration-api.md + - Jira: salesforce-ci-cd-setup-integration-jira.md + - Azure Boards: salesforce-ci-cd-setup-integration-azure-boards.md + - Generic Ticketing: salesforce-ci-cd-setup-integration-generic-ticketing.md + - LLM Providers: + - LLM home: salesforce-ai-setup.md + - Prompts: + - All prompts: salesforce-ai-prompts.md + - Complete Object Attributes: prompt-templates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.md + - Describe Apex: prompt-templates/PROMPT_DESCRIBE_APEX.md + - Describe Approval Process: prompt-templates/PROMPT_DESCRIBE_APPROVAL_PROCESS.md + - Describe Assignment Rules: prompt-templates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.md + - Describe AutoResponse Rules: prompt-templates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.md + - Describe Escalation Rules: prompt-templates/PROMPT_DESCRIBE_ESCALATION_RULES.md + - Describe Flow: prompt-templates/PROMPT_DESCRIBE_FLOW.md + - Describe Flow Diff: prompt-templates/PROMPT_DESCRIBE_FLOW_DIFF.md + - Describe LWC: prompt-templates/PROMPT_DESCRIBE_LWC.md + - Describe Object: prompt-templates/PROMPT_DESCRIBE_OBJECT.md + - Describe Package: prompt-templates/PROMPT_DESCRIBE_PACKAGE.md + - Describe Page: prompt-templates/PROMPT_DESCRIBE_PAGE.md + - Describe Permission Set: prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET.md + - Describe Permission Set Group: 
prompt-templates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.md + - Describe Profile: prompt-templates/PROMPT_DESCRIBE_PROFILE.md + - Describe Roles: prompt-templates/PROMPT_DESCRIBE_ROLES.md + - Solve Deployment Error: prompt-templates/PROMPT_SOLVE_DEPLOYMENT_ERROR.md + - Variables: + - Additional Instructions: prompt-templates/VARIABLE_ADDITIONAL_INSTRUCTIONS.md + - Formatting Requirements: prompt-templates/VARIABLE_FORMATTING_REQUIREMENTS.md + - Output Format Markdown Doc: prompt-templates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.md - Commands: All commands: commands.md hardis: @@ -63,8 +230,18 @@ nav: doc: extract: permsetgroups: hardis/doc/extract/permsetgroups.md + fieldusage: hardis/doc/fieldusage.md + flow2markdown: hardis/doc/flow2markdown.md + mkdocs-to-cf: hardis/doc/mkdocs-to-cf.md + mkdocs-to-salesforce: hardis/doc/mkdocs-to-salesforce.md + override-prompts: hardis/doc/override-prompts.md + packagexml2markdown: hardis/doc/packagexml2markdown.md plugin: generate: hardis/doc/plugin/generate.md + project2markdown: hardis/doc/project2markdown.md + git: + pull-requests: + extract: hardis/git/pull-requests/extract.md lint: access: hardis/lint/access.md metadatastatus: hardis/lint/metadatastatus.md @@ -73,8 +250,13 @@ nav: mdapi: deploy: hardis/mdapi/deploy.md misc: + custom-label-translations: hardis/misc/custom-label-translations.md + purge-references: hardis/misc/purge-references.md + servicenow-report: hardis/misc/servicenow-report.md toml2csv: hardis/misc/toml2csv.md org: + community: + update: hardis/org/community/update.md configure: data: hardis/org/configure/data.md files: hardis/org/configure/files.md @@ -87,12 +269,17 @@ nav: import: hardis/org/data/import.md diagnose: audittrail: hardis/org/diagnose/audittrail.md + instanceupgrade: hardis/org/diagnose/instanceupgrade.md legacyapi: hardis/org/diagnose/legacyapi.md licenses: hardis/org/diagnose/licenses.md + releaseupdates: hardis/org/diagnose/releaseupdates.md + unused-apex-classes: 
hardis/org/diagnose/unused-apex-classes.md + unused-connected-apps: hardis/org/diagnose/unused-connected-apps.md unusedlicenses: hardis/org/diagnose/unusedlicenses.md unusedusers: hardis/org/diagnose/unusedusers.md files: export: hardis/org/files/export.md + import: hardis/org/files/import.md fix: listviewmine: hardis/org/fix/listviewmine.md generate: @@ -101,9 +288,13 @@ nav: all: hardis/org/monitor/all.md backup: hardis/org/monitor/backup.md limits: hardis/org/monitor/limits.md + multi-org-query: hardis/org/multi-org-query.md purge: apexlog: hardis/org/purge/apexlog.md flow: hardis/org/purge/flow.md + refresh: + after-refresh: hardis/org/refresh/after-refresh.md + before-refresh: hardis/org/refresh/before-refresh.md retrieve: packageconfig: hardis/org/retrieve/packageconfig.md sources: @@ -127,6 +318,9 @@ nav: create: hardis/package/version/create.md list: hardis/package/version/list.md promote: hardis/package/version/promote.md + packagexml: + append: hardis/packagexml/append.md + remove: hardis/packagexml/remove.md project: audit: apiversion: hardis/project/audit/apiversion.md @@ -135,6 +329,7 @@ nav: remotesites: hardis/project/audit/remotesites.md clean: emptyitems: hardis/project/clean/emptyitems.md + filter-xml-content: hardis/project/clean/filter-xml-content.md flowpositions: hardis/project/clean/flowpositions.md hiddenitems: hardis/project/clean/hiddenitems.md listviews: hardis/project/clean/listviews.md @@ -143,6 +338,7 @@ nav: orgmissingitems: hardis/project/clean/orgmissingitems.md references: hardis/project/clean/references.md retrievefolders: hardis/project/clean/retrievefolders.md + sensitive-metadatas: hardis/project/clean/sensitive-metadatas.md standarditems: hardis/project/clean/standarditems.md systemdebug: hardis/project/clean/systemdebug.md xml: hardis/project/clean/xml.md @@ -152,17 +348,29 @@ nav: profilestopermsets: hardis/project/convert/profilestopermsets.md create: hardis/project/create.md deploy: + notify: 
hardis/project/deploy/notify.md + quick: hardis/project/deploy/quick.md + simulate: hardis/project/deploy/simulate.md + smart: hardis/project/deploy/smart.md sources: dx: hardis/project/deploy/sources/dx.md metadata: hardis/project/deploy/sources/metadata.md + start: hardis/project/deploy/start.md + validate: hardis/project/deploy/validate.md fix: profiletabs: hardis/project/fix/profiletabs.md v53flexipages: hardis/project/fix/v53flexipages.md generate: + bypass: hardis/project/generate/bypass.md + flow-git-diff: hardis/project/generate/flow-git-diff.md gitdelta: hardis/project/generate/gitdelta.md lint: hardis/project/lint.md metadata: findduplicates: hardis/project/metadata/findduplicates.md + deploy: + quick: hardis/deploy/quick.md + start: hardis/deploy/start.md + validate: hardis/deploy/validate.md scratch: create: hardis/scratch/create.md delete: hardis/scratch/delete.md @@ -184,99 +392,16 @@ nav: resetselection: hardis/work/resetselection.md save: hardis/work/save.md ws: hardis/work/ws.md - - Salesforce CI/CD: - - CI/CD Home: salesforce-ci-cd-home.md - - Contributor Guide: - - Contributor Guide Home: salesforce-ci-cd-use-home.md - - Prepare your computer: salesforce-ci-cd-use-install.md - - Clone the repository: salesforce-ci-cd-clone-repository.md - - Create new task: salesforce-ci-cd-create-new-task.md - - Work on your dev org: - - Work home: salesforce-ci-cd-work-on-task.md - - Open your org: salesforce-ci-cd-work-on-task-open-org.md - - Configure Salesforce: salesforce-ci-cd-work-on-task-configuration.md - - Handle Profiles: salesforce-ci-cd-work-on-task-profiles.md - - Install packages: salesforce-ci-cd-work-on-task-install-packages.md - - Develop in Salesforce: salesforce-ci-cd-work-on-task-development.md - - Publish your work: salesforce-ci-cd-publish-task.md - - Create Merge Request: - - On Gitlab: salesforce-ci-cd-merge-request-gitlab.md - - On Azure: salesforce-ci-cd-pull-request-azure.md - - On Github: salesforce-ci-cd-pull-request-github.md - - 
Check merge request results: - - Merge Request results Home: salesforce-ci-cd-handle-merge-request-results.md - - Solve deployment errors: salesforce-ci-cd-solve-deployment-errors.md - - Solve MegaLinter errors: salesforce-ci-cd-solve-megalinter-errors.md - - Release Manager Guide: - - Release Manager Home: salesforce-ci-cd-release-home.md - - Validate a merge request: salesforce-ci-cd-validate-merge-request.md - - Deploy to major orgs: salesforce-ci-cd-deploy-major-branches.md - - Handle RUN / Hotfix to Production: salesforce-ci-cd-hotfixes.md - - CI/CD Configuration: - - CI/CD Config Home: salesforce-ci-cd-config-home.md - - Overwrite Management: salesforce-ci-cd-config-overwrite.md - - Delta deployments: salesforce-ci-cd-config-delta-deployment.md - - Automated cleaning: salesforce-ci-cd-config-cleaning.md - - Source retrieve issues: salesforce-ci-cd-retrieve.md - - sfdx-hardis for packaging: salesforce-ci-cd-packaging.md - - sfdx-hardis for conga: salesforce-ci-cd-conga.md - - Setup Guide: - - CI/CD Setup Home: salesforce-ci-cd-setup-home.md - - Init Git repository: salesforce-ci-cd-setup-git.md - - DevHub and Sandbox Tracking: salesforce-ci-cd-setup-activate-org.md - - Init SFDX Project: salesforce-ci-cd-setup-init-project.md - - CI Server Authentication: salesforce-ci-cd-setup-auth.md - - Init from Existing Org: salesforce-ci-cd-setup-existing-org.md - - Integrations: - - Integrations Home: salesforce-ci-cd-setup-integrations-home.md - - GitHub: salesforce-ci-cd-setup-integration-github.md - - Gitlab: salesforce-ci-cd-setup-integration-gitlab.md - - Azure DevOps: salesforce-ci-cd-setup-integration-azure.md - - BitBucket: salesforce-ci-cd-setup-integrations-bitbucket.md - - Slack: salesforce-ci-cd-setup-integration-slack.md - - Microsoft Teams: salesforce-ci-cd-setup-integration-ms-teams.md - - Email: salesforce-ci-cd-setup-integration-email.md - - API (beta): salesforce-ci-cd-setup-integration-api.md - - Jira: salesforce-ci-cd-setup-integration-jira.md - - 
Azure Boards: salesforce-ci-cd-setup-integration-azure-boards.md - - Generic Ticketing: salesforce-ci-cd-setup-integration-generic-ticketing.md - - First merge request: salesforce-ci-cd-setup-merge-request.md - - Org Monitoring: - - Monitoring home: salesforce-monitoring-home.md - - List of checks: - - Metadata Backup: salesforce-monitoring-metadata-backup.md - - Suspect Setup Actions: salesforce-monitoring-suspect-audit-trail.md - - Apex tests: salesforce-monitoring-apex-tests.md - - Quality Checks with MegaLinter: salesforce-monitoring-quality-checks.md - - Limits issues: salesforce-monitoring-org-limits.md - - Calls to deprecated API versions: salesforce-monitoring-deprecated-api-calls.md - - Inactive users: salesforce-monitoring-inactive-users.md - - Unused licenses: salesforce-monitoring-unused-licenses.md - - Metadatas without access: salesforce-monitoring-missing-access.md - - Unused Custom Labels: salesforce-monitoring-unused-metadata.md - - Inactive metadata: salesforce-monitoring-inactive-metadata.md - - Missing metadata attributes: salesforce-monitoring-missing-metadata-attributes.md - - Configuration: - - Configuration guide: salesforce-monitoring-config-home.md - - GitHub: salesforce-monitoring-config-github.md - - Gitlab: salesforce-monitoring-config-gitlab.md - - Azure: salesforce-monitoring-config-azure.md - - Bitbucket: salesforce-monitoring-config-bitbucket.md - - Slack: salesforce-ci-cd-setup-integration-slack.md - - Microsoft Teams: salesforce-ci-cd-setup-integration-ms-teams.md - - AI Deployment Assistant: - - Deployment Assistant home: salesforce-deployment-assistant-home.md - - Configuration: - - Setup Deployment Assistant: salesforce-deployment-assistant-setup.md - - Setup AI integration: salesforce-ai-setup.md - - Deployment errors list: salesforce-deployment-assistant-error-list.md + hello: + world: hello/world.md + - Community Events: events.md + - Articles & Videos: articles-videos.md + - Meet the team: contributors.md - Configuration 
reference: sfdx-hardis-config-file.md - - Articles / Tutorials: https://sfdx-hardis.cloudity.com/#articles - - Special Thanks: special-thanks.md - - Frequently Asked Questions: >- - https://nicolas.vuillamy.fr/what-devops-experts-want-to-know-about-salesforce-ci-cd-with-sfdx-hardis-q-a-1f412db34476 - - Contributing: https://sfdx-hardis.cloudity.com/#contributing + - Frequently Asked Questions: https://nicolas.vuillamy.fr/what-devops-experts-want-to-know-about-salesforce-ci-cd-with-sfdx-hardis-q-a-1f412db34476 + - Contributing: contributing.md - License: license.md + - Security: SECURITY.md - Changelog: CHANGELOG.md - Help: sfdx-hardis-help.md - - Who is Cloudity ?: https://www.cloudity.com/ + - Cloudity, Consulting Partner: cloudity-consulting-partner.md diff --git a/package.json b/package.json index 444386f8e..469a52588 100644 --- a/package.json +++ b/package.json @@ -1,99 +1,113 @@ { "name": "sfdx-hardis", - "description": "Swiss-army-knife Toolbox for Salesforce\nAllows you to define a complete CD/CD Pipeline\nOrchestrate base commands and assist users with interactive wizards", - "version": "4.53.0", + "description": "Swiss-army-knife Toolbox for Salesforce.\n Allows you to define a complete CD/CD Pipeline.\n Orchestrate base commands and assist users with interactive wizards", "author": "NicolasVuillamy @nvuillam", "bugs": "https://github.com/hardisgroupcom/sfdx-hardis/issues", + "version": "6.5.3", "dependencies": { - "@actions/github": "^5.1.1", - "@adobe/node-fetch-retry": "^1.1.2", - "@amplitude/node": "^1.3.2", - "@gitbeaker/node": "^35.8.0", - "@keyv/redis": "^2.1.2", - "@oclif/command": "^1", - "@oclif/config": "^1.18.3", - "@oclif/errors": "^1", - "@salesforce/command": "^4.2.1", - "@salesforce/core": "^2.33.1", - "@salesforce/ts-sinon": "^1.2.4", - "@salesforce/ts-types": "^1.5.20", - "@slack/web-api": "^6.9.0", + "@actions/github": "^6.0.1", + "@cparra/apexdocs": "^3.14.1", + "@gitbeaker/node": "^35.8.1", + "@langchain/anthropic": "^0.3.27", + 
"@langchain/community": "^0.3.56", + "@langchain/core": "^0.3.72", + "@langchain/google-genai": "^0.2.17", + "@langchain/ollama": "^0.2.4", + "@oclif/core": "^4.5.3", + "@salesforce/core": "^8.23.1", + "@salesforce/sf-plugins-core": "^11.3.12", + "@slack/types": "^2.16.0", + "@slack/web-api": "^7.10.0", "@supercharge/promise-pool": "^3.2.0", - "@types/mocha": "^8.2.1", - "@types/ws": "^7.4.0", - "@typescript-eslint/eslint-plugin": "^4.17.0", - "@typescript-eslint/parser": "^4.17.0", - "@xmldom/xmldom": "^0.8.6", - "axios": "^1.7.4", - "azure-devops-node-api": "^12.0.0", - "bitbucket": "^2.11.0", - "chalk": "^4.1.0", - "columnify": "^1.5.4", - "cosmiconfig": "^7.0.0", - "cross-spawn": "^7.0.3", - "csv-stringify": "^5.6.1", - "debug": "^4.3.4", + "@xmldom/xmldom": "^0.9.8", + "axios": "^1.12.1", + "azure-devops-node-api": "^14.1.0", + "bitbucket": "^2.12.0", + "chalk": "^5.6.0", + "chrome-launcher": "^1.2.0", + "cloudflare": "^4.4.1", + "cosmiconfig": "^9.0.0", + "cross-spawn": "^7.0.6", + "csv-stringify": "^6.6.0", + "debug": "^4.4.1", + "diff": "^7.0.0", + "dotenv": "^16.6.1", "email-validator": "^2.0.4", - "eslint": "^7.21.0", "exceljs": "^4.4.0", "extract-zip": "^2.0.1", - "find-package-json": "^1.2.0", - "fs-extra": "^9.1.0", + "farmhash": "^4.0.2", + "fast-xml-parser": "^4.5.3", + "form-data": "^4.0.4", + "fs-extra": "^11.3.2", "fs-readdir-recursive": "^1.1.0", - "glob": "^11.0.0", + "glob": "^11.0.3", "he": "^1.2.0", - "inquirer": "^10.1.8", - "isomorphic-dompurify": "^2.3.0", + "inquirer": "^10.2.2", + "isomorphic-dompurify": "^2.26.0", "jira-client": "^8.2.2", - "js-yaml": "^4.0.0", - "jsforce": "^1.11.0", - "keyv": "^4.0.3", - "markdown-toc": "^1.2.0", - "marked": "^12.0.0", - "mega-linter-runner": "^4.39.0", + "js-yaml": "^4.1.0", + "jsdoc-to-markdown": "^9.1.1", + "langchain": "^0.3.34", + "make-fetch-happen": "^14.0.3", + "marked": "^14.1.4", + "md-to-pdf": "^5.2.4", + "mega-linter-runner": "^8.8.0", "moment": "^2.30.1", - "node-fetch": "^3.2.10", - 
"open": "8.4.2", - "openai": "^4.52.0", - "ora": "5.4.1", - "papaparse": "^5.3.1", - "pascalcase": "^1.0.0", - "psl": "^1.8.0", - "puppeteer": "^13.5.2", - "read-pkg-up": "^7.0.1", - "semver": "^7.6.0", - "set-value": "^4.0.0", - "simple-git": "^3.20.0", - "sort-array": "^4.1.3", + "open": "^10.1.2", + "openai": "^4.104.0", + "ora": "^8.2.0", + "papaparse": "^5.5.3", + "pascalcase": "^2.0.0", + "psl": "^1.15.0", + "puppeteer-core": "^23.11.1", + "read-package-up": "^11.0.0", + "semver": "^7.6.3", + "set-value": "^4.1.0", + "simple-git": "^3.28.0", + "sort-array": "^4.1.5", "split": "^1.0.1", "string-template": "^1.0.0", - "tslib": "^2.1.0", - "update-notifier": "^5.1.0", - "which": "^2.0.2", - "ws": "^7.4.4", - "xml2js": "^0.5.0", - "xpath": "^0.0.32" + "update-notifier": "^7.3.1", + "which": "^4.0.0", + "ws": "^8.18.3", + "xml2js": "^0.6.2", + "xpath": "^0.0.34" + }, + "resolutions": { + "@langchain/core": "0.3.57", + "form-data": "4.0.4", + "tmp": "0.2.5" }, "devDependencies": { - "@oclif/dev-cli": "^1.26.0", - "@oclif/plugin-help": "^3.2.2", - "@oclif/test": "^1", - "@salesforce/dev-config": "^2.1.0", - "@types/chai": "^4", - "@types/dompurify": "^3.0.5", + "@oclif/plugin-command-snapshot": "^5.3.6", + "@salesforce/cli-plugins-testkit": "^5.3.41", + "@salesforce/dev-config": "^4.3.2", + "@salesforce/dev-scripts": "^10", + "@types/columnify": "^1.5.4", + "@types/cosmiconfig": "^6.0.3", + "@types/cross-spawn": "^6.0.6", + "@types/diff": "^6.0.0", + "@types/extract-zip": "^2.0.3", + "@types/fs-extra": "^11.0.4", + "@types/fs-readdir-recursive": "^1.1.3", + "@types/glob": "^8.1.0", + "@types/inquirer": "^9.0.7", "@types/jira-client": "^7.1.9", - "@types/node": "^14.14.32", - "chai": "^4", - "globby": "^11.0.2", - "mocha": "^8.3.1", - "nyc": "^15.1.0", - "rimraf": "^3.0.2", - "ts-node": "^10.7.0", - "typescript": "^5.5.2", - "yarn-audit-fix": "^9.3.7" - }, - "resolutions": { - "graceful-fs": "^4.2.4" + "@types/js-yaml": "^4.0.9", + "@types/make-fetch-happen": 
"^10.0.4", + "@types/papaparse": "^5.3.16", + "@types/psl": "^1.1.3", + "@types/set-value": "^4.0.3", + "@types/sort-array": "^4.1.2", + "@types/update-notifier": "^6.0.8", + "@types/which": "^3.0.4", + "@types/ws": "^8.18.1", + "@types/xml2js": "^0.4.14", + "eslint-plugin-sf-plugin": "^1.20.31", + "oclif": "^4.22.9", + "tmp": "0.2.5", + "ts-node": "^10.9.2", + "typescript": "^5.9.2" }, "engines": { "node": ">=20.0.0" @@ -102,12 +116,16 @@ "/lib", "/messages", "/defaults", - "/npm-shrinkwrap.json", - "/oclif.manifest.json" + "/oclif.manifest.json", + "/oclif.lock" ], "homepage": "https://sfdx-hardis.cloudity.com", "keywords": [ + "force", "salesforce", + "salesforcedx", + "sf", + "sf-plugin", "sfdx", "devops", "automation", @@ -132,12 +150,24 @@ "license": "AGPL-3.0", "oclif": { "commands": "./lib/commands", - "bin": "sfdx", + "bin": "sf", + "topicSeparator": " ", + "devPlugins": [ + "@oclif/plugin-help" + ], + "topics": { + "hello": { + "description": "Commands to say hello." + }, + "hardis": { + "description": "Hardis tools by Cloudity" + } + }, + "flexibleTaxonomy": true, "hooks": { "init": [ "./lib/hooks/init/log", "./lib/hooks/init/check-upgrade", - "./lib/hooks/init/check-local-sfdx-hardis-files", "./lib/hooks/init/start-ws-client" ], "prerun": [ @@ -145,36 +175,150 @@ "./lib/hooks/prerun/check-dependencies" ], "auth": [ - "./lib/hooks/prerun/auth" + "./lib/hooks/auth/auth" ], "postrun": [ - "./lib/hooks/postrun/notify", "./lib/hooks/postrun/store-cache" + ], + "finally": [ + "./lib/hooks/finally/notify" ] - }, - "topics": { - "hardis": { - "description": "Hardis tools" - } - }, - "devPlugins": [ - "@oclif/plugin-help" - ] + } }, "repository": "https://github.com/hardisgroupcom/sfdx-hardis", - "prettier": { - "printWidth": 150 - }, "scripts": { - "build": "tsc -b && node build.js && generate-schema-doc config/sfdx-hardis.jsonschema.json docs/schema/sfdx-hardis-json-schema-parameters.html --config minify=false --config link_to_reused_ref=false || echo 'Please 
run \"pip install json-schema-for-humans\" (you need python installed on your computer)'", - "build:doc": "sfdx hardis:doc:plugin:generate", - "compile": "tsc --watch", - "lint": "eslint . --ext .ts", - "lint:fix": "eslint . --ext .ts --fix", - "postpack": "rimraf oclif.manifest.json", - "posttest": "npm run lint", - "prepack": "rimraf lib && tsc -b && oclif-dev manifest && oclif-dev readme || true && yarn build", - "test": "exit 0 && nyc --extension .ts mocha --forbid-only \"test/**/*.test.ts\"", - "version": "yarn build && git add docs/* && oclif-dev readme || true && git add README.md" - } + "build": "wireit", + "build:doc": "sf hardis:doc:plugin:generate", + "clean": "sf-clean", + "clean-all": "sf-clean all", + "compile": "wireit", + "docs": "sf-docs", + "format": "wireit", + "link-check": "wireit", + "lint": "wireit", + "postinstall": "yarn husky install || echo \"Unable to install Husky. If you are in a CI/CD job, that's ok !\"", + "postpack": "sf-clean --ignore-signing-artifacts", + "prepack": "sf-prepack", + "test": "wireit", + "test:nuts": "nyc mocha \"**/*.nut.ts\" --slow 4500 --timeout 600000 --parallel", + "test:only": "wireit", + "version": "oclif readme" + }, + "publishConfig": { + "access": "public" + }, + "wireit": { + "build": { + "command": "node build.cjs && generate-schema-doc config/sfdx-hardis.jsonschema.json docs/schema/sfdx-hardis-json-schema-parameters.html --config minify=false --config link_to_reused_ref=false || echo 'Please run \"pip install json-schema-for-humans\" (you need python installed on your computer)'", + "dependencies": [ + "compile", + "lint" + ] + }, + "compile": { + "command": "tsc -p . 
--pretty --incremental", + "files": [ + "src/**/*.ts", + "**/tsconfig.json", + "messages/**" + ], + "output": [ + "lib/**", + "*.tsbuildinfo" + ], + "clean": "if-file-deleted" + }, + "format": { + "command": "prettier --write \"+(src|test|schemas)/**/*.+(ts|js|json)|command-snapshot.json\"", + "files": [ + "src/**/*.ts", + "test/**/*.ts", + "schemas/**/*.json", + "command-snapshot.json", + ".prettier*" + ], + "output": [] + }, + "lint": { + "command": "eslint src test --color --cache --cache-location .eslintcache", + "files": [ + "src/**/*.ts", + "test/**/*.ts", + "messages/**", + "**/.eslint*", + "**/tsconfig.json" + ], + "output": [] + }, + "test:compile": { + "command": "tsc -p \"./test\" --pretty", + "files": [ + "test/**/*.ts", + "**/tsconfig.json" + ], + "output": [] + }, + "test": { + "dependencies": [ + "test:compile", + "test:only", + "lint" + ] + }, + "test:only": { + "command": "nyc mocha \"test/**/*.test.ts\"", + "env": { + "FORCE_COLOR": "2" + }, + "files": [ + "test/**/*.ts", + "src/**/*.ts", + "**/tsconfig.json", + ".mocha*", + "!*.nut.ts", + ".nycrc" + ], + "output": [] + }, + "test:command-reference": { + "command": "node --loader ts-node/esm --no-warnings=ExperimentalWarning \"./bin/dev.js\" commandreference:generate --erroronwarnings", + "files": [ + "src/**/*.ts", + "messages/**", + "package.json" + ], + "output": [ + "tmp/root" + ] + }, + "test:deprecation-policy": { + "command": "node --loader ts-node/esm --no-warnings=ExperimentalWarning \"./bin/dev.js\" snapshot:compare", + "files": [ + "src/**/*.ts" + ], + "output": [], + "dependencies": [ + "compile" + ] + }, + "test:json-schema": { + "command": "node --loader ts-node/esm --no-warnings=ExperimentalWarning \"./bin/dev.js\" schema:compare", + "files": [ + "src/**/*.ts", + "schemas" + ], + "output": [] + }, + "link-check": { + "command": "node -e \"process.exit(process.env.CI ? 
0 : 1)\" || linkinator \"**/*.md\" --skip \"CHANGELOG.md|node_modules|test/|confluence.internal.salesforce.com|my.salesforce.com|%s\" --markdown --retry --directory-listing --verbosity error", + "files": [ + "./*.md", + "./!(CHANGELOG).md", + "messages/**/*.md" + ], + "output": [] + } + }, + "exports": "./lib/index.js", + "type": "module" } diff --git a/src/commands/hardis/auth/login.ts b/src/commands/hardis/auth/login.ts index 61c9c9863..09737bc08 100644 --- a/src/commands/hardis/auth/login.ts +++ b/src/commands/hardis/auth/login.ts @@ -1,74 +1,109 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { authOrg } from '../../../common/utils/authUtils.js'; +import { CONSTANTS, getEnvVar } from '../../../config/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class Login extends SfCommand { + public static title = 'Login'; -export default class Login extends SfdxCommand { - public static title = "Login"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("loginToOrg"); +**Authenticates to a Salesforce org, primarily designed for CI/CD workflows.** - public static examples = ["$ sfdx hardis:auth:login"]; +This command facilitates secure and automated logins to Salesforce organizations within continuous integration and continuous delivery pipelines. It leverages pre-configured authentication details, ensuring that CI/CD processes can interact with Salesforce without manual intervention. + +Key aspects: + +- **Configuration-Driven:** It relies on authentication variables and files set up by dedicated configuration commands: + - For CI/CD repositories: [Configure Org CI Authentication](${CONSTANTS.DOC_URL_ROOT}/hardis/project/configure/auth/) + - For Monitoring repositories: [Configure Org Monitoring](${CONSTANTS.DOC_URL_ROOT}/hardis/org/configure/monitoring/) +- **Technical Org Support:** Supports authentication to a 'technical org' (e.g., for calling Agentforce from another org) by utilizing the \`SFDX_AUTH_URL_TECHNICAL_ORG\` environment variable. If this variable is set, the command authenticates to this org with the alias \`TECHNICAL_ORG\`. + +To obtain the \`SFDX_AUTH_URL_TECHNICAL_ORG\` value, you can run \`sf org display --verbose --json\` and copy the \`sfdxAuthUrl\` field from the output. + +
+Technical explanations + +The command's technical flow involves: + +- **Flag Parsing:** It parses command-line flags such as \`instanceurl\`, \`devhub\`, \`scratchorg\`, and \`debug\` to determine the authentication context. +- **Authentication Hook:** It triggers an internal authentication hook (\`this.config.runHook('auth', ...\`)) which is responsible for executing the actual authentication logic based on the provided flags (e.g., whether it's a Dev Hub or a scratch org). +- **Environment Variable Check:** It checks for the presence of \`SFDX_AUTH_URL_TECHNICAL_ORG\` or \`TECHNICAL_ORG_ALIAS\` environment variables. +- **\`authOrg\` Utility:** If a technical org is configured, it calls the \`authOrg\` utility function to perform the authentication for that specific org, ensuring it's connected and available for subsequent operations. +- **Salesforce CLI Integration:** It integrates with the Salesforce CLI's authentication mechanisms to establish and manage org connections. +
+`; + + public static examples = [ + '$ sf hardis:auth:login', + 'CI=true sf hardis:auth:login' + ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - instanceurl: flags.string({ - char: "r", - description: messages.getMessage("instanceUrl"), + public static flags: any = { + instanceurl: Flags.string({ + char: 'r', + description: messages.getMessage('instanceUrl'), }), - devhub: flags.boolean({ - char: "h", + devhub: Flags.boolean({ + char: 'h', default: false, - description: messages.getMessage("withDevHub"), + description: messages.getMessage('withDevHub'), }), - scratchorg: flags.boolean({ - char: "s", + scratchorg: Flags.boolean({ + char: 's', default: false, - description: messages.getMessage("scratch"), + description: messages.getMessage('scratch'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; /* jscpd:ignore-end */ public async run(): Promise { - const devHub = this.flags.devhub || false; - const scratch = this.flags.scratchorg || false; - await this.config.runHook("auth", { + 
console.log('Entering Login command to authenticate to Salesforce org'); + const { flags } = await this.parse(Login); + const devHub = flags.devhub || false; + const scratch = flags.scratchorg || false; + await this.config.runHook('auth', { checkAuth: !devHub, Command: this, devHub, scratch, }); + // Login to secondary org + if (getEnvVar('TECHNICAL_ORG_ALIAS') || getEnvVar('SFDX_AUTH_URL_TECHNICAL_ORG')) { + await authOrg('TECHNICAL_ORG', { + checkAuth: true, + Command: this, + devHub: false, + scratch: false, + argv: this.argv + }); + } + // Return an object to be displayed with --json - return { outputString: "Logged to Salesforce org" }; + return { outputString: 'Logged to Salesforce org' }; } } diff --git a/src/commands/hardis/cache/clear.ts b/src/commands/hardis/cache/clear.ts index 29bf20b86..613f9ba2e 100644 --- a/src/commands/hardis/cache/clear.ts +++ b/src/commands/hardis/cache/clear.ts @@ -1,52 +1,63 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { clearCache } from "../../../common/cache"; -import { uxLog } from "../../../common/utils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from "chalk"; +import { clearCache } from '../../../common/cache/index.js'; +import { uxLog } from '../../../common/utils/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CacheClear extends SfCommand { + public static title = 'Clear sfdx-hardis cache'; -export default class DxSources extends SfdxCommand { - public static title = "Clear sfdx-hardis cache"; + public static description = ` +## Command Behavior - public static description = "Clear cache generated by sfdx-hardis"; +**Clears the local cache generated by the sfdx-hardis plugin.** - public static examples = ["$ sfdx hardis:cache:clear"]; +This command is designed to remove temporary files, stored configurations, and other cached data that sfdx-hardis uses to optimize its operations. Clearing the cache can be beneficial for: - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +- **Troubleshooting:** Resolving unexpected behavior or inconsistencies. +- **Disk Space Management:** Freeing up storage on your local machine. +- **Ensuring Fresh Data:** Guaranteeing that the plugin operates with the most current data and configurations. + +## Technical explanations + +The command's technical implementation is straightforward: + +- **Direct Function Call:** It directly invokes the \`clearCache()\` function, which is imported from uri../../../common/cache/index.jsuri. +- **Cache Management Logic:** The uriclearCache()\` function encapsulates the logic for identifying and removing the specific files and directories that constitute the sfdx-hardis cache. 
+`; + + public static examples = ['$ sf hardis:cache:clear']; + + public static uiConfig = { hide: true }; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = false; /* jscpd:ignore-end */ public async run(): Promise { await clearCache(); - uxLog(this, "sfdx-hardis cache cleared"); + uxLog("action", this, c.cyan('sfdx-hardis cache cleared')); return { - message: "sfdx-hardis cache cleared", + message: 'sfdx-hardis cache cleared', }; } } diff --git a/src/commands/hardis/config/get.ts b/src/commands/hardis/config/get.ts index 634381c81..be7f5992b 100644 --- a/src/commands/hardis/config/get.ts +++ b/src/commands/hardis/config/get.ts @@ -1,59 +1,70 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { uxLog } from "../../../common/utils"; -import { getConfig } from "../../../config"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from 
'@salesforce/ts-types'; +import { uxLog } from '../../../common/utils/index.js'; +import { getConfig } from '../../../config/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ConfigGet extends SfCommand { + public static title = 'Deploy metadata sources to org'; -export default class DxSources extends SfdxCommand { - public static title = "Deploy metadata sources to org"; + public static description = ` +## Command Behavior - public static description = "Returns sfdx-hardis project config for a given level"; +**Retrieves and displays the sfdx-hardis configuration for a specified level.** - public static examples = ["$ sfdx hardis:project:deploy:sources:metadata"]; +This command allows you to inspect the configuration that is currently in effect for your project, which is useful for debugging and understanding how sfdx-hardis will behave. - protected static flagsConfig = { - level: flags.string({ - char: "l", - default: "project", - description: "project,branch or user", - options: ["project", "branch", "user"], +- **Configuration levels:** It can retrieve configuration from three different levels: + - **Project:** The configuration defined in the project's \`.sfdx-hardis.yml\` file. + - **Branch:** The configuration defined in a branch-specific configuration file (e.g., \`.sfdx-hardis.production.yml\`). + - **User:** The global user-level configuration. 
+ +## Technical explanations + +The command's logic is straightforward: + +- **\`getConfig\` function:** It calls the \`getConfig\` utility function, passing the desired configuration level as an argument. +- **Configuration loading:** The \`getConfig\` function is responsible for finding the appropriate configuration file, reading its contents, and parsing it as YAML or JSON. +- **Output:** The retrieved configuration is then displayed to the user as a JSON string. +`; + + public static examples = ['$ sf hardis:project:deploy:sources:metadata']; + + public static flags: any = { + level: Flags.string({ + char: 'l', + default: 'project', + description: 'project,branch or user', + options: ['project', 'branch', 'user'], }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = false; protected configInfo: any = {}; /* jscpd:ignore-end */ public async run(): Promise { - const level = this.flags.level || "project"; + const { flags } = await this.parse(ConfigGet); + const level = flags.level || 'project'; this.configInfo = await getConfig(level); - 
uxLog(this, JSON.stringify(this.configInfo)); + uxLog("other", this, JSON.stringify(this.configInfo)); return { config: this.configInfo, }; diff --git a/src/commands/hardis/doc/extract/permsetgroups.ts b/src/commands/hardis/doc/extract/permsetgroups.ts index 2d065d5b9..86ba43b95 100644 --- a/src/commands/hardis/doc/extract/permsetgroups.ts +++ b/src/commands/hardis/doc/extract/permsetgroups.ts @@ -1,75 +1,89 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import * as toc from "markdown-toc"; -import { uxLog } from "../../../../common/utils"; -import { parseXmlFile } from "../../../../common/utils/xmlUtils"; -import { getReportDirectory } from "../../../../config"; -import { WebSocketClient } from "../../../../common/websocketClient"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class DocGenerate extends SfdxCommand { - public static title = "Generate project documentation"; - - public static description = `Generate markdown files with project documentation`; - - public static examples = ["$ sfdx hardis:doc:extract:permsetgroups"]; - - protected static flagsConfig = { - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. 
Must end with .csv", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { parseXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { getReportDirectory } from '../../../../config/index.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class ExtractPermSetGroups extends SfCommand { + public static title = 'Generate project documentation'; + + public static description = ` +## Command Behavior + +**Extracts and documents Salesforce Permission Set Groups and their assigned Permission Sets.** + +This command generates two types of output: a CSV file and a Markdown file, providing a clear overview of how Permission Set Groups are structured and what Permission Sets they contain within your Salesforce project. This is particularly useful for: + +- **Documentation:** Creating human-readable documentation of your permission architecture. +- **Auditing:** Understanding the composition of permission sets for security and compliance checks. +- **Analysis:** Gaining insights into how permissions are bundled and assigned in your Salesforce environment. + +The generated CSV file provides a structured, machine-readable format, while the Markdown file offers a more descriptive, human-friendly view, including the group's name, label, description, and a list of its constituent permission sets. 
+ +## Technical explanations + +The command performs the following technical steps: + +- **File Discovery:** It uses \`glob\` to find all \`.permissionsetgroup-meta.xml\` files within the current working directory, respecting \`.gitignore\` patterns. +- **XML Parsing:** For each discovered Permission Set Group XML file, it parses the XML content using \`parseXmlFile\` to extract relevant information such as the group's name, label, description, and the names of the Permission Sets it contains. +- **Data Structuring:** The extracted data is then structured into a list of objects, making it easy to process. +- **CSV Generation:** It constructs a CSV file with two columns: 'Permission set group' and 'Permission sets'. The 'Permission sets' column lists all assigned permission sets for each group, enclosed in quotes and separated by commas. The CSV file is saved to a temporary directory or a user-specified path. +- **Markdown Generation:** It generates a Markdown file (\`docs/permission-set-groups.md\`) that includes a title, a table of contents, and detailed sections for each Permission Set Group. Each section lists the group's name, label, description, and a bulleted list of its assigned Permission Sets. +- **File System Operations:** It uses \`fs-extra\` to ensure output directories exist and to write the generated CSV and Markdown files. +- **VS Code Integration:** It uses \`WebSocketClient.requestOpenFile\` to automatically open the generated CSV and Markdown files in VS Code, enhancing the user experience. +`; + + public static examples = ['$ sf hardis:doc:extract:permsetgroups']; + + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected outputFile; protected debugMode = false; public async run(): Promise { - this.outputFile = this.flags.outputfile || null; - this.debugMode = this.flags.debug || false; - + const { flags } = await this.parse(ExtractPermSetGroups); + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Generating CSV and Markdown with Permission Set Groups and their related Permission Sets`)); + uxLog("action", this, c.cyan(`Generating CSV and Markdown with Permission Set Groups and their related Permission Sets`)); /* jscpd:ignore-end */ - const psgList = []; + const psgList: any[] = []; const globPatternPSG = process.cwd() + `/**/*.permissionsetgroup-meta.xml`; - const psgFiles = await glob(globPatternPSG); - uxLog(this, c.grey(`Found ${psgFiles.length} permission set groups`)); + const psgFiles = await glob(globPatternPSG, { ignore: GLOB_IGNORE_PATTERNS }); + 
uxLog("log", this, c.grey(`Found ${psgFiles.length} permission set groups`)); for (const psgFile of psgFiles) { - const psgName = psgFile.replace(/\\/g, "/").split("/").pop().replace(".permissionsetgroup-meta.xml", ""); + const psgName = (psgFile.replace(/\\/g, '/').split('/').pop() || '').replace('.permissionsetgroup-meta.xml', ''); const psg = await parseXmlFile(psgFile); const psgItem = { name: psgName, @@ -81,12 +95,12 @@ export default class DocGenerate extends SfdxCommand { } // Build CSV - const csvLines = []; - const header = ["Permission set group", "Permission sets"]; + const csvLines: any[] = []; + const header = ['Permission set group', 'Permission sets']; csvLines.push(header); for (const psg of psgList) { const psgLine = [psg.name]; - psgLine.push(`"${psg.permissionSetsNames.join(",")}"`); + psgLine.push(`"${psg.permissionSetsNames.join(',')}"`); csvLines.push(psgLine); } @@ -94,41 +108,41 @@ export default class DocGenerate extends SfdxCommand { if (this.outputFile == null) { // Default file in system temp directory if --outputfile not provided const reportDir = await getReportDirectory(); - this.outputFile = path.join(reportDir, "permission-set-groups.csv"); + this.outputFile = path.join(reportDir, 'permission-set-groups.csv'); } else { // Ensure directories to provided --outputfile are existing await fs.ensureDir(path.dirname(this.outputFile)); } try { - const csvText = csvLines.map((e) => e.join(",")).join("\n"); - await fs.writeFile(this.outputFile, csvText, "utf8"); - uxLog(this, c.cyan(`Permission set groups CSV file generated in ${c.bold(c.green(this.outputFile))}`)); + const csvText = csvLines.map((e) => e.join(',')).join('\n'); + await fs.writeFile(this.outputFile, csvText, 'utf8'); + uxLog("action", this, c.cyan(`Permission set groups CSV file generated in ${c.bold(c.green(this.outputFile))}`)); // Trigger command to open CSV file in VsCode extension WebSocketClient.requestOpenFile(this.outputFile); - } catch (e) { - uxLog(this, 
c.yellow("Error while generating CSV log file:\n" + e.message + "\n" + e.stack)); + } catch (e: any) { + uxLog("warning", this, c.yellow('Error while generating CSV log file:\n' + (e as Error).message + '\n' + e.stack)); this.outputFile = null; } // Build markdown file - const mdPsg = ["# Permission set groups", "", "", ""]; + const mdPsg = ['# Permission set groups', '', '', '']; for (const psg of psgList) { - mdPsg.push(...[`## ${psg.name}`, "", psg.label, "", psg.description, ""]); + mdPsg.push(...[`## ${psg.name}`, '', psg.label, '', psg.description, '']); for (const psName of psg.permissionSetsNames) { mdPsg.push(` - ${psName} `); } - mdPsg.push(""); + mdPsg.push(''); } - const docFile = "docs/permission-set-groups.md"; - await fs.ensureDir("docs"); - let mdPsgText = mdPsg.join("\n"); - mdPsgText = toc.insert(mdPsgText); - await fs.writeFile(docFile, mdPsgText, "utf8"); - uxLog(this, c.cyan(`Permission set groups Markdown file generated in ${c.bold(c.green(docFile))}`)); + const docFile = 'docs/permission-set-groups.md'; + await fs.ensureDir('docs'); + const mdPsgText = mdPsg.join('\n'); + // mdPsgText = toc.insert(mdPsgText); + await fs.writeFile(docFile, mdPsgText, 'utf8'); + uxLog("action", this, c.cyan(`Permission set groups Markdown file generated in ${c.bold(c.green(docFile))}`)); // Trigger command to open CSV file in VsCode extension WebSocketClient.requestOpenFile(docFile); // Return an object to be displayed with --json - return { outputString: "Permission set groups Documentation generated" }; + return { outputString: 'Permission set groups Documentation generated' }; } } diff --git a/src/commands/hardis/doc/fieldusage.ts b/src/commands/hardis/doc/fieldusage.ts new file mode 100644 index 000000000..212bf64a8 --- /dev/null +++ b/src/commands/hardis/doc/fieldusage.ts @@ -0,0 +1,198 @@ +import { requiredOrgFlagWithDeprecations, SfCommand } from '@salesforce/sf-plugins-core'; +import { Flags } from '@salesforce/sf-plugins-core'; +import { Connection } 
from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import sortArray from 'sort-array'; +import { generateReports, uxLog, uxLogTable } from '../../../common/utils/index.js'; +import { soqlQuery, soqlQueryTooling } from '../../../common/utils/apiUtils.js'; + +export default class HardisDocFieldusage extends SfCommand { + + public static flags: any = { + 'target-org': requiredOrgFlagWithDeprecations, + 'sObjects': Flags.string({ + char: 's', + description: 'Comma-separated list of sObjects to filter', + required: false, + }), + }; + + public static description = ` +## Command Behavior + +**Retrieves and displays the usage of custom fields within a Salesforce org, based on metadata dependencies.** + +This command helps identify where custom fields are referenced across various metadata components in your Salesforce environment. It's particularly useful for impact analysis before making changes to fields, or for understanding the complexity and interconnectedness of your Salesforce customizations. + +- **Targeted sObjects:** You can specify a comma-separated list of sObjects (e.g., \`Account,Contact\`) to narrow down the analysis to relevant objects. If no sObjects are specified, it will analyze all customizable sObjects. +- **Usage Details:** For each custom field, the command lists the metadata components (e.g., Apex Classes, Visualforce Pages, Flows, Reports) that reference it, along with their types and names. + +!['Find custom fields usage'](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/doc-fieldusage.png) + +
+Technical explanations + +The command operates by querying Salesforce's Tooling API and Metadata Component Dependency API: + +- **sObject Retrieval:** It first queries \`EntityDefinition\` to get a list of customizable sObjects, optionally filtered by the user's input. +- **Custom Field Identification:** For each identified sObject, it queries \`CustomField\` to retrieve all custom fields associated with it. +- **Dependency Lookup:** The core of the command involves querying \`MetadataComponentDependency\` using the IDs of the custom fields. This API provides information about which other metadata components depend on the specified fields. +- **Data Aggregation & Reporting:** The retrieved data is then processed and formatted into a tabular output, showing the sObject name, field name, field type, dependency type, and dependency name. The results are also generated into various report formats (e.g., CSV, JSON) for further analysis. +- **SOQL Queries:** It uses \`soqlQuery\` and \`soqlQueryTooling\` utilities to execute SOQL queries against the Salesforce org. +
+`; + + public static examples = [ + '$ sf hardis:doc:fieldusage', + '$ sf hardis:doc:fieldusage --sObjects Account,Contact,Opportunity', + '$ sf hardis:doc:fieldusage --target-org myOrgAlias --sObjects CustomObject__c' + ]; + + public async querySObjects(connection: Connection, sObjectsFilter?: string[]) { + let sObjectsQuery = ` + SELECT Id, DeveloperName, PublisherId, IsCustomizable, IsCustomSetting + FROM EntityDefinition + WHERE IsCustomizable = true + `; + + if (sObjectsFilter && sObjectsFilter.length > 0) { + const sObjectsList = sObjectsFilter + .map(sObject => sObject.trim().replace(/__c$/, '')) + .map(sObject => `'${sObject}'`) + .join(','); + + sObjectsQuery += ` AND DeveloperName IN (${sObjectsList})`; + } + + const sObjectResults = await soqlQuery(sObjectsQuery, connection); + uxLog("other", this, `Found ${sObjectResults.records.length} sObjects.`); + return sObjectResults; + } + + public async getFilteredSObjects(connection: Connection, sObjectsFilter?: string[]) { + const sObjectResults = await this.querySObjects(connection, sObjectsFilter); + const sObjectsDict: Record = {}; + + sObjectResults.records.forEach((record) => { + if (!record.DeveloperName.endsWith('__Share') && !record.DeveloperName.endsWith('__ChangeEvent')) { + sObjectsDict[record.DeveloperName] = { + publisherId: record.PublisherId, + fields: [] + }; + } + }); + + return sObjectsDict; + } + + public async queryCustomFields(connection: Connection, sObjectName: string) { + uxLog("other", this, `Extracting fields for sObject: ${sObjectName}`); + const queryTooling = ` + SELECT Id, DeveloperName + FROM CustomField + WHERE EntityDefinition.DeveloperName = '${sObjectName}' + `; + const fieldResults = await soqlQueryTooling(queryTooling, connection); + return fieldResults; + } + + public async queryMetadataComponentDependency(connection: Connection, fieldIds: string[]) { + const metadataQuery = ` + SELECT MetadataComponentId, MetadataComponentType, MetadataComponentName, 
RefMetadataComponentName, RefMetadataComponentId + FROM MetadataComponentDependency + WHERE RefMetadataComponentId IN (${fieldIds.join(',')}) + `; + const dependencyResults = await soqlQueryTooling(metadataQuery, connection); + + return dependencyResults; + } + + public async run(): Promise { + const { flags } = await this.parse(HardisDocFieldusage); + const connection = flags['target-org'].getConnection(); + + const sObjectsFilter = flags['sObjects'] ? flags['sObjects'].split(',').map(s => s.trim()) : undefined; + + const sObjectsDict = await this.getFilteredSObjects(connection, sObjectsFilter); + + const fieldQueries = Object.keys(sObjectsDict).map(async (sObjectName) => { + const fieldResults = await this.queryCustomFields(connection, sObjectName); + if (fieldResults.records.length > 0) { + fieldResults.records.forEach((field) => { + sObjectsDict[sObjectName].fields.push({ + id: field.Id, + name: field.DeveloperName, + type: "custom", + usedIn: [] + }); + }); + } + }); + await Promise.all(fieldQueries); + + const dependencyQueries = Object.entries(sObjectsDict).map(async ([sObjectName, { fields }]) => { + if (fields.length === 0) { + uxLog("other", this, `sObject ${sObjectName} does not have any custom fields, skipping dependencies.`); + return; + } + + uxLog("other", this, `Retrieving dependencies for sObject: ${sObjectName}`); + + const fieldIds = fields.map((field) => `'${field.id}'`); + const dependencyResults = await this.queryMetadataComponentDependency(connection, fieldIds); + + dependencyResults.records.forEach((dep) => { + const field = fields.find(f => f.id === dep.RefMetadataComponentId); + if (field) { + field.usedIn.push({ id: dep.MetadataComponentId, type: dep.MetadataComponentType, name: dep.MetadataComponentName }); + } + }); + }); + await Promise.all(dependencyQueries); + + const columns = [ + { key: 'sObjectName', header: 'sObject Name' }, + { key: 'fieldName', header: 'Field Name' }, + { key: 'fieldType', header: 'Field Type' }, + { key: 
'dependencyType', header: 'Dependency Type' }, + { key: 'dependencyName', header: 'Dependency Name' } + ]; + + const rows: any[] = []; + + for (const [sObjectName, { fields }] of Object.entries(sObjectsDict)) { + fields.forEach((field) => { + field.usedIn.forEach((dep) => { + const row = {}; + row[columns[0].key] = sObjectName; + row[columns[1].key] = field.name; + row[columns[2].key] = field.type; + row[columns[3].key] = dep.type; + row[columns[4].key] = dep.name; + + rows.push(row); + }); + }); + } + + const resultSorted = sortArray(rows, { + by: [columns[0].key, columns[1].key, columns[3].key], + order: ['asc', 'asc', 'asc'], + }); + + uxLog("action", this, c.cyan(`Found ${resultSorted.length} custom fields usage records.`)); + uxLogTable(this, rows); + + const reportFiles = await generateReports(resultSorted, columns, this, { + logFileName: 'fields-usage', + logLabel: 'Find fields usage', + }); + + return { + outputString: 'Processed fieldusage doc', + result: resultSorted, + reportFiles, + }; + } +} diff --git a/src/commands/hardis/doc/flow2markdown.ts b/src/commands/hardis/doc/flow2markdown.ts new file mode 100644 index 000000000..4fc218232 --- /dev/null +++ b/src/commands/hardis/doc/flow2markdown.ts @@ -0,0 +1,164 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import c from "chalk"; +import * as path from "path"; +import fs from "fs-extra"; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { isCI, uxLog } from '../../../common/utils/index.js'; +import { MetadataUtils } from '../../../common/metadata-utils/index.js'; +import { generateFlowMarkdownFile, generateHistoryDiffMarkdown, generateMarkdownFileWithMermaid } from '../../../common/utils/mermaidUtils.js'; +import { CONSTANTS } from '../../../config/index.js'; +import { setConnectionVariables } from 
'../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class Flow2Markdown extends SfCommand { + public static title = 'Flow to Markdown'; + + public static description = ` +## Command Behavior + +**Generates comprehensive Markdown documentation from a Salesforce Flow metadata file.** + +This command automates the creation of human-readable documentation for Salesforce Flows, making it easier to understand their logic and behavior. It can process a single Flow file or multiple Flow files, generating a Markdown file for each. + +Key features include: + +- **Detailed Flow Description:** Extracts and presents the Flow's structure, elements, and decision logic in a clear, organized Markdown format. +- **AI-Powered Summarization (Optional):** If [AI integration](${CONSTANTS.DOC_URL_ROOT}/salesforce-ai-setup/) is configured, the documentation will include an AI-generated summary of the Flow's purpose and functionality. +- **Mermaid Diagram Generation:** Integrates with Mermaid to visualize the Flow's structure, providing a graphical representation alongside the textual description. +- **History Diff (Optional):** Can generate a Markdown file showing the historical differences of the Flow, useful for tracking changes over time. +- **PDF Export (Optional):** Allows for the generation of the documentation in PDF format for easy sharing and archiving. +- **Interactive File Selection:** If no input file is specified, the command interactively prompts the user to select Flow files. + +
+Technical explanations + +The command leverages several internal utilities and external libraries to achieve its functionality: + +- **Flow Metadata Parsing:** Reads and parses the XML content of Salesforce Flow metadata files (.flow-meta.xml). +- **Markdown Generation:** Utilizes \`generateFlowMarkdownFile\` to transform the parsed Flow data into a structured Markdown format. +- **Mermaid Integration:** Employs \`generateMarkdownFileWithMermaid\` to embed Mermaid diagrams within the Markdown output, which are then rendered by compatible Markdown viewers. +- **AI Integration:** If enabled, it interacts with an AI service (via \`describeWithAi\` option) to generate a high-level summary of the Flow. +- **Git History Analysis:** For the \`--with-history\` flag, it uses \`generateHistoryDiffMarkdown\` to analyze Git history and present changes to the Flow. +- **File System Operations:** Uses \`fs-extra\` for file system operations like reading input files, creating output directories (e.g., \`docs/flows/\`), and writing Markdown and PDF files. +- **Salesforce CLI Integration:** Uses \`@salesforce/sf-plugins-core\` for command-line parsing and \`setConnectionVariables\` for Salesforce organization context. +- **WebSocket Communication:** Interacts with a WebSocket client (\`WebSocketClient.requestOpenFile\`) to open the generated Markdown file in a VS Code tab, enhancing user experience. +
+`; + + public static examples = [ + '$ sf hardis:doc:flow2markdown', + '$ sf hardis:doc:flow2markdown --inputfile force-app/main/default/flows/MyFlow.flow-meta.xml', + '$ sf hardis:doc:flow2markdown --pdf', + '$ sf hardis:doc:flow2markdown --inputfile force-app/main/default/flows/MyFlow.flow-meta.xml --pdf', + ]; + + public static flags: any = { + inputfile: Flags.string({ + char: 'x', + description: 'Path to Flow metadata file. If not specified, the command will prompt the user', + }), + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output markdown file. Must end with .md', + }), + "with-history": Flags.boolean({ + default: false, + description: "Generate a markdown file with the history diff of the Flow", + }), + pdf: Flags.boolean({ + description: 'Also generate the documentation in PDF format', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + "target-org": optionalOrgFlagWithDeprecations + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = false; + + protected withHistory = false; + protected withPdf = false; + protected singleFileMode = false; + protected inputFiles; + protected outputFile; + protected debugMode = false; + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(Flow2Markdown); + const outputFiles: string[] = []; + + this.inputFiles = flags.inputfile ? [flags.inputfile] : null; + this.outputFile = flags.outputfile || null; + this.withHistory = flags["with-history"] === true ? true : false; + this.withPdf = flags.pdf === true ? 
true : false; + this.singleFileMode = this.inputFiles != null && this.inputFiles.length == 1; + this.debugMode = flags.debug || false; + await setConnectionVariables(flags['target-org']?.getConnection(), true); // Required for some notifications providers like Email, or for Agentforce + + + if (this.inputFiles === null && !isCI) { + this.inputFiles = await MetadataUtils.promptMultipleFlows(); + } + + if (this.singleFileMode && !this.outputFile) { + await fs.ensureDir(path.join("docs", "flows")); + this.outputFile = path.join("docs", "flows", path.basename(this.inputFiles[0]).replace(".flow-meta.xml", ".md")); + } + + for (const inputFile of this.inputFiles) { + let outputFile = this.outputFile; + if (!this.singleFileMode) { + await fs.ensureDir(path.join("docs", "flows")); + outputFile = path.join("docs", "flows", path.basename(inputFile).replace(".flow-meta.xml", ".md")); + } + const flowName = path.basename(inputFile, ".flow-meta.xml"); + + uxLog("log", this, c.grey(`Generating markdown for Flow ${inputFile}...`)); + const flowXml = (await fs.readFile(inputFile, "utf8")).toString(); + const genRes = await generateFlowMarkdownFile(flowName, flowXml, outputFile, { collapsedDetails: false, describeWithAi: true, flowDependencies: {} }); + if (!genRes) { + throw new Error("Error generating markdown file"); + } + if (this.debugMode) { + await fs.copyFile(outputFile, outputFile.replace(".md", ".mermaid.md")); + } + const gen2res = await generateMarkdownFileWithMermaid(outputFile, outputFile, null, this.withPdf); + if (!gen2res) { + throw new Error("Error generating mermaid markdown file"); + } + + if (this.withHistory) { + try { + await generateHistoryDiffMarkdown(inputFile, this.debugMode); + } catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating history diff markdown: ${e.message}`)); + } + } + + // Open file in a new VsCode tab if available + WebSocketClient.requestOpenFile(outputFile); + outputFiles.push(outputFile); + + } + // Return an object to be 
displayed with --json + return { outputFiles: outputFiles }; + } + +} diff --git a/src/commands/hardis/doc/mkdocs-to-cf.ts b/src/commands/hardis/doc/mkdocs-to-cf.ts new file mode 100644 index 000000000..ec47382dc --- /dev/null +++ b/src/commands/hardis/doc/mkdocs-to-cf.ts @@ -0,0 +1,284 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import fs from 'fs-extra'; +import c from "chalk"; +import * as path from "path"; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import Cloudflare from 'cloudflare'; +import { execCommand, getCurrentGitBranch, uxLog } from '../../../common/utils/index.js'; + +import { CONSTANTS, getEnvVar } from '../../../config/index.js'; +import which from 'which'; +import { generateMkDocsHTML } from '../../../common/docBuilder/docUtils.js'; + + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class MkDocsToCloudflare extends SfCommand { + public static title = 'MkDocs to Cloudflare'; + + public static description = `\ +## Command Behavior\ +\ +**Generates MkDocs HTML pages and uploads them to Cloudflare as a static site, secured with Cloudflare Access.**\ +\ +This command automates the deployment of your project's documentation (built with MkDocs) to Cloudflare Pages, making it accessible and secure. It handles the entire process from HTML generation to Cloudflare configuration.\ +\ +Key operations performed:\ +\ +- **MkDocs HTML Generation:** Builds the MkDocs project into static HTML pages. 
It can use a locally installed \`mkdocs-material\` or a \`mkdocs\` Docker image.\ +- **Cloudflare Pages Project Creation/Update:** Creates a new Cloudflare Pages project if one doesn't exist for your documentation, or updates an existing one.\ +- **Cloudflare Access Policy Assignment:** Assigns a policy to restrict access to the deployed application, ensuring only authorized users can view your documentation.\ +- **Cloudflare Access Application Setup:** Configures a Cloudflare Access application for the deployed site, integrating it with your Zero Trust policies.\ +- **HTML Page Upload:** Deploys the generated HTML pages to Cloudflare Pages.\ +- **Browser Opening (Non-CI):** Opens the newly deployed website in your default browser if the command is not run in a CI/CD environment.\ +\ +**Prerequisite:** The documentation must have been previously generated using \`sf hardis:doc:project2markdown --with-history\`.\ +\ +**Customization:** You can override default styles by customizing your \`mkdocs.yml\` file.\ +\ +More information can be found in the [Documentation section](${CONSTANTS.DOC_URL_ROOT}/salesforce-project-documentation/).\ +\ +**Environment Variables for Cloudflare Configuration:**\ +\ +| Variable | Description | Default |\ +| :---------------------------------------- | :----------------------------------------------------------------------- | :------------------------------------: |\ +| \`CLOUDFLARE_EMAIL\` | Cloudflare account email | _Required_ |\ +| \`CLOUDFLARE_API_TOKEN\` | Cloudflare API token | _Required_ |\ +| \`CLOUDFLARE_ACCOUNT_ID\` | Cloudflare account ID | _Required_ |\ +| \`CLOUDFLARE_PROJECT_NAME\` | Project name, also used for the site URL | Built from Git branch name |\ +| \`CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE\` | Cloudflare default login method type | \`onetimepin\` |\ +| \`CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN\` | Cloudflare default access email domain | \`@cloudity.com\` |\ +| \`CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST\` | 
Comma-separated list of additional policy IDs to assign to the application | _Optional_ |\ +\ +
+Technical explanations\ +\ +The command orchestrates interactions with MkDocs, Cloudflare APIs, and Git:\ +\ +- **MkDocs Integration:** It calls \`generateMkDocsHTML()\` to execute the MkDocs build process, which converts Markdown files into static HTML. It checks for the presence of \`mkdocs.yml\` to ensure it's a valid MkDocs project.\ +- **Cloudflare API Interaction:** It uses the \`cloudflare\` npm package to interact with the Cloudflare API. This involves:\ + - **Authentication:** Initializes the Cloudflare client using \`CLOUDFLARE_EMAIL\`, \`CLOUDFLARE_API_TOKEN\`, and \`CLOUDFLARE_ACCOUNT_ID\` environment variables.\ + - **Pages Project Management:** Calls \`client.pages.projects.get()\` to check for an existing project and \`client.pages.projects.create()\` to create a new one if needed.\ + - **Access Policy Management:** Lists existing access policies (\`client.zeroTrust.access.policies.list()\`) and creates a new one (\`client.zeroTrust.access.policies.create()\`) if the required policy doesn't exist. It configures the policy with email domain restrictions and a default login method.\ + - **Access Application Management:** Lists existing access applications (\`client.zeroTrust.access.applications.list()\`) and creates a new one (\`client.zeroTrust.access.applications.create()\`) for the deployed site. 
It then updates the application to associate it with the created access policy.\ +- **Git Integration:** Retrieves the current Git branch name using \`getCurrentGitBranch()\` to construct the Cloudflare project name and branch for deployment.\ +- **Wrangler CLI:** Uses the \`wrangler\` CLI (Cloudflare's developer tool) to deploy the generated HTML pages to Cloudflare Pages via \`wrangler pages deploy\`.\ +- **Environment Variable Management:** Reads various environment variables to configure Cloudflare settings and project names.\ +- **Error Handling:** Includes checks for missing \`mkdocs.yml\` and Cloudflare environment variables, throwing \`SfError\` when necessary.\ +
+`; + + public static examples = [ + '$ sf hardis:doc:mkdocs-to-cf', + ]; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }) + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected debugMode = false; + protected apiEmail: string | undefined; + protected apiToken: string | undefined; + protected accountId: string | undefined; + protected client: Cloudflare; + protected projectName: string | null; + protected currentGitBranch: string; + protected defaultLoginMethodType: string = process.env.CLOUDFLARE_DEFAULT_LOGIN_METHOD_TYPE || "onetimepin"; + protected defaultAccessEmailDomain: string = process.env.CLOUDFLARE_DEFAULT_ACCESS_EMAIL_DOMAIN || "@cloudity.com"; + protected pagesProjectName: string; + protected pagesProject: Cloudflare.Pages.Projects.Project; + protected accessPolicyName: string; + protected accessPolicy: Cloudflare.ZeroTrust.Access.Policies.PolicyGetResponse | null; + protected extraPolicyIds: string[] = (process.env.CLOUDFLARE_EXTRA_ACCESS_POLICY_ID_LIST || "").split(",").filter(p => p); + protected accessApp: Cloudflare.ZeroTrust.Access.Applications.ApplicationGetResponse.SelfHostedApplication | null; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(MkDocsToCloudflare); + this.debugMode = flags.debug || false; + + // Check if the project is a MkDocs project + const mkdocsYmlFile = path.join(process.cwd(), "mkdocs.yml"); + if (!fs.existsSync(mkdocsYmlFile)) { + throw new SfError('This command needs a mkdocs.yml config file. 
Generate one using "sf hardis:doc:project2markdown --with-history"'); + } + + this.currentGitBranch = await getCurrentGitBranch() || "main"; + this.projectName = await this.getProjectName(); + this.pagesProjectName = `sfdoc-${this.projectName}`; + this.accessPolicyName = `access-policy-${this.projectName}`; + + // Create connection to Cloudflare + this.setupCloudflareClient(); + + // Generate HTML pages + if ((process.env?.SKIP_BUILD_HTML || "false") !== "true") { + await generateMkDocsHTML(); + } + + // Get or Create Cloudflare Pages project + await this.ensureCloudflarePagesProject(); + + // Ensure there is a policy restricting access to the application + await this.ensureCloudflareAccessPolicy(); + + // Ensure there is an access application + await this.ensureCloudflareAccessApplication(); + + // Ensure the access application has the right policy + await this.ensureCloudflareAccessApplicationPolicy(); + + // Upload pages + await this.uploadHtmlPages(); + + return { success: true }; + } + + // Search first for CLOUDFLARE_PROJECT_NAME_ env var, then CLOUDFLARE_PROJECT_NAME, then git branch name + // If none of them is found, use the default project name + private async getProjectName(): Promise { + const defaultProjectName = (getEnvVar('CLOUDFLARE_PROJECT_NAME') || this.currentGitBranch).replace(/\//g, "-").toLowerCase(); + const promptsLanguage = getEnvVar('PROMPTS_LANGUAGE') || 'en'; + const languageScopedProjectVariableName = `CLOUDFLARE_PROJECT_NAME_${promptsLanguage?.toUpperCase()}`; + if (getEnvVar(languageScopedProjectVariableName)) { + return getEnvVar(languageScopedProjectVariableName) || defaultProjectName; + } + return defaultProjectName + } + + private setupCloudflareClient() { + this.apiEmail = process.env.CLOUDFLARE_EMAIL; + this.apiToken = process.env.CLOUDFLARE_API_TOKEN; + this.accountId = process.env.CLOUDFLARE_ACCOUNT_ID; + if (!this.apiEmail || !this.accountId || !this.apiToken) { + throw new Error('Missing CLOUDFLARE_EMAIL or 
CLOUDFLARE_API_TOKEN or CLOUDFLARE_ACCOUNT_ID'); + } + this.client = new Cloudflare({ + apiEmail: this.apiEmail, + apiToken: this.apiToken, + }); + uxLog("log", this, c.grey("Cloudflare client info found")); + } + + private async ensureCloudflarePagesProject() { + uxLog("action", this, c.cyan("Checking Cloudflare Pages project...")); + try { + this.pagesProject = await this.client.pages.projects.get(this.pagesProjectName, { account_id: this.accountId || "" }); + uxLog("action", this, c.cyan("Cloudflare Pages project found: " + this.pagesProjectName)); + } catch (e: any) { + uxLog("log", this, c.grey(e.message)); + this.pagesProject = await this.client.pages.projects.create({ + name: this.pagesProjectName, + account_id: this.accountId || "", + production_branch: this.currentGitBranch || "main", + }); + uxLog("success", this, c.green("Cloudflare Pages project created: " + this.pagesProjectName)); + } + uxLog("log", this, c.grey(JSON.stringify(this.pagesProject, null, 2))); + } + + private async ensureCloudflareAccessPolicy() { + uxLog("action", this, c.cyan("Checking Cloudflare Access policy...")); + const accessPolicies = await this.client.zeroTrust.access.policies.list({ account_id: this.accountId || "" }); + this.accessPolicy = accessPolicies.result.find((p: Cloudflare.ZeroTrust.Access.Policies.PolicyGetResponse) => p.name === this.accessPolicyName) || null; + if (this.accessPolicy) { + uxLog("action", this, c.cyan("Cloudflare policy found: " + this.accessPolicyName)); + } + else { + const loginMethods = await this.client.zeroTrust.identityProviders.list({ account_id: this.accountId || "" }); + const defaultLoginMethod = loginMethods.result.find((m: Cloudflare.ZeroTrust.IdentityProviders.IdentityProviderListResponse) => m.type === this.defaultLoginMethodType); + if (!defaultLoginMethod) { + throw new SfError(`No login method of type ${this.defaultLoginMethodType} found in Cloudflare account. 
Please create one in Zero Trust/Settings before running this command`); + } + this.accessPolicy = await this.client.zeroTrust.access.policies.create({ + name: this.accessPolicyName, + account_id: this.accountId || "", + decision: "allow", + include: [ + { email_domain: { domain: this.defaultAccessEmailDomain } }, + ], + require: [ + { login_method: { id: defaultLoginMethod.id } } + ], + } as any); + uxLog("success", this, c.green("Cloudflare policy created: " + this.accessPolicyName)); + } + uxLog("log", this, c.grey(JSON.stringify(this.accessPolicy, null, 2))); + } + + private async ensureCloudflareAccessApplication() { + uxLog("action", this, c.cyan("Checking Cloudflare access application...")); + const accessApplications = await this.client.zeroTrust.access.applications.list({ account_id: this.accountId || "" }); + this.accessApp = (accessApplications.result.find((a: Cloudflare.ZeroTrust.Access.Applications.ApplicationListResponse) => a.name === this.pagesProject?.domains?.[0]) || null) as any; + if (this.accessApp) { + uxLog("action", this, c.cyan("Cloudflare access application found: " + this.pagesProject?.domains?.[0])); + } + else { + this.accessApp = (await this.client.zeroTrust.access.applications.create({ + name: this.pagesProject?.domains?.[0], + account_id: this.accountId || "", + type: "self_hosted", + domain: this.pagesProject?.domains?.[0], + destinations: [ + { + "type": "public", + "uri": `${this.pagesProject?.domains?.[0]}` + }, + { + "type": "public", + "uri": `*.${this.pagesProject?.domains?.[0]}` + } + ] + }) as Cloudflare.ZeroTrust.Access.Applications.ApplicationGetResponse.SelfHostedApplication); + uxLog("success", this, c.green("Cloudflare access application created: " + this.pagesProject?.domains?.[0])); + } + uxLog("log", this, c.grey(JSON.stringify(this.accessApp, null, 2))); + } + + private async ensureCloudflareAccessApplicationPolicy() { + uxLog("action", this, c.cyan("Checking Cloudflare access application policy...")); + if 
(this.accessApp?.policies?.length && this.accessApp.policies.find(p => p.id === this.accessPolicy?.id)) { + uxLog("action", this, c.cyan(`Access Application ${this.accessApp.name} already has the policy ${this.accessPolicy?.name}`)); + } + else { + const policiesWithExtra = this.extraPolicyIds.concat([this.accessPolicy?.id || ""]).filter(p => p); + this.accessApp = (await this.client.zeroTrust.access.applications.update(this.accessApp?.id || "", { + account_id: this.accountId, + domain: this.accessApp?.domain, + destinations: this.accessApp?.destinations, + type: this.accessApp?.type, + policies: policiesWithExtra, + } as Cloudflare.ZeroTrust.Access.ApplicationUpdateParams)) as Cloudflare.ZeroTrust.Access.Applications.ApplicationGetResponse.SelfHostedApplication; + uxLog("success", this, c.green(`Access Application ${this.accessApp?.name} updated with the policy ${this.accessPolicy?.name}`)); + } + uxLog("log", this, c.grey(JSON.stringify(this.accessApp, null, 2))); + } + + private async uploadHtmlPages() { + uxLog("action", this, c.cyan("Uploading HTML pages to Cloudflare Pages...")); + let wranglerCommand = `wrangler pages deploy ./site --project-name="${this.pagesProjectName}" --branch=${this.currentGitBranch}`; + const isWranglerAvailable = await which("wrangler", { nothrow: true }); + if (!isWranglerAvailable) { + wranglerCommand = "npx --yes " + wranglerCommand; + } + await execCommand(wranglerCommand, this, { fail: true, output: true, debug: this.debugMode }); + } + +} diff --git a/src/commands/hardis/doc/mkdocs-to-salesforce.ts b/src/commands/hardis/doc/mkdocs-to-salesforce.ts new file mode 100644 index 000000000..93deb4c62 --- /dev/null +++ b/src/commands/hardis/doc/mkdocs-to-salesforce.ts @@ -0,0 +1,268 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import fs, { ensureDir } from 'fs-extra'; +import c from "chalk"; +import * as path from "path"; +import { Messages, SfError } 
from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { createTempDir, execCommand, isCI, uxLog } from '../../../common/utils/index.js'; +import { createBlankSfdxProject } from '../../../common/utils/projectUtils.js'; +import { initPermissionSetAssignments, isProductionOrg } from '../../../common/utils/orgUtils.js'; +import { CONSTANTS } from '../../../config/index.js'; +import { generateMkDocsHTML, readMkDocsFile, writeMkDocsFile } from '../../../common/docBuilder/docUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class MkDocsToSalesforce extends SfCommand { + public static title = 'MkDocs to Salesforce'; + + public static description = ` +## Command Behavior\ + +**Generates MkDocs HTML pages and deploys them to a Salesforce org as a static resource, Visualforce page, and Custom Tab.** + +This command provides a convenient way to host your project's documentation directly within Salesforce, making it easily accessible to users. It automates the entire process of converting your MkDocs project into a deployable Salesforce package. + +Key operations performed: + +- **MkDocs HTML Generation:** Builds the MkDocs project into static HTML pages. It can use a locally installed \`mkdocs-material\` or a \`mkdocs\` Docker image. +- **Salesforce Metadata Creation:** Creates the necessary Salesforce metadata components: + - A **Static Resource** to store the generated HTML, CSS, and JavaScript files. + - A **Visualforce Page** that embeds the static resource, allowing it to be displayed within Salesforce. + - A **Custom Tab** to provide a user-friendly entry point to the documentation from the Salesforce navigation. + - A **Permission Set** to grant access to the Visualforce page and Custom Tab. +- **Metadata Deployment:** Deploys these newly created metadata components to the specified Salesforce org. 
+- **Permission Set Assignment:** Assigns the newly created permission set to the current user, ensuring immediate access to the documentation. +- **Browser Opening (Non-CI):** Opens the Custom Tab in your default browser if the command is not run in a CI/CD environment. + +**Prerequisite:** The documentation must have been previously generated using \`sf hardis:doc:project2markdown --with-history\`. + +**Customization:** + +- You can specify the type of documentation to generate (e.g., \`CICD\` or \`Monitoring\`) using the \`--type\` flag. The default is \`CICD\`. +- You can override default styles by customizing your \`mkdocs.yml\` file. + +More information can be found in the [Documentation section]($\{CONSTANTS.DOC_URL_ROOT}/salesforce-project-documentation/).\ + +
+Technical explanations + +The command orchestrates interactions with MkDocs, Salesforce CLI, and file system operations: + +- **MkDocs Integration:** It first modifies the \`mkdocs.yml\` file to ensure compatibility with Salesforce static resources (e.g., setting \`use_directory_urls\` to \`false\`). Then, it calls \`generateMkDocsHTML()\` to build the static HTML content. +- **Temporary SFDX Project:** It creates a temporary SFDX project using \`createTempDir\` and \`createBlankSfdxProject\` to stage the generated Salesforce metadata before deployment. +- **Metadata Generation:** It dynamically creates the XML metadata files for the Static Resource, Visualforce Page, Custom Tab, and Permission Set. The HTML content from the MkDocs build is moved into the static resource folder. +- **Salesforce CLI Deployment:** It constructs and executes a \`sf project deploy start\` command to deploy the generated metadata to the target Salesforce org. It intelligently adds \`--test-level RunLocalTests\` for production orgs and \`--test-level NoTestRun\` for sandboxes. +- **Permission Set Assignment:** After successful deployment, it calls \`initPermissionSetAssignments\` to assign the newly created permission set to the current user. +- **Browser Launch:** For non-CI environments, it uses \`execCommand\` to open the deployed Custom Tab in the user's default browser. +- **Error Handling and Cleanup:** It includes error handling for deployment failures (e.g., static resource size limits) and ensures that the \`mkdocs.yml\` file is restored to its original state after execution. +- **File System Operations:** It extensively uses \`fs-extra\` for file manipulation, including creating directories, moving files, and writing XML content. +
+`; + + public static examples = [ + '$ sf hardis:doc:mkdocs-to-salesforce', + ]; + + public static flags: any = { + type: Flags.string({ + char: 't', + options: ["CICD", "Monitoring"], + default: "CICD", + description: 'Type of the documentation to generate. Default is "all"', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected debugMode = false; + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(MkDocsToSalesforce); + const type = flags.type || "CICD"; + const targetUsername = flags['target-org'].getUsername(); + const conn = flags['target-org'].getConnection(); + this.debugMode = flags.debug || false; + + // Check if the project is a MkDocs project + const mkdocsYmlFile = path.join(process.cwd(), "mkdocs.yml"); + if (!fs.existsSync(mkdocsYmlFile)) { + throw new SfError('This command needs a mkdocs.yml config file. 
Generate one using "sf hardis:doc:project2markdown --with-history"'); + } + + // Update mkdocs.yml so it is compliant with being hosted in a static resource + const mkdocsYml: any = readMkDocsFile(mkdocsYmlFile); + const mkdocsYmlCopy = Object.assign({}, mkdocsYml); + mkdocsYmlCopy.use_directory_urls = false; + await writeMkDocsFile(mkdocsYmlFile, mkdocsYmlCopy); + + try { + // Generate HTML pages + await generateMkDocsHTML(); + + // Create temp sfdx project + const tmpDirForSfdxProject = await createTempDir(); + const tmpProjectPath = await createBlankSfdxProject(tmpDirForSfdxProject); + const defaultProjectPath = path.join(tmpProjectPath, "force-app", "main", "default"); + + // Create static resource folder + const resName = `SfdxHardis_MkDocsSite_${type}`; + const { mkDocsResourcePath, vfPageMetaFile, tabMetaFile, permissionSetFile } = await this.createDocMetadatas(resName, defaultProjectPath, type); + + // Upload resource to remote org + const deployRes = await this.uploadDocMetadatas(resName, targetUsername, conn, tmpProjectPath, mkDocsResourcePath, vfPageMetaFile, tabMetaFile, permissionSetFile); + + const success = deployRes.status === 0; + + if (success) { + // Assign current user to newly created permission set + await initPermissionSetAssignments([resName], targetUsername); + } + + if (success && !isCI) { + uxLog("action", this, c.cyan(`Opening the Custom Tab ${c.green(resName)} in your default browser...`)); + uxLog("warning", this, c.yellow(`If you have an access issue, make sure the tab ${resName} is not hidden on your Profile...`)); + const sfPath = `lightning/n/${resName}`; + await execCommand(`sf org open --path ${sfPath} --target-org ${targetUsername}`, this, { fail: false, output: true, debug: this.debugMode }); + } + // Restore previous mkdocs.yml version + await writeMkDocsFile(mkdocsYmlFile, mkdocsYml); + return { success: success }; + } catch (e) { + // Restore previous mkdocs.yml version + await writeMkDocsFile(mkdocsYmlFile, mkdocsYml); + 
throw e; + } + } + + private async createDocMetadatas(resName: string, defaultProjectPath: string, type: any) { + uxLog("action", this, c.cyan(`Creating Static Resource ${resName} metadata...`)); + const staticResourcePath = path.join(defaultProjectPath, "staticresources"); + const mkDocsResourcePath = path.join(staticResourcePath, resName); + await ensureDir(mkDocsResourcePath); + await fs.move(path.join(process.cwd(), "site"), mkDocsResourcePath, { overwrite: true }); + + // Create Static resource metadata + uxLog("action", this, c.cyan(`Creating Static Resource ${resName} metadata...`)); + const mkDocsResourceFileName = path.join(staticResourcePath, `${resName}.resource-meta.xml`); + const mkDocsResourceMeta = ` + + Private + application/x-zip-compressed + +`; + await fs.writeFile(mkDocsResourceFileName, mkDocsResourceMeta); + + // Create visual force page + uxLog("action", this, c.cyan(`Creating VisualForce page ${resName} metadata...`)); + const vfPagesPath = path.join(defaultProjectPath, "pages"); + await ensureDir(vfPagesPath); + const vfPageFileName = path.join(vfPagesPath, `${resName}.page`); + const vfPageCode = ` + + +`; + await fs.writeFile(vfPageFileName, vfPageCode); + + // Create visual force page metadata + const vfPageMetaFile = path.join(vfPagesPath, `${resName}.page-meta.xml`); + const vfPageMeta = ` + + 62.0 + false + false + + +`; + await fs.writeFile(vfPageMetaFile, vfPageMeta); + + // Create custom tab metadata + const tabsPath = path.join(defaultProjectPath, "tabs"); + await ensureDir(tabsPath); + const tabMetaFile = path.join(tabsPath, `${resName}.tab-meta.xml`); + const tabMeta = ` + + + Custom46: Postage + ${resName} + +`; + await fs.writeFile(tabMetaFile, tabMeta); + + // Create Permission Set metadata + const permissionSetsPath = path.join(defaultProjectPath, "permissionsets"); + await ensureDir(permissionSetsPath); + const permissionSetFile = path.join(tabsPath, `${resName}.permissionset-meta.xml`); + const permissionSetMeta = ` + + 
Permissions to Visualize Project Documentation, including Flow history, generated with sfdx-hardis from Git + true + + + ${resName} + true + + +`; + await fs.writeFile(permissionSetFile, permissionSetMeta); + + return { mkDocsResourcePath, vfPageMetaFile, tabMetaFile, permissionSetFile }; + } + + private async uploadDocMetadatas(resName: string, targetUsername: any, conn: any, tmpProjectPath: string, mkDocsResourcePath: string, vfPageMetaFile: string, tabMetaFile: string, permissionSetFile: string) { + uxLog("action", this, c.cyan(`Deploying Static Resource ${resName}, VisualForce page ${resName}, Custom Tab ${resName} and Permission Set ${resName} to org ${targetUsername}...`)); + let deployCommand = `sf project deploy start -m StaticResource:${resName} -m ApexPage:${resName} -m CustomTab:${resName} -m PermissionSet:${resName} --ignore-conflicts --ignore-warnings --target-org ${targetUsername}`; + const isProdOrg = await isProductionOrg(targetUsername, { conn: conn }); + if (isProdOrg) { + deployCommand += " --test-level RunLocalTests"; + } + else { + deployCommand += " --test-level NoTestRun"; + } + + let deployRes = { status: 1, stdout: "", stderr: "" }; + try { + deployRes = await execCommand(deployCommand, this, { cwd: tmpProjectPath, fail: false, output: true, debug: this.debugMode }); + } catch (e: any) { + deployRes.status = e.code; + deployRes.stderr = e.stderr; + deployRes.stdout = e.stdout; + } + if (deployRes.status !== 0) { + uxLog("error", this, c.red(`Deployment failed:\n${deployRes.stderr + "\n" + deployRes.stdout}`)); + if ((deployRes.stderr + deployRes.stdout).includes("static resource cannot exceed")) { + uxLog("error", this, c.red(`Documentation is too big to be hosted in a Static Resource.`)); + uxLog("warning", this, c.yellow(`Cloudity can help you to host it somewhere else :)`)); + uxLog("warning", this, c.yellow(`If you are interested, contact us on ${c.green(c.bold(CONSTANTS.CONTACT_URL))}`)); + } + else { + uxLog("warning", this, 
c.yellow(`You can manually deploy the Static Resource ${resName},the VisualForce page ${resName} and the custom tab ${resName} to your org +- Static Resource: ${mkDocsResourcePath} (If you upload using UI, zip the folder and make sure to have index.html at the zip root) +- VisualForce page: ${vfPageMetaFile} +- Custom tab: ${tabMetaFile} +- Permission Set: ${permissionSetFile} +You can also run the documentation locally using "mkdocs serve -v || python -m mkdocs serve -v || py -m mkdocs serve -v" +`)); + } + } + else { + uxLog("success", this, c.green(`SFDX Project documentation uploaded to salesforce and available in Custom Tab ${resName}`)); + } + return deployRes; + } +} diff --git a/src/commands/hardis/doc/override-prompts.ts b/src/commands/hardis/doc/override-prompts.ts new file mode 100644 index 000000000..8184d5791 --- /dev/null +++ b/src/commands/hardis/doc/override-prompts.ts @@ -0,0 +1,192 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import fs from 'fs-extra'; +import c from "chalk"; +import * as path from "path"; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { uxLog } from '../../../common/utils/index.js'; +import { PROMPT_TEMPLATES } from '../../../common/aiProvider/promptTemplates/index.js'; +import { PROMPT_VARIABLES } from '../../../common/aiProvider/promptTemplates/variablesIndex.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class OverridePrompts extends SfCommand { + public static title = 'Override AI Prompt Templates'; + public static description = ` +## Command Behavior + +**Creates local override files for AI prompt templates and variables, allowing for customization of sfdx-hardis AI interactions.** + +This command sets up a \`config/prompt-templates/\` folder within your project. 
It populates this folder with \`.txt\` files containing the default AI prompt templates and variables used by sfdx-hardis. This enables you to tailor the AI's behavior and responses to your organization's specific needs, terminology, and coding standards. + +Key functionalities: + +- **Template Customization:** Modify templates used for generating documentation, solving deployment errors, and describing Salesforce metadata. +- **Variable Customization:** Adjust common instruction patterns (e.g., role definitions, formatting requirements, security cautions) that are reused across multiple templates. +- **Persistent Overrides:** Once created, these local files will override the default sfdx-hardis templates and variables, and they will not be overwritten by future sfdx-hardis updates unless explicitly requested with the \`--overwrite\` flag. + +**Important:** After running this command, you can modify any of the \`.txt\` files in \`config/prompt-templates/\` to customize the AI's behavior. + +Available templates: +${Object.keys(PROMPT_TEMPLATES).map(name => `- ${name}`).join('\\n')} + +Available variables: +${Object.keys(PROMPT_VARIABLES).map(name => `- ${name}`).join('\\n')} + +More info on [AI Prompts documentation](https://sfdx-hardis.cloudity.com/salesforce-ai-prompts/) + +
+Technical explanations + +The command's technical implementation involves: + +- **Directory Creation:** Ensures the \`config/prompt-templates/\` directory exists using \`fs.ensureDirSync()\`. +- **File Copying:** Iterates through predefined \`PROMPT_TEMPLATES\` and \`PROMPT_VARIABLES\` objects. For each template/variable, it extracts the English text content and writes it to a corresponding \`.txt\` file in the \`config/prompt-templates/\` directory. +- **Overwrite Logic:** Checks if a file already exists. If the \`--overwrite\` flag is provided, it overwrites the existing file; otherwise, it skips the file and logs a message. +- **User Feedback:** Provides detailed logs about created, overwritten, and skipped files, along with instructions on how to use the customized prompts and variables. +- **Dynamic Content:** The description itself dynamically lists available templates and variables by iterating over \`PROMPT_TEMPLATES\` and \`PROMPT_VARIABLES\` objects. +
+`; + public static examples = [ + '$ sf hardis:doc:override-prompts', + '$ sf hardis:doc:override-prompts --overwrite', + ]; + + public static flags: any = { + overwrite: Flags.boolean({ + default: false, + description: 'Overwrite existing template files if they already exist', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }) + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = false; + + protected debugMode = false; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(OverridePrompts); + this.debugMode = flags.debug || false; + + // Create config/prompt-templates folder + const configDir = path.join(process.cwd(), 'config'); + const promptTemplatesDir = path.join(configDir, 'prompt-templates'); + uxLog("action", this, c.cyan('Creating prompt templates directory...')); + fs.ensureDirSync(promptTemplatesDir); + + let createdCount = 0; + let overwrittenCount = 0; + let skippedCount = 0; + + uxLog("action", this, c.cyan('Creating prompt templates and variables...')); + // Copy all prompt templates as .txt files + for (const [templateName, templateDefinition] of Object.entries(PROMPT_TEMPLATES)) { + const targetFile = path.join(promptTemplatesDir, `${templateName}.txt`); + + if (fs.existsSync(targetFile)) { + if (flags.overwrite) { + // Get the English text from the template + const promptText = templateDefinition.text.en; + + // Overwrite the existing file + fs.writeFileSync(targetFile, promptText); + uxLog("log", this, c.grey(`Overwritten: ${templateName}.txt`)); + overwrittenCount++; + } else { + uxLog("warning", this, c.yellow(`Template already exists: 
${templateName}.txt`)); + skippedCount++; + } + continue; + } + + // Get the English text from the template + const promptText = templateDefinition.text.en; + + // Write the prompt text to the .txt file + fs.writeFileSync(targetFile, promptText); + uxLog("success", this, c.green(`Created: ${templateName}.txt`)); + createdCount++; + } + + // Copy all prompt variables as .txt files + for (const [variableName, variableDefinition] of Object.entries(PROMPT_VARIABLES)) { + const targetFile = path.join(promptTemplatesDir, `${variableName}.txt`); + + if (fs.existsSync(targetFile)) { + if (flags.overwrite) { + // Get the English text from the variable + const variableText = variableDefinition.text.en; + + // Overwrite the existing file + fs.writeFileSync(targetFile, variableText); + uxLog("log", this, c.grey(`Overwritten: ${variableName}.txt`)); + overwrittenCount++; + } else { + uxLog("warning", this, c.yellow(`Variable already exists: ${variableName}.txt`)); + skippedCount++; + } + continue; + } + + // Get the English text from the variable + const variableText = variableDefinition.text.en; + + // Write the variable text to the .txt file + fs.writeFileSync(targetFile, variableText); + uxLog("success", this, c.green(`Created: ${variableName}.txt`)); + createdCount++; + } // Summary + uxLog("other", this, ''); + const actionMessage = overwrittenCount > 0 ? 
+ `Created ${createdCount} and overwritten ${overwrittenCount} prompt template and variable files` : + `Created ${createdCount} prompt template and variable files`; + uxLog("action", this, c.cyan(actionMessage)); + + if (overwrittenCount > 0) { + uxLog("warning", this, c.yellow(`Overwritten ${overwrittenCount} existing files`)); + } + + if (skippedCount > 0) { + uxLog("warning", this, c.yellow(`Skipped ${skippedCount} existing files`)); + } + + const usageMessage = [ + '', + 'Prompt templates and variables location:', + ` ${promptTemplatesDir}`, + '', + 'Usage:', + ' - Edit template .txt files to customize AI prompts', + ' - Edit variable .txt files to customize common instruction patterns', + ' - Use {{VARIABLE_NAME}} placeholders for dynamic content', + ' - Templates can reference variables with {{VARIABLE_NAME}} syntax', + ' - Your custom prompts and variables will override the defaults automatically', + ].join('\n'); + uxLog("log", this, c.grey(usageMessage)); + uxLog("log", this, c.grey('Documentation: https://sfdx-hardis.cloudity.com/salesforce-ai-prompts/')); + + return { + status: 'success', + message: `${actionMessage} in ${promptTemplatesDir}`, + createdCount, + overwrittenCount, + skippedCount, + outputDir: promptTemplatesDir + }; + } +} diff --git a/src/commands/hardis/doc/packagexml2markdown.ts b/src/commands/hardis/doc/packagexml2markdown.ts new file mode 100644 index 000000000..ec6f36f97 --- /dev/null +++ b/src/commands/hardis/doc/packagexml2markdown.ts @@ -0,0 +1,97 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { DocBuilderPackageXML } from '../../../common/docBuilder/docBuilderPackageXml.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = 
Messages.loadMessages('sfdx-hardis', 'org'); + +export default class PackageXml2Markdown extends SfCommand { + public static title = 'PackageXml to Markdown'; + + public static description = ` +## Command Behavior + +**Generates a Markdown documentation file from a Salesforce \`package.xml\` file.** + +This command provides a convenient way to visualize and document the metadata components defined within a \`package.xml\` file. It's particularly useful for: + +- **Understanding Project Scope:** Quickly grasp what metadata types and components are included in a specific deployment or retrieval. +- **Documentation:** Create human-readable documentation of your project's metadata structure. +- **Collaboration:** Share a clear overview of metadata changes with team members or stakeholders. + +Key features: + +- **Flexible Input:** You can specify the path to a \`package.xml\` file using the \`--inputfile\` flag. If not provided, the command will automatically look for \`package.xml\` files in the \`manifest\` folder. +- **Customizable Output:** You can force the path and name of the output Markdown file using the \`--outputfile\` flag. +- **VS Code Integration:** Automatically opens the generated Markdown file in a new VS Code tab for immediate review. + +
+Technical explanations + +The command's technical implementation involves: + +- **XML Parsing:** It reads the content of the specified \`package.xml\` file and parses its XML structure to extract the metadata types and their members. +- **Markdown Generation:** It utilizes the \`DocBuilderPackageXML.generatePackageXmlMarkdown\` utility to transform the parsed \`package.xml\` data into a structured Markdown format. This utility handles the formatting and organization of the metadata information. +- **File System Operations:** It uses \`fs-extra\` (implicitly through \`DocBuilderPackageXML\`) to read the input \`package.xml\` and write the generated Markdown file. +- **WebSocket Communication:** It interacts with a WebSocket client (\`WebSocketClient.requestOpenFile\`) to open the generated Markdown file in a VS Code tab, enhancing user experience. +- **Salesforce Org Context:** It can optionally use the \`target-org\` flag to provide context, such as the instance URL, which might be used for generating links or additional information within the Markdown. +
+`; + + public static examples = [ + '$ sf hardis:doc:packagexml2markdown', + '$ sf hardis:doc:packagexml2markdown --inputfile manifest/package-all.xml' + ]; + + public static flags: any = { + inputfile: Flags.string({ + char: 'x', + description: 'Path to package.xml file. If not specified, the command will look in manifest folder', + }), + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .md', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + "target-org": optionalOrgFlagWithDeprecations + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = false; + + protected inputFile; + protected outputFile; + protected debugMode = false; + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(PackageXml2Markdown); + this.inputFile = flags.inputfile || null; + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; + + // Generate markdown for package.xml + const instanceUrl = flags?.['target-org']?.getConnection()?.instanceUrl; + this.outputFile = await DocBuilderPackageXML.generatePackageXmlMarkdown(this.inputFile, this.outputFile, null, instanceUrl); + + // Open file in a new VsCode tab if available + WebSocketClient.requestOpenFile(this.outputFile); + + // Return an object to be displayed with --json + return { outputFile: this.outputFile }; + } + +} diff --git a/src/commands/hardis/doc/plugin/generate.ts b/src/commands/hardis/doc/plugin/generate.ts index a35010faa..c164563eb 100644 --- a/src/commands/hardis/doc/plugin/generate.ts +++ b/src/commands/hardis/doc/plugin/generate.ts @@ 
-1,132 +1,156 @@ /* jscpd:ignore-start */ -import * as Config from "@oclif/config"; -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import * as sortArray from "sort-array"; -import * as set from "set-value"; -import * as yaml from "js-yaml"; -import { uxLog } from "../../../../common/utils"; -import { PACKAGE_ROOT_DIR } from "../../../../settings"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import sortArray from 'sort-array'; +import set from 'set-value'; +import * as yaml from 'js-yaml'; +import { uxLog } from '../../../../common/utils/index.js'; +import { PACKAGE_ROOT_DIR } from '../../../../settings.js'; +import { Config } from '@oclif/core'; +import { readMkDocsFile, writeMkDocsFile } from '../../../../common/docBuilder/docUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DocPluginGenerate extends SfCommand { + public static title = 'Generate SF Cli Plugin Documentation'; -export default class DocPluginGenerate extends SfdxCommand { - public static title = "Generate SFDX Plugin Documentation"; + public static description = ` +## Command Behavior - public static description = `Generate Markdown documentation ready for HTML conversion with mkdocs +**Generates Markdown documentation for an SF CLI plugin, ready for conversion into HTML with MkDocs.** -After the first run, you need to update manually: +This command automates the creation of comprehensive documentation for your Salesforce CLI plugin. It processes your plugin's commands and their flags to generate structured Markdown files, which can then be used with MkDocs to produce a professional-looking website. -- mkdocs.yml -- .github/workflows/build-deploy-docs.yml -- docs/javascripts/gtag.js , if you want Google Analytics tracking +Key functionalities: -Then, activate Github pages, with "gh_pages" as target branch +- **Command Documentation:** Generates a dedicated Markdown file for each command, including its description, parameters (flags), and examples. +- **Index and Commands Pages:** Creates an \`index.md\` and \`commands.md\` file that list all available commands, providing an overview and easy navigation. +- **MkDocs Integration:** Sets up the basic MkDocs project structure and updates the \`mkdocs.yml\` navigation to include the generated command documentation. +- **Default File Copying:** Copies essential MkDocs configuration files and GitHub Actions workflows to your project, streamlining the setup for continuous documentation deployment. 
-At each merge into master/main branch, the GitHub Action build-deploy-docs will rebuild documentation and publish it in GitHub pages +**Post-Generation Steps:** + +After the initial run, you will need to manually update: + +- \`mkdocs.yml\`: Customize the project title, theme, and other MkDocs settings. +- \`.github/workflows/build-deploy-docs.yml\`: Configure the GitHub Actions workflow for automatic documentation deployment. +- \`docs/javascripts/gtag.js\`: If desired, set up Google Analytics tracking. + +Finally, activate GitHub Pages with \`gh_pages\` as the target branch. This will enable automatic documentation rebuilding and publishing to GitHub Pages upon each merge into your \`master\`/\`main\` branch. + +
+Technical explanations + +The command's technical implementation involves: + +- **Plugin Configuration Loading:** It loads the SF CLI plugin's configuration using \`@oclif/core\`'s \`Config.load()\`, which provides access to all registered commands and their metadata. +- **Command Iteration:** It iterates through each command defined in the plugin's configuration. +- **Markdown File Generation:** For each command, it constructs a Markdown file (\`.md\`) containing: + - The command ID as the main heading. + - The command's \`description\` property. + - A table of parameters (flags), including their name, type, description, default value, required status, and available options. It dynamically extracts this information from the command's \`flags\` property. + - Code blocks for each example provided in the command's \`examples\` property. +- **Navigation Structure:** It builds a nested JavaScript object (\`commandsNav\`) that mirrors the command hierarchy, which is then converted to YAML and inserted into \`mkdocs.yml\` to create the navigation menu. +- **Index and Commands Page Generation:** It reads the project's \`README.md\` and extracts relevant sections to create the \`index.md\` file. It also generates a separate \`commands.md\` file listing all commands. +- **File System Operations:** It uses \`fs-extra\` to create directories, copy default MkDocs files (\`defaults/mkdocs\`), and write the generated Markdown and YAML files. +- **YAML Serialization:** It uses \`js-yaml\` to serialize the navigation object into YAML format for \`mkdocs.yml\`. +
`; - public static examples = ["$ sfdx hardis:doc:plugin:generate"]; + public static examples = ['$ sf hardis:doc:plugin:generate']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(DocPluginGenerate); + this.debugMode = flags.debug || false; // Load plugin configuration const cwd = process.cwd(); const config = await Config.load({ root: cwd, devPlugins: false, userPlugins: false }); // Generate commands markdowns - const commandsNav = { "All commands": "commands.md" }; + const commandsNav = { 'All commands': 'commands.md' }; const commandsLinks = {}; for (const command of config.commands) { await this.generateCommandDoc(command); - const commandsSplit = command.id.split(":"); + const commandsSplit = command.id.split(':'); const commandName = 
commandsSplit.pop(); - const commandMdPath = commandsSplit.join("/") + `/${commandName}.md`; + const commandMdPath = commandsSplit.join('/') + `/${commandName}.md`; const navItem = {}; - navItem[commandName] = commandMdPath; - set(commandsNav, commandsSplit.join("."), navItem, { preservePaths: true, merge: true }); + navItem[commandName || ''] = commandMdPath; + set(commandsNav, commandsSplit.join('.'), navItem, { preservePaths: true, merge: true }); commandsLinks[command.id] = commandMdPath; } - uxLog(this, yaml.dump(commandsNav)); + uxLog("other", this, yaml.dump(commandsNav)); // Generate index.md await this.generateIndexDoc(config, commandsLinks); - // Copy default files (mkdocs.yml and other files can be updated by the sfdx plugin developer later) - const mkdocsYmlFile = path.join(process.cwd(), "mkdocs.yml"); + // Copy default files (mkdocs.yml and other files can be updated by the SF Cli plugin developer later) + const mkdocsYmlFile = path.join(process.cwd(), 'mkdocs.yml'); const mkdocsYmlFileExists = fs.existsSync(mkdocsYmlFile); - await fs.copy(path.join(PACKAGE_ROOT_DIR, "defaults/mkdocs", "."), process.cwd(), { overwrite: false }); + await fs.copy(path.join(PACKAGE_ROOT_DIR, 'defaults/mkdocs', '.'), process.cwd(), { overwrite: false }); if (!mkdocsYmlFileExists) { - uxLog(this, c.blue("Base mkdocs files copied in your sfdx plugin folder")); - uxLog(this, c.yellow("You should probably manually update mkdocs.yml and build-deploy-docs.yml with your repo & plugin information")); + uxLog("log", this, c.grey('Base mkdocs files copied in your SF Cli plugin folder')); + uxLog( + "warning", + this, + c.yellow( + 'You should probably manually update mkdocs.yml and build-deploy-docs.yml with your repo & plugin information' + ) + ); } // Remove changelog if not existing - if (!fs.existsSync(path.join(process.cwd(), "CHANGELOG.md")) && fs.existsSync(path.join(process.cwd(), "docs", "CHANGELOG.md"))) { - await fs.remove(path.join(process.cwd(), "docs", 
"CHANGELOG.md")); + if ( + !fs.existsSync(path.join(process.cwd(), 'CHANGELOG.md')) && + fs.existsSync(path.join(process.cwd(), 'docs', 'CHANGELOG.md')) + ) { + await fs.remove(path.join(process.cwd(), 'docs', 'CHANGELOG.md')); } // Remove license if not existing - if (!fs.existsSync(path.join(process.cwd(), "LICENSE")) && fs.existsSync(path.join(process.cwd(), "docs", "license.md"))) { - await fs.remove(path.join(process.cwd(), "docs", "license.md")); + if ( + !fs.existsSync(path.join(process.cwd(), 'LICENSE')) && + fs.existsSync(path.join(process.cwd(), 'docs', 'license.md')) + ) { + await fs.remove(path.join(process.cwd(), 'docs', 'license.md')); } // Update mkdocs nav items - const mkdocsYml = yaml.load( - fs - .readFileSync(mkdocsYmlFile, "utf-8") - .replace("!!python/name:materialx.emoji.twemoji", "'!!python/name:materialx.emoji.twemoji'") - .replace("!!python/name:materialx.emoji.to_svg", "'!!python/name:materialx.emoji.to_svg'"), - ); + const mkdocsYml: any = readMkDocsFile(mkdocsYmlFile); mkdocsYml.nav = mkdocsYml.nav.map((navItem: any) => { - if (navItem["Commands"]) { - navItem["Commands"] = commandsNav; + if (navItem['Commands']) { + navItem['Commands'] = commandsNav; } return navItem; }); - const mkdocsYmlStr = yaml - .dump(mkdocsYml) - .replace("'!!python/name:materialx.emoji.twemoji'", "!!python/name:materialx.emoji.twemoji") - .replace("'!!python/name:materialx.emoji.to_svg'", "!!python/name:materialx.emoji.to_svg"); - await fs.writeFile(mkdocsYmlFile, mkdocsYmlStr); - uxLog(this, c.cyan(`Updated ${c.green(mkdocsYmlFile)}`)); + await writeMkDocsFile(mkdocsYmlFile, mkdocsYml); // Return an object to be displayed with --json return { outputString: `Generated documentation` }; @@ -135,12 +159,12 @@ At each merge into master/main branch, the GitHub Action build-deploy-docs will // Generate index file private async generateIndexDoc(config: any, commandsLinks: any) { const lines = [ - "", + "", ]; - const readme = await 
fs.readFile(path.join(process.cwd(), "README.md"), "utf8"); + const readme = await fs.readFile(path.join(process.cwd(), 'README.md'), 'utf8'); let reusableReadmePartFound = false; // Try to find README content until auto-generated commands - const limitStrings = ["## Commands", "## COMMANDS", "# Commands", ""]; + const limitStrings = ['## Commands', '## COMMANDS', '# Commands', '']; for (const limitString of limitStrings) { if (readme.indexOf(limitString) > 0) { lines.push(...readme.substring(0, readme.indexOf(limitString)).split(/\r?\n/)); @@ -150,87 +174,102 @@ At each merge into master/main branch, the GitHub Action build-deploy-docs will } // Default index.md if (reusableReadmePartFound === false) { - lines.push(...["", `# ${config.pjson.name}`, "", "## Description", "", config.pjson.description.split("\n").join("
"), ""]); + lines.push( + ...[ + '', + `# ${config.pjson.name}`, + '', + '## Description', + '', + config.pjson.description.split('\n').join('
'), + '', + ] + ); } // Build commands (for index.md and commands.md) - const cmdLines = []; - lines.push(...["", "## Commands"]); - cmdLines.push("# Commands"); - let currentSection = ""; - for (const command of sortArray(config.commands, { by: ["id"], order: ["asc"] })) { - const section = command.id.split(":")[0] + ":" + command.id.split(":")[1]; + const cmdLines: any[] = []; + lines.push(...['', '## Commands']); + cmdLines.push('# Commands'); + let currentSection = ''; + for (const command of sortArray(config.commands, { by: ['id'], order: ['asc'] }) as any[]) { + const section = command.id.split(':')[0] + ':' + command.id.split(':')[1]; if (section !== currentSection) { - lines.push(...["", `### ${section}`, "", "|Command|Title|", "|:------|:----------|"]); - cmdLines.push(...["", `## ${section}`, "", "|Command|Title|", "|:------|:----------|"]); + lines.push(...['', `### ${section}`, '', '|Command|Title|', '|:------|:----------|']); + cmdLines.push(...['', `## ${section}`, '', '|Command|Title|', '|:------|:----------|']); currentSection = section; } const commandInstance = command.load(); - const title = commandInstance.title ? commandInstance.title : commandInstance.description ? commandInstance.description.split("\n")[0] : ""; + const title = commandInstance.title + ? commandInstance.title + : commandInstance.description + ? 
commandInstance.description.split('\n')[0] + : ''; lines.push(...[`|[**${command.id}**](${commandsLinks[command.id]})|${title}|`]); cmdLines.push(...[`|[**${command.id}**](${commandsLinks[command.id]})|${title}|`]); } // Create docs dir if not existing yet - await fs.ensureDir(path.join(process.cwd(), "docs")); + await fs.ensureDir(path.join(process.cwd(), 'docs')); // write in index.md - const indexMdFile = path.join(process.cwd(), "docs", "index.md"); - const indexMdString = lines.join("\n") + "\n"; + const indexMdFile = path.join(process.cwd(), 'docs', 'index.md'); + const indexMdString = lines.join('\n') + '\n'; await fs.writeFile(indexMdFile, indexMdString); // write in commands.md - const commandsMdFile = path.join(process.cwd(), "docs", "commands.md"); - const commandsMdString = cmdLines.join("\n") + "\n"; + const commandsMdFile = path.join(process.cwd(), 'docs', 'commands.md'); + const commandsMdString = cmdLines.join('\n') + '\n'; await fs.writeFile(commandsMdFile, commandsMdString); } // Generate markdown doc for a single command private async generateCommandDoc(command: any) { const lines = [ - "", + "", ]; // Title - const titleLines = [`# ${command.id}`, ""]; + const titleLines = [`# ${command.id}`, '']; lines.push(...titleLines); // Description - const descriptionLines = [`## Description`, "", ...(command.description || "").split("\n"), ""]; + const descriptionLines = [`## Description`, '', ...(command.description || '').split('\n'), '']; lines.push(...descriptionLines); // Flags const flagLines = [ `## Parameters`, - "", - "|Name|Type|Description|Default|Required|Options|", - "|:---|:--:|:----------|:-----:|:------:|:-----:|", + '', + '|Name|Type|Description|Default|Required|Options|', + '|:---|:--:|:----------|:-----:|:------:|:-----:|', ...Object.keys(command.flags || {}) .sort() .map((flagKey: string) => { const flag = command.flags[flagKey]; - const optionsUnique = []; + const optionsUnique: any[] = []; for (const option of flag.options || []) { 
if (optionsUnique.filter((o) => o.toLowerCase() === option.toLowerCase()).length === 0) { optionsUnique.push(option); } } - return `|${flag.name + (flag.char ? `
-${flag.char}` : "")}|${flag.type}|${flag.description}|${flag.default || ""}|${ - flag.required ? "" : "" - }|${optionsUnique.join("
")}|`; + return `|${flag.name + (flag.char ? `
-${flag.char}` : '')}|${flag.type}|${flag.description}|${flag.default || '' + }|${flag.required ? '' : ''}|${optionsUnique.join('
')}|`; }), - "", + '', ]; lines.push(...flagLines); // Examples const exampleLines = [ `## Examples`, - "", - ...(command.examples || []).map((example: string) => ["```shell", ...(example || "").split("\n"), "```", ""]).flat(), - "", + '', + ...(command.examples || []) + .map((example: string) => ['```shell', ...(example || '').split('\n'), '```', '']) + .flat(), + '', ]; lines.push(...exampleLines); // Write to file - const mdFileName = path.join(process.cwd(), "docs", path.sep, command.id.replace(/:/g, "/") + ".md"); + const mdFileName = path.join(process.cwd(), 'docs', path.sep, command.id.replace(/:/g, '/') + '.md'); await fs.ensureDir(path.dirname(mdFileName)); - const yamlString = lines.join("\n") + "\n"; + const yamlString = lines.join('\n') + '\n'; await fs.writeFile(mdFileName, yamlString); - uxLog(this, c.grey("Generated file " + c.bold(mdFileName))); + uxLog("log", this, c.grey('Generated file ' + c.bold(mdFileName))); } } diff --git a/src/commands/hardis/doc/project2markdown.ts b/src/commands/hardis/doc/project2markdown.ts new file mode 100644 index 000000000..c41a77e94 --- /dev/null +++ b/src/commands/hardis/doc/project2markdown.ts @@ -0,0 +1,1607 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import fs from 'fs-extra'; +import c from "chalk"; +import * as path from "path"; +import { process as ApexDocGen } from '@cparra/apexdocs'; +import { XMLBuilder, XMLParser } from "fast-xml-parser"; +import sortArray from 'sort-array'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { completeAttributesDescriptionWithAi, getMetaHideLines, readMkDocsFile, replaceInFile, writeMkDocsFile } from '../../../common/docBuilder/docUtils.js'; +import { parseXmlFile } from '../../../common/utils/xmlUtils.js'; +import { bool2emoji, createTempDir, execCommand, 
execSfdxJson, filterPackageXml, getCurrentGitBranch, sortCrossPlatform, uxLog } from '../../../common/utils/index.js'; +import { CONSTANTS, getConfig } from '../../../config/index.js'; +import { listMajorOrgs } from '../../../common/utils/orgConfigUtils.js'; +import { glob } from 'glob'; +import { GLOB_IGNORE_PATTERNS, listApexFiles, listFlowFiles, listPageFiles, returnApexType } from '../../../common/utils/projectUtils.js'; +import { generateFlowMarkdownFile, generateHistoryDiffMarkdown, generateMarkdownFileWithMermaid } from '../../../common/utils/mermaidUtils.js'; +import { MetadataUtils } from '../../../common/metadata-utils/index.js'; +import { PACKAGE_ROOT_DIR } from '../../../settings.js'; +import { BranchStrategyMermaidBuilder } from '../../../common/utils/branchStrategyMermaidBuilder.js'; +import { prettifyFieldName } from '../../../common/utils/flowVisualiser/nodeFormatUtils.js'; +import { ObjectModelBuilder } from '../../../common/docBuilder/objectModelBuilder.js'; +import { generatePdfFileFromMarkdown } from '../../../common/utils/markdownUtils.js'; +import { DocBuilderPage } from '../../../common/docBuilder/docBuilderPage.js'; +import { DocBuilderProfile } from '../../../common/docBuilder/docBuilderProfile.js'; +import { DocBuilderObject } from '../../../common/docBuilder/docBuilderObject.js'; +import { DocBuilderApex } from '../../../common/docBuilder/docBuilderApex.js'; +import { DocBuilderFlow } from '../../../common/docBuilder/docBuilderFlow.js'; +import { DocBuilderLwc } from '../../../common/docBuilder/docBuilderLwc.js'; +import { DocBuilderPackageXML } from '../../../common/docBuilder/docBuilderPackageXml.js'; +import { DocBuilderPermissionSet } from '../../../common/docBuilder/docBuilderPermissionSet.js'; +import { DocBuilderPermissionSetGroup } from '../../../common/docBuilder/docBuilderPermissionSetGroup.js'; +import { DocBuilderAssignmentRules } from '../../../common/docBuilder/docBuilderAssignmentRules.js'; +import { 
DocBuilderApprovalProcess } from '../../../common/docBuilder/docBuilderApprovalProcess.js'; +import { DocBuilderAutoResponseRules } from "../../../common/docBuilder/docBuilderAutoResponseRules.js"; +import { DocBuilderEscalationRules } from '../../../common/docBuilder/docBuilderEscalationRules.js'; +import { DocBuilderRoles } from '../../../common/docBuilder/docBuilderRoles.js'; +import { DocBuilderPackage } from '../../../common/docBuilder/docBuilderPackage.js'; +import { setConnectionVariables } from '../../../common/utils/orgUtils.js'; +import { makeFileNameGitCompliant } from '../../../common/utils/gitUtils.js'; + + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class Project2Markdown extends SfCommand { + public static title = 'SFDX Project to Markdown'; + + public static htmlInstructions = `## Doc HTML Pages + +To read the documentation as HTML pages, run the following code (you need [**Python**](https://www.python.org/downloads/) on your computer) + +\`\`\`python +pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || python -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || py -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists +mkdocs serve -v || python -m mkdocs serve -v || py -m mkdocs serve -v +\`\`\` + +To just generate HTML pages that you can host anywhere, run \`mkdocs build -v || python -m mkdocs build -v || py -m mkdocs build -v\` +` + + public static description = `Generates a markdown documentation from a SFDX project + +- Objects (with fields, validation rules, relationships and dependencies) +- Automations + - Approval Processes + - Assignment Rules + - AutoResponse Rules + - Escalation Rules + - Flows +- Authorizations + - Profiles + - Permission Set Groups + - Permission Sets +- Code + - Apex + - Lightning Web Components +- Lightning Pages +- Packages +- SFDX-Hardis Config +- 
Branches & Orgs + +- Manifests + +Can work on any sfdx project, no need for it to be a sfdx-hardis flavored one. + +Generated markdown files will be written in the **docs** folder (except README.md where a link to doc index is added) + +- You can customize the pages following [mkdocs-material setup documentation](https://squidfunk.github.io/mkdocs-material/setup/) +- You can manually add new markdown files in the "docs" folder to extend this documentation and add references to them in "mkdocs.yml" +- You can also add images in folder "docs/assets" and embed them in markdown files. + +To read Flow documentation if your markdown reader doesn't handle MermaidJS syntax, this command could require @mermaid-js/mermaid-cli + +- Run \`npm install @mermaid-js/mermaid-cli --global\` if puppeteer works in your environment +- It can also be run as a docker image + +Both modes will be tried by default, but you can also force one of them by defining environment variable \`MERMAID_MODES=docker\` or \`MERMAID_MODES=cli\` + +_sfdx-hardis docker image is alpine-based and does not succeed to run mermaid/puppeteer: if you can help, please submit a PR!_ + +If Flow history doc always displays a single state, you probably need to update your workflow configuration: + +- on Gitlab: Env variable [\`GIT_FETCH_EXTRA_FLAGS: --depth 10000\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.gitlab-ci.yml#L11) +- on GitHub: [\`fetch-depth: 0\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.github/workflows/org-monitoring.yml#L58) +- on Azure: [\`fetchDepth: "0"\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/azure-pipelines.yml#L39) +- on Bitbucket: [\`step: clone: depth: full\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/bitbucket-pipelines.yml#L18) + +![Screenshot flow doc](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-flow-doc.jpg) + 
+![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-project-doc.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-project-doc-2.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-object-diagram.jpg) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-project-doc-profile.gif) + +![Screenshot project documentation](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-doc-apex.png) + +If it is a sfdx-hardis CI/CD project, a diagram of the branches and orgs strategy will be generated. + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-doc-branches-strategy.jpg) + +If [AI integration](${CONSTANTS.DOC_URL_ROOT}/salesforce-ai-setup/) is configured, documentation will contain a summary of the Flow. + +- Use variable PROMPTS_LANGUAGE (ex: PROMPTS_LANGUAGE=fr) to force language for LLM calls (default:en) + +If you have a complex strategy, you might need to input property **mergeTargets** in branch-scoped sfdx-hardis.yml file to have a correct diagram. + +Define DO_NOT_OVERWRITE_INDEX_MD=true to avoid overwriting the index.md file in docs folder, useful if you want to keep your own index.md file. 
+ +${this.htmlInstructions} +`; + + public static examples = [ + '$ sf hardis:doc:project2markdown', + '$ sf hardis:doc:project2markdown --with-history', + '$ sf hardis:doc:project2markdown --with-history --pdf', + '$ sf hardis:doc:project2markdown --hide-apex-code' + ]; + + public static flags: any = { + "diff-only": Flags.boolean({ + default: false, + description: "Generate documentation only for changed files (used for monitoring)", + }), + "with-history": Flags.boolean({ + default: false, + description: "Generate a markdown file with the history diff of the Flow", + }), + pdf: Flags.boolean({ + description: 'Also generate the documentation in PDF format', + }), + "hide-apex-code": Flags.boolean({ + default: false, + description: "Hide Apex code in the generated documentation for Apex classes.", + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + "target-org": optionalOrgFlagWithDeprecations + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected diffOnly: boolean = false; + protected packageXmlCandidates: any[]; + protected outputMarkdownRoot = "docs" + protected outputMarkdownIndexFile = path.join(this.outputMarkdownRoot, "index.md") + protected mdLines: string[] = []; + protected sfdxHardisConfig: any = {}; + protected outputPackageXmlMarkdownFiles: any[] = []; + protected mkDocsNavNodes: any[] = [{ "Home": "index.md" }]; + protected withHistory = false; + protected withPdf = false; + protected hideApexCode = false; + protected debugMode = false; + protected footer: string; + protected apexDescriptions: any[] = []; + protected flowDescriptions: any[] = []; + protected lwcDescriptions: any[] 
= []; + protected packageDescriptions: any[] = []; + protected pageDescriptions: any[] = []; + protected profileDescriptions: any[] = []; + protected permissionSetsDescriptions: any[] = []; + protected permissionSetGroupsDescriptions: any[] = []; + protected assignmentRulesDescriptions: any[] = []; + protected autoResponseRulesDescriptions: any[] = []; + protected approvalProcessesDescriptions: any[] = []; + protected escalationRulesDescriptions: any[] = []; + protected roleDescriptions: any[] = []; + protected objectDescriptions: any[] = []; + protected objectFiles: string[]; + protected allObjectsNames: string[]; + protected tempDir: string; + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(Project2Markdown); + this.diffOnly = flags["diff-only"] === true ? true : false; + this.withHistory = flags["with-history"] === true ? true : false; + this.withPdf = flags.pdf === true ? true : false; + this.hideApexCode = flags["hide-apex-code"] === true || process?.env?.HIDE_APEX_CODE === 'true' ? 
true : false; + this.debugMode = flags.debug || false; + await setConnectionVariables(flags['target-org']?.getConnection(), true);// Required for some notifications providers like Email, or for Agentforce + + await fs.ensureDir(this.outputMarkdownRoot); + const currentBranch = await getCurrentGitBranch() + this.footer = `_Documentation generated from branch ${currentBranch} with [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}) by [Cloudity](${CONSTANTS.WEBSITE_URL}) command [\`sf hardis:doc:project2markdown\`](https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/)_`; + + this.mdLines.push(...[ + "Welcome to the documentation of your Salesforce project.", + "", + // "- [Object Model](object-model.md)", + "- [Objects](objects/index.md)", + "- Automations", + " - [Approval Processes](approvalProcesses/index.md)", + " - [Assignment Rules](assignmentRules/index.md)", + " - [AutoResponse Rules](autoResponseRules/index.md)", + " - [Escalation Rules](escalationRules/index.md)", + " - [Flows](flows/index.md)", + "- Authorizations", + " - [Profiles](profiles/index.md)", + " - [Permission Set Groups](permissionsetgroups/index.md)", + " - [Permission Sets](permissionsets/index.md)", + "- [Roles](roles.md)", + "- Code", + " - [Apex](apex/index.md)", + " - [Lightning Web Components](lwc/index.md)", + "- [Lightning Pages](pages/index.md)", + "- [Packages](packages/index.md)", + "- [SFDX-Hardis Config](sfdx-hardis-params.md)", + "- [Branches & Orgs](sfdx-hardis-branches-and-orgs.md)", + "- [Manifests](manifests.md)", + "" + ]); + + let sfdxHardisParamsLines = ["Available only in a [sfdx-hardis CI/CD project](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/)"]; + let branchesAndOrgsLines = ["Available only in a [sfdx-hardis CI/CD project](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/)"]; + if (fs.existsSync("config/.sfdx-hardis.yml")) { + this.sfdxHardisConfig = await getConfig("project"); + // General sfdx-hardis config + sfdxHardisParamsLines = 
this.buildSfdxHardisParams(); + // Branches & orgs + branchesAndOrgsLines = await this.buildMajorBranchesAndOrgs(); + } + await fs.writeFile(path.join(this.outputMarkdownRoot, "sfdx-hardis-params.md"), getMetaHideLines() + sfdxHardisParamsLines.join("\n") + `\n${this.footer}\n`); + this.addNavNode("SFDX-Hardis Config", "sfdx-hardis-params.md"); + await fs.writeFile(path.join(this.outputMarkdownRoot, "sfdx-hardis-branches-and-orgs.md"), getMetaHideLines() + branchesAndOrgsLines.join("\n") + `\n${this.footer}\n`); + this.addNavNode("Branches & Orgs", "sfdx-hardis-branches-and-orgs.md"); + + // Object model Mermaid schema + /* Disabled: too messy to read + let mermaidSchema = await new ObjectModelBuilder().buildObjectsMermaidSchema(); + mermaidSchema = "```mermaid\n" + mermaidSchema + "\n```"; + await fs.writeFile(path.join(this.outputMarkdownRoot, "object-model.md"), getMetaHideLines() + mermaidSchema + `\n${this.footer}\n`); + this.addNavNode("Object Model", "object-model.md"); + */ + + // List SFDX packages and generate a manifest for each of them, except if there is only force-app with a package.xml + this.packageXmlCandidates = DocBuilderPackageXML.listPackageXmlCandidates(); + await this.manageLocalPackages(); + const instanceUrl = flags?.['target-org']?.getConnection()?.instanceUrl; + await this.generatePackageXmlMarkdown(this.packageXmlCandidates, instanceUrl); + const { packageLines, packagesForMenu } = await DocBuilderPackageXML.buildIndexTable(this.outputPackageXmlMarkdownFiles); + this.addNavNode("Manifests", packagesForMenu); + await fs.writeFile(path.join(this.outputMarkdownRoot, "manifests.md"), getMetaHideLines() + packageLines.join("\n") + `\n${this.footer}\n`); + + this.tempDir = await createTempDir() + // Convert source to metadata API format to build prompts + uxLog("action", this, c.cyan("Converting source to metadata API format to ease the build of LLM prompts...")); + await execCommand(`sf project convert source --metadata CustomObject 
--output-dir ${this.tempDir}`, this, { fail: true, output: true, debug: this.debugMode }); + this.objectFiles = (await glob("**/*.object", { cwd: this.tempDir, ignore: GLOB_IGNORE_PATTERNS })); + sortCrossPlatform(this.objectFiles); + this.allObjectsNames = this.objectFiles.map(object => path.basename(object, ".object")); + + // Generate packages documentation + if (!(process?.env?.GENERATE_PACKAGES_DOC === 'false')) { + await this.generatePackagesDocumentation(); + } + + // Generate Apex doc + if (!(process?.env?.GENERATE_APEX_DOC === 'false')) { + await this.generateApexDocumentation(); + } + + // List flows & generate doc + if (!(process?.env?.GENERATE_FLOW_DOC === 'false')) { + await this.generateFlowsDocumentation(); + } + + // List pages & generate doc + if (!(process?.env?.GENERATE_PAGES_DOC === 'false')) { + await this.generatePagesDocumentation(); + } + + // List profiles & generate doc + if (!(process?.env?.GENERATE_PROFILES_DOC === 'false')) { + await this.generateProfilesDocumentation(); + await this.generatePermissionSetGroupsDocumentation(); + await this.generatePermissionSetsDocumentation(); + await this.generateRolesDocumentation(); + } + + // List objects & generate doc + if (!(process?.env?.GENERATE_OBJECTS_DOC === 'false')) { + await this.generateObjectsDocumentation(); + } + + if (!(process?.env?.GENERATE_AUTOMATIONS_DOC === 'false')) { + // List approval processes & generate doc + await this.generateApprovalProcessDocumentation(); + // List assignment rules and generate doc + await this.generateAssignmentRulesDocumentation(); + // List auto response rules and generate doc + await this.generateAutoResponseRulesDocumentation(); + // List escalation rules and generate doc + await this.generateEscalationRulesDocumentation(); + } + + // List LWC & generate doc + if (!(process?.env?.GENERATE_LWC_DOC === 'false')) { + await this.generateLwcDocumentation(); + } + + // Write output index file + await 
fs.ensureDir(path.dirname(this.outputMarkdownIndexFile)); + if (process.env.DO_NOT_OVERWRITE_INDEX_MD !== 'true' || !fs.existsSync(this.outputMarkdownIndexFile)) { + await fs.writeFile(this.outputMarkdownIndexFile, getMetaHideLines() + this.mdLines.join("\n") + `\n\n${this.footer}\n`); + uxLog("success", this, c.green(`Successfully generated doc index at ${this.outputMarkdownIndexFile}`)); + } + + const readmeFile = path.join(process.cwd(), "README.md"); + if (fs.existsSync(readmeFile)) { + let readme = await fs.readFile(readmeFile, "utf8"); + if (!readme.includes("docs/index.md")) { + readme += ` + +## Documentation + +[Read auto-generated documentation of the SFDX project](docs/index.md) + +${Project2Markdown.htmlInstructions} +`; + await fs.writeFile(readmeFile, readme); + uxLog("success", this, c.green(`Updated README.md to add link to docs/index.md`)); + } + } + + await this.buildMkDocsYml(); + + // Delete files found in docs folder that contain characters not compliant with Windows file system + // (e.g. 
/, \, :, *, ?, ", <, >, |) + const filesToDelete = await glob("**/*", { cwd: this.outputMarkdownRoot, nodir: true }); + for (const file of filesToDelete) { + const fileName = path.basename(file); + if (fileName.includes("/") || fileName.includes("\\") || fileName.includes(":") || fileName.includes("*") || fileName.includes("?") || fileName.includes('"') || fileName.includes("<") || fileName.includes(">") || fileName.includes("|")) { + const filePath = path.join(this.outputMarkdownRoot, file); + uxLog("warning", this, c.yellow(`Deleting file ${filePath} because it contains characters not compliant with Windows file system`)); + await fs.remove(filePath); + } + } + + + // Open file in a new VsCode tab if available + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(this.outputMarkdownIndexFile, "Project documentation Index", "report"); + } + else { + WebSocketClient.requestOpenFile(this.outputMarkdownIndexFile); + } + + return { outputPackageXmlMarkdownFiles: this.outputPackageXmlMarkdownFiles }; + } + + private async generateApexDocumentation() { + uxLog("action", this, c.cyan("Calling ApexDocGen to initialize Apex documentation... 
(if you don't want it, define GENERATE_APEX_DOC=false in your environment variables)")); + const tempDir = await createTempDir(); + uxLog("log", this, c.grey(`Using temp directory ${tempDir}`)); + const packageDirs = this.project?.getPackageDirectories() || []; + for (const packageDir of packageDirs) { + try { + await ApexDocGen({ + sourceDir: packageDir.path, + targetDir: tempDir, + exclude: ["**/MetadataService.cls"], + scope: ['global', 'public', 'private'], + targetGenerator: "markdown" + }); + // Copy files to apex folder + const apexDocFolder = path.join(this.outputMarkdownRoot, "apex"); + await fs.ensureDir(apexDocFolder); + await fs.copy(path.join(tempDir, "miscellaneous"), apexDocFolder, { overwrite: true }); + uxLog("log", this, c.grey(`Generated markdown for Apex classes in ${apexDocFolder}`)); + } + catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating Apex documentation: ${JSON.stringify(e, null, 2)}`)); + uxLog("log", this, c.grey(e.stack)); + } + /* + await ApexDocGen({ + sourceDir: packageDir.path, + targetDir: tempDir, + targetGenerator: "openapi" + }); + */ + } + const apexFiles = await listApexFiles(packageDirs); + const apexClassNames = apexFiles.map(file => path.basename(file, ".cls")).filter(name => !name.endsWith(".trigger")); + + // Build relationship between apex classes and objects + for (const apexFile of apexFiles) { + const apexName = path.basename(apexFile, ".cls").replace(".trigger", ""); + const apexContent = await fs.readFile(apexFile, "utf8"); + this.apexDescriptions.push({ + name: apexName, + type: returnApexType(apexContent), + impactedObjects: this.allObjectsNames.filter(objectName => apexContent.includes(`${objectName}`)), + relatedClasses: apexClassNames.filter(name => apexContent.includes(`${name}`) && name !== apexName), + }); + } + + // Complete generated documentation + if (apexFiles.length === 0) { + uxLog("log", this, c.yellow("No Apex class found in the project")); + return; + } + const apexForMenu: any = 
{ "All Apex Classes": "apex/index.md" } + WebSocketClient.sendProgressStartMessage("Generating Apex documentation...", apexFiles.length); + let counter = 0; + for (const apexFile of apexFiles) { + const apexName = path.basename(apexFile, ".cls").replace(".trigger", ""); + const apexContent = await fs.readFile(apexFile, "utf8"); + const mdFile = path.join(this.outputMarkdownRoot, "apex", apexName + ".md"); + if (fs.existsSync(mdFile)) { + const apexName = path.basename(apexFile, ".cls").replace(".trigger", ""); + apexForMenu[apexName] = "apex/" + apexName + ".md"; + let apexMdContent = await fs.readFile(mdFile, "utf8"); + // Replace object links + apexMdContent = apexMdContent.replaceAll("..\\custom-objects\\", "../objects/").replaceAll("../custom-objects/", "../objects/") + // Add text before the first ## + if (!["MetadataService"].includes(apexName) && + // Do not mess with existing apex doc if generation has crashed + !apexMdContent.includes(getMetaHideLines())) { + const mermaidClassDiagram = DocBuilderApex.buildMermaidClassDiagram(apexName, this.apexDescriptions); + let insertion = `${mermaidClassDiagram}\n\n\n\n`; + if (!this.hideApexCode) { + insertion += `## Apex Code\n\n\`\`\`java\n${apexContent}\n\`\`\`\n\n`; + } + const firstHeading = apexMdContent.indexOf("## "); + apexMdContent = apexMdContent.substring(0, firstHeading) + insertion + apexMdContent.substring(firstHeading); + const apexDocBuilder = new DocBuilderApex(apexName, apexContent, "", { + "CLASS_NAME": apexName, + "APEX_CODE": apexContent + }); + apexDocBuilder.markdownDoc = apexMdContent; + apexMdContent = await apexDocBuilder.completeDocWithAiDescription(); + await fs.writeFile(mdFile, getMetaHideLines() + apexMdContent); + } + uxLog("log", this, c.grey(`Generated markdown for Apex class ${apexName}`)); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, apexFiles.length); + } + 
WebSocketClient.sendProgressEndMessage(); + this.addNavNode("Apex", apexForMenu); + + // Write index file for apex folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "apex")); + const apexIndexFile = path.join(this.outputMarkdownRoot, "apex", "index.md"); + await fs.writeFile(apexIndexFile, getMetaHideLines() + DocBuilderApex.buildIndexTable('', this.apexDescriptions).join("\n") + `\n\n${this.footer}\n`); + } + + private async generatePackagesDocumentation() { + const packagesForMenu: any = { "All Packages": "packages/index.md" } + // List packages + const packages = this.sfdxHardisConfig.installedPackages || []; // CI/CD context + const packageFolder = path.join(process.cwd(), 'installedPackages'); // Monitoring context + if (packages.length === 0 && fs.existsSync(packageFolder)) { + const findManagedPattern = "**/*.json"; + const matchingPackageFiles = await glob(findManagedPattern, { cwd: packageFolder, ignore: GLOB_IGNORE_PATTERNS }); + for (const packageFile of matchingPackageFiles) { + const packageFileFull = path.join(packageFolder, packageFile); + if (!fs.existsSync(packageFileFull)) { + continue; + } + const pckg = await fs.readJSON(packageFileFull); + packages.push(pckg); + } + } + WebSocketClient.sendProgressStartMessage("Generating Installed Packages documentation...", packages.length); + let counter = 0; + // Process packages + for (const pckg of packages) { + const packageName = pckg.SubscriberPackageName; + const mdFile = path.join(this.outputMarkdownRoot, "packages", makeFileNameGitCompliant(packageName) + ".md"); + // Generate package page and add it to menu + packagesForMenu[packageName] = "packages/" + makeFileNameGitCompliant(packageName) + ".md"; + this.packageDescriptions.push({ + name: packageName, + namespace: pckg.SubscriberPackageNamespace || "None", + versionNumber: pckg.SubscriberPackageVersionNumber || "Unknown", + versionName: pckg.SubscriberPackageVersionName || "Unknown", + versionId: pckg.SubscriberPackageVersionId || 
"Unknown", + }); + let packageMetadatas = "Unable to list package Metadatas"; + const packageWithAllMetadatas = path.join(process.cwd(), "manifest", "package-all-org-items.xml"); + const tmpOutput = path.join(this.tempDir, pckg.SubscriberPackageVersionId + ".xml"); + if (fs.existsSync(packageWithAllMetadatas) && pckg.SubscriberPackageNamespace) { + const filterRes = await filterPackageXml(packageWithAllMetadatas, tmpOutput, { keepOnlyNamespaces: [pckg.SubscriberPackageNamespace] }) + if (filterRes.updated) { + packageMetadatas = await fs.readFile(tmpOutput, "utf8"); + } + } + // Add Packages in documentation + await new DocBuilderPackage(makeFileNameGitCompliant(packageName), pckg, mdFile, { + "PACKAGE_METADATAS": packageMetadatas, + "PACKAGE_FILE": tmpOutput + }).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + // Recovery to save git repos: Kill existing file if it has been created with forbidden characters + const mdFileBad = path.join(this.outputMarkdownRoot, "packages", packageName + ".md"); + if (mdFileBad !== mdFile && fs.existsSync(mdFileBad)) { + await fs.remove(mdFileBad); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, packages.length); + } + this.addNavNode("Packages", packagesForMenu); + // Write index file for packages folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "packages")); + const packagesIndexFile = path.join(this.outputMarkdownRoot, "packages", "index.md"); + await fs.writeFile(packagesIndexFile, getMetaHideLines() + DocBuilderPackage.buildIndexTable('', this.packageDescriptions).join("\n") + `\n\n${this.footer}\n`); + WebSocketClient.sendProgressEndMessage(); + } + + private async generatePagesDocumentation() { + const packageDirs = this.project?.getPackageDirectories() || []; + const pageFiles = await listPageFiles(packageDirs); + const pagesForMenu: any = { "All Lightning pages": "pages/index.md" } + 
WebSocketClient.sendProgressStartMessage("Generating Lightning Pages documentation...", pageFiles.length); + let counter = 0; + for (const pagefile of pageFiles) { + const pageName = path.basename(pagefile, ".flexipage-meta.xml"); + const mdFile = path.join(this.outputMarkdownRoot, "pages", pageName + ".md"); + pagesForMenu[pageName] = "pages/" + pageName + ".md"; + // Add Pages in documentation + const pageXml = await fs.readFile(pagefile, "utf8"); + const pageXmlParsed = new XMLParser().parse(pageXml); + this.pageDescriptions.push({ + name: pageName, + type: prettifyFieldName(pageXmlParsed?.FlexiPage?.type || "Unknown"), + impactedObjects: this.allObjectsNames.filter(objectName => pageXml.includes(`${objectName}`)) + }); + await new DocBuilderPage(pageName, pageXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, pageFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + this.addNavNode("Lightning Pages", pagesForMenu); + + // Write index file for pages folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "pages")); + const pagesIndexFile = path.join(this.outputMarkdownRoot, "pages", "index.md"); + await fs.writeFile(pagesIndexFile, getMetaHideLines() + DocBuilderPage.buildIndexTable('', this.pageDescriptions).join("\n") + `\n\n${this.footer}\n`); + } + + private async generateProfilesDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Profiles documentation... 
(if you don't want it, define GENERATE_PROFILES_DOC=false in your environment variables)")); + const profilesForMenu: any = { "All Profiles": "profiles/index.md" }; + const profilesFiles = (await glob("**/profiles/**.profile-meta.xml", { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })); + sortCrossPlatform(profilesFiles); + if (profilesFiles.length === 0) { + uxLog("log", this, c.yellow("No profile found in the project")); + return; + } + WebSocketClient.sendProgressStartMessage("Generating Profiles documentation...", profilesFiles.length); + let counter = 0; + for (const profileFile of profilesFiles) { + const profileName = path.basename(profileFile, ".profile-meta.xml"); + const mdFile = path.join(this.outputMarkdownRoot, "profiles", profileName + ".md"); + profilesForMenu[profileName] = "profiles/" + profileName + ".md"; + const profileXml = await fs.readFile(profileFile, "utf8"); + const profileXmlParsed = new XMLParser().parse(profileXml); + this.profileDescriptions.push({ + name: profileName, + userLicense: prettifyFieldName(profileXmlParsed?.Profile?.userLicense || "Unknown"), + impactedObjects: this.allObjectsNames.filter(objectName => profileXml.includes(`${objectName}`)) + }); + // Add Profiles code in documentation + await new DocBuilderProfile(profileName, profileXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, profilesFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + this.addNavNode("Profiles", profilesForMenu); + // Write index file for profiles folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "profiles")); + const profilesIndexFile = path.join(this.outputMarkdownRoot, "profiles", "index.md"); + await fs.writeFile(profilesIndexFile, getMetaHideLines() + DocBuilderProfile.buildIndexTable('', this.profileDescriptions).join("\n") + `\n\n${this.footer}\n`); + } + + private async 
generatePermissionSetsDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Permission Sets documentation... (if you don't want it, define GENERATE_PROFILES_DOC=false in your environment variables)")); + const psForMenu: any = { "All Permission Sets": "permissionsets/index.md" }; + const psFiles = (await glob("**/permissionsets/**.permissionset-meta.xml", { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })); + sortCrossPlatform(psFiles); + if (psFiles.length === 0) { + uxLog("log", this, c.yellow("No permission set found in the project")); + return; + } + WebSocketClient.sendProgressStartMessage("Generating Permission Sets documentation...", psFiles.length); + let counter = 0; + for (const psFile of psFiles) { + const psName = path.basename(psFile, ".permissionset-meta.xml"); + const mdFile = path.join(this.outputMarkdownRoot, "permissionsets", psName + ".md"); + psForMenu[psName] = "permissionsets/" + psName + ".md"; + const psXml = await fs.readFile(psFile, "utf8"); + const psXmlParsed = new XMLParser().parse(psXml); + this.permissionSetsDescriptions.push({ + name: psName, + userLicense: prettifyFieldName(psXmlParsed?.PermissionSet?.license || "Unknown"), + impactedObjects: this.allObjectsNames.filter(objectName => psXml.includes(`${objectName}`)) + }); + // Add Permission Sets code in documentation + await new DocBuilderPermissionSet(psName, psXml, mdFile).generateMarkdownFileFromXml(); + // Permission Set Groups Table + const relatedPsg = DocBuilderPermissionSetGroup.buildIndexTable('../permissionsetgroups/', this.permissionSetGroupsDescriptions, psName); + await replaceInFile(mdFile, '', relatedPsg.join("\n")); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, psFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + this.addNavNode("Permission Sets", psForMenu); + // Write index file for permission sets folder + await 
fs.ensureDir(path.join(this.outputMarkdownRoot, "permissionsets")); + const psIndexFile = path.join(this.outputMarkdownRoot, "permissionsets", "index.md"); + await fs.writeFile(psIndexFile, getMetaHideLines() + DocBuilderPermissionSet.buildIndexTable('', this.permissionSetsDescriptions).join("\n") + `\n\n${this.footer}\n`); + } + + private async generatePermissionSetGroupsDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Permission Set Groups documentation...")); + const psgForMenu: any = { "All Permission Set Groups": "permissionsetgroups/index.md" }; + const psgFiles = (await glob("**/permissionsetgroups/**.permissionsetgroup-meta.xml", { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })) + sortCrossPlatform(psgFiles); + if (psgFiles.length === 0) { + uxLog("log", this, c.yellow("No permission set group found in the project")); + return; + } + WebSocketClient.sendProgressStartMessage("Generating Permission Set Groups documentation...", psgFiles.length); + let counter = 0; + for (const psgFile of psgFiles) { + const psgName = path.basename(psgFile, ".permissionsetgroup-meta.xml"); + const mdFile = path.join(this.outputMarkdownRoot, "permissionsetgroups", psgName + ".md"); + psgForMenu[psgName] = "permissionsetgroups/" + psgName + ".md"; + const psgXml = await fs.readFile(psgFile, "utf8"); + const psgXmlParsed = new XMLParser().parse(psgXml); + let permissionSets = psgXmlParsed?.PermissionSetGroup?.permissionSets || []; + if (!Array.isArray(permissionSets)) { + permissionSets = [permissionSets]; + } + this.permissionSetGroupsDescriptions.push({ + name: psgName, + description: psgXmlParsed?.PermissionSetGroup?.description || "None", + relatedPermissionSets: permissionSets, + }); + await new DocBuilderPermissionSetGroup(psgName, psgXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, psgFiles.length); + } + 
WebSocketClient.sendProgressEndMessage(); + this.addNavNode("Permission Set Groups", psgForMenu); + + // Write index file for permission set groups folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "permissionsetgroups")); + const psgIndexFile = path.join(this.outputMarkdownRoot, "permissionsetgroups", "index.md"); + await fs.writeFile(psgIndexFile, getMetaHideLines() + DocBuilderPermissionSetGroup.buildIndexTable('', this.permissionSetGroupsDescriptions).join("\n") + `\n${this.footer}\n`); + } + + private async generateRolesDocumentation() { + uxLog("action", this, c.cyan("Generating Roles documentation... (if you don't want it, define GENERATE_PROFILES_DOC=false in your environment variables)")); + const roleFiles = (await glob("**/roles/**.role-meta.xml", { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })); + sortCrossPlatform(roleFiles); + if (roleFiles.length === 0) { + uxLog("log", this, c.yellow("No role found in the project")); + return; + } + for (const roleFile of roleFiles) { + const roleApiName = path.basename(roleFile, ".role-meta.xml"); + const roleXml = await fs.readFile(roleFile, "utf8"); + const roleXmlParsed = new XMLParser().parse(roleXml); + // build object with all XML root tags + const roleInfo = { apiName: roleApiName }; + for (const roleAttribute of Object.keys(roleXmlParsed?.Role || {})) { + roleInfo[roleAttribute] = roleXmlParsed?.Role[roleAttribute] || ""; + } + + this.roleDescriptions.push(roleInfo); + } + this.addNavNode("Roles", "roles.md"); + + // Add Roles documentation + const rolesIndexFile = path.join(this.outputMarkdownRoot, "roles.md"); + await DocBuilderRoles.generateMarkdownFileFromRoles(this.roleDescriptions, rolesIndexFile); + if (this.withPdf) { + await generatePdfFileFromMarkdown(rolesIndexFile); + } + } + + + private async generateAssignmentRulesDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Assignment Rules documentation... 
" + + "(if you don't want it, define GENERATE_AUTOMATIONS_DOC=false in your environment variables)")); + + const assignmentRulesForMenu: any = { "All Assignment Rules": "assignmentRules/index.md" }; + const assignmentRulesFiles = (await glob("**/assignmentRules/**.assignmentRules-meta.xml", { + cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS + })); + sortCrossPlatform(assignmentRulesFiles); + const builder = new XMLBuilder(); + + // Count total rules for progress tracking + let totalRules = 0; + for (const assignmentRulesFile of assignmentRulesFiles) { + const assignmentRulesXml = await fs.readFile(assignmentRulesFile, "utf8"); + const assignmentRulesXmlParsed = new XMLParser().parse(assignmentRulesXml); + let rulesList = assignmentRulesXmlParsed?.AssignmentRules?.assignmentRule || []; + if (!Array.isArray(rulesList)) { + rulesList = [rulesList]; + } + totalRules += rulesList.length; + } + + if (totalRules === 0) { + uxLog("log", this, c.yellow("No assignment rule found in the project")); + return; + } + + WebSocketClient.sendProgressStartMessage("Generating Assignment Rules documentation...", totalRules); + let counter = 0; + for (const assignmentRulesFile of assignmentRulesFiles) { + + const assignmentRulesXml = await fs.readFile(assignmentRulesFile, "utf8"); + const assignmentRulesXmlParsed = new XMLParser().parse(assignmentRulesXml); + + const assignmentRulesName = path.basename(assignmentRulesFile, ".assignmentRules-meta.xml"); + // parsing one singe XML file with all the Assignment Rules per object: + let rulesList = assignmentRulesXmlParsed?.AssignmentRules?.assignmentRule || []; + if (!Array.isArray(rulesList)) { + rulesList = [rulesList]; + } + + for (const rule of rulesList) { + const currentRuleName = assignmentRulesName + "." 
+ rule?.fullName; + assignmentRulesForMenu[currentRuleName] = "assignmentRules/" + currentRuleName + ".md"; + const mdFile = path.join(this.outputMarkdownRoot, "assignmentRules", currentRuleName + ".md"); + + this.assignmentRulesDescriptions.push({ + name: currentRuleName, + active: rule.active, + }); + + const ruleXml = builder.build({ assignmentRule: rule }); + + await new DocBuilderAssignmentRules(currentRuleName, ruleXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, totalRules); + } + } + WebSocketClient.sendProgressEndMessage(); + + this.addNavNode("Assignment Rules", assignmentRulesForMenu); + + await fs.ensureDir(path.join(this.outputMarkdownRoot, "assignmentRules")); + const psgIndexFile = path.join(this.outputMarkdownRoot, "assignmentRules", "index.md"); + await fs.writeFile(psgIndexFile, getMetaHideLines() + DocBuilderAssignmentRules.buildIndexTable('', this.assignmentRulesDescriptions).join("\n") + `\n${this.footer}\n`); + } + + private async generateApprovalProcessDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Approval Processes documentation... 
" + + "(if you don't want it, define GENERATE_AUTOMATIONS_DOC=false in your environment variables)")); + + const approvalProcessesForMenu: any = { "All Approval Processes": "approvalProcesses/index.md" } + const approvalProcessFiles = (await glob("**/approvalProcesses/**.approvalProcess-meta.xml", { + cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS + })); + sortCrossPlatform(approvalProcessFiles); + + if (approvalProcessFiles.length === 0) { + uxLog("log", this, c.yellow("No approval process found in the project")); + return; + } + WebSocketClient.sendProgressStartMessage("Generating Approval Processes documentation...", approvalProcessFiles.length); + let counter = 0; + for (const approvalProcessFile of approvalProcessFiles) { + const approvalProcessName = path.basename(approvalProcessFile, ".approvalProcess-meta.xml"); + const mdFile = path.join(this.outputMarkdownRoot, "approvalProcesses", approvalProcessName + ".md"); + + approvalProcessesForMenu[approvalProcessName] = "approvalProcesses/" + approvalProcessName + ".md"; + const approvalProcessXml = await fs.readFile(approvalProcessFile, "utf8"); + + const approvalProcessXmlParsed = new XMLParser().parse(approvalProcessXml); + this.approvalProcessesDescriptions.push({ + name: approvalProcessName, + active: approvalProcessXmlParsed?.ApprovalProcess?.active, + impactedObjects: this.allObjectsNames.filter(objectName => approvalProcessXml.includes(`${objectName}`)) + }); + + await new DocBuilderApprovalProcess(approvalProcessName, approvalProcessXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, approvalProcessFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + + this.addNavNode("Approval Processes", approvalProcessesForMenu); + await fs.ensureDir(path.join(this.outputMarkdownRoot, "approvalProcesses")); + const approvalProcessesIndexFile = path.join(this.outputMarkdownRoot, 
"approvalProcesses", "index.md"); + await fs.writeFile(approvalProcessesIndexFile, getMetaHideLines() + DocBuilderApprovalProcess.buildIndexTable('', this.approvalProcessesDescriptions).join("\n") + `\n\n${this.footer}\n`); + } + + private async generateAutoResponseRulesDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of AutoResponse Rules documentation... " + + "(if you don't want it, define GENERATE_AUTOMATIONS_DOC=false in your environment variables)")); + + const autoResponseRulesForMenu: any = { "All AutoResponse Rules": "autoResponseRules/index.md" }; + const autoResponseRulesFiles = (await glob("**/autoResponseRules/**.autoResponseRules-meta.xml", { + cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS + })); + sortCrossPlatform(autoResponseRulesFiles); + const builder = new XMLBuilder(); + // Count total rules for progress tracking + let totalRules = 0; + for (const autoResponseRulesFile of autoResponseRulesFiles) { + const autoResponseRulesXml = await fs.readFile(autoResponseRulesFile, "utf8"); + const autoResponseRulesXmlParsed = new XMLParser().parse(autoResponseRulesXml); + let rulesList = autoResponseRulesXmlParsed?.AutoResponseRules?.autoResponseRule || []; + if (!Array.isArray(rulesList)) { + rulesList = [rulesList]; + } + totalRules += rulesList.length; + } + + if (totalRules === 0) { + uxLog("log", this, c.yellow("No auto-response rules found in the project")); + return; + } + + WebSocketClient.sendProgressStartMessage("Generating AutoResponse Rules documentation...", totalRules); + let counter = 0; + for (const autoResponseRulesFile of autoResponseRulesFiles) { + + const autoResponseRulesXml = await fs.readFile(autoResponseRulesFile, "utf8"); + const autoResponseRulesXmlParsed = new XMLParser().parse(autoResponseRulesXml); + + const autoResponseRulesName = path.basename(autoResponseRulesFile, ".autoResponseRules-meta.xml"); + + // parsing one single XML file with all the AutoResponse Rules per object: + let rulesList = 
autoResponseRulesXmlParsed?.AutoResponseRules?.autoResponseRule || []; + if (!Array.isArray(rulesList)) { + rulesList = [rulesList]; + } + + for (const rule of rulesList) { + const currentRuleName = autoResponseRulesName + "." + rule?.fullName; + autoResponseRulesForMenu[currentRuleName] = "autoResponseRules/" + currentRuleName + ".md"; + const mdFile = path.join(this.outputMarkdownRoot, "autoResponseRules", currentRuleName + ".md"); + + this.autoResponseRulesDescriptions.push({ + name: currentRuleName, + active: rule.active, + }); + + const ruleXml = builder.build({ autoResponseRule: rule }); + + await new DocBuilderAutoResponseRules(currentRuleName, ruleXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, totalRules); + } + } + WebSocketClient.sendProgressEndMessage(); + this.addNavNode("AutoResponse Rules", autoResponseRulesForMenu); + + // Write index file for permission set groups folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "autoResponseRules")); + const psgIndexFile = path.join(this.outputMarkdownRoot, "autoResponseRules", "index.md"); + await fs.writeFile(psgIndexFile, getMetaHideLines() + DocBuilderAutoResponseRules.buildIndexTable('', this.autoResponseRulesDescriptions).join("\n") + `\n${this.footer}\n`); + } + + private async generateEscalationRulesDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Escalation Rules documentation... 
" + + "(if you don't want it, define GENERATE_AUTOMATIONS_DOC=false in your environment variables)")); + + const escalationRulesForMenu: any = { "All Escalation Rules": "escalationRules/index.md" }; + const escalationRulesFiles = (await glob("**/escalationRules/**.escalationRules-meta.xml", { + cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS + })); + sortCrossPlatform(escalationRulesFiles); + const builder = new XMLBuilder(); + + // Count total rules for progress tracking + let totalRules = 0; + for (const escalationRulesFile of escalationRulesFiles) { + const escalationRulesXml = await fs.readFile(escalationRulesFile, "utf8"); + const escalationRulesXmlParsed = new XMLParser().parse(escalationRulesXml); + let rulesList = escalationRulesXmlParsed?.EscalationRules?.escalationRule || []; + if (!Array.isArray(rulesList)) { + rulesList = [rulesList]; + } + totalRules += rulesList.length; + } + + if (totalRules === 0) { + uxLog("log", this, c.yellow("No escalation rules found in the project")); + return; + } + + WebSocketClient.sendProgressStartMessage("Generating Escalation Rules documentation...", totalRules); + + let counter = 0; + for (const escalationRulesFile of escalationRulesFiles) { + + const escalationRulesXml = await fs.readFile(escalationRulesFile, "utf8"); + const escalationRulesXmlParsed = new XMLParser().parse(escalationRulesXml); + + const escalationRulesName = path.basename(escalationRulesFile, ".escalationRules-meta.xml"); + + // parsing one singe XML file with all the Escalation Rules for Case: + let rulesList = escalationRulesXmlParsed?.EscalationRules?.escalationRule || []; + if (!Array.isArray(rulesList)) { + rulesList = [rulesList]; + } + + for (const rule of rulesList) { + counter++; + WebSocketClient.sendProgressStepMessage(counter); + + const currentRuleName = escalationRulesName + "." 
+ rule?.fullName; + escalationRulesForMenu[currentRuleName] = "escalationRules/" + currentRuleName + ".md"; + const mdFile = path.join(this.outputMarkdownRoot, "escalationRules", currentRuleName + ".md"); + + this.escalationRulesDescriptions.push({ + name: currentRuleName, + active: rule.active, + }); + + const ruleXml = builder.build({ escalationRule: rule }); + + await new DocBuilderEscalationRules(currentRuleName, ruleXml, mdFile).generateMarkdownFileFromXml(); + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + } + } + + WebSocketClient.sendProgressEndMessage(); + + this.addNavNode("Escalation Rules", escalationRulesForMenu); + + await fs.ensureDir(path.join(this.outputMarkdownRoot, "escalationRules")); + const psgIndexFile = path.join(this.outputMarkdownRoot, "escalationRules", "index.md"); + await fs.writeFile(psgIndexFile, getMetaHideLines() + DocBuilderEscalationRules.buildIndexTable('', this.escalationRulesDescriptions).join("\n") + `\n${this.footer}\n`); + } + + private async buildMkDocsYml() { + // Copy default files (mkdocs.yml and other files can be updated by the SF Cli plugin developer later) + const mkdocsYmlFile = path.join(process.cwd(), 'mkdocs.yml'); + const mkdocsYmlFileExists = fs.existsSync(mkdocsYmlFile); + await fs.copy(path.join(PACKAGE_ROOT_DIR, 'defaults/mkdocs-project-doc', '.'), process.cwd(), { overwrite: false }); + if (!mkdocsYmlFileExists) { + uxLog("log", this, c.grey('Base mkdocs files copied in your Salesforce project repo')); + uxLog( + "warning", + this, + c.yellow( + 'You should probably manually update mkdocs.yml to add your own configuration, like theme, site_name, etc.' 
+ ) + ); + } + // Update mkdocs nav items + const mkdocsYml: any = readMkDocsFile(mkdocsYmlFile); + + for (const navMenu of this.mkDocsNavNodes) { + let pos = 0; + let found = false; + for (const navItem of mkdocsYml.nav) { + if (Object.keys(navItem)[0] === Object.keys(navMenu)[0]) { + found = true; + break; + } + pos++; + } + if (found) { + mkdocsYml.nav[pos] = navMenu; + } else { + mkdocsYml.nav.push(navMenu); + } + } + // Add missing javascripts if necessary + const allJavascripts = [ + "https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.4/jquery.min.js", + "https://cdnjs.cloudflare.com/ajax/libs/jstree/3.3.12/jstree.min.js", + "https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js", + "javascripts/tables.js", + "javascripts/gtag.js", + "javascripts/jstree-handler.js" + ]; + const extraJavascript = mkdocsYml.extra_javascript || []; + for (const jsItem of allJavascripts) { + if (!extraJavascript.includes(jsItem)) { + extraJavascript.push(jsItem); + } + } + mkdocsYml.extra_javascript = extraJavascript; + + // Add missing CSS if necessary + const allCss = [ + "https://cdnjs.cloudflare.com/ajax/libs/jstree/3.3.12/themes/default/style.min.css", + "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.7.2/css/all.min.css", + "stylesheets/extra.css", + "stylesheets/jstree-custom.css" + ]; + const extraCss = mkdocsYml.extra_css || []; + for (const cssItem of allCss) { + if (!extraCss.includes(cssItem)) { + extraCss.push(cssItem); + } + } + mkdocsYml.extra_css = extraCss; + + // Add missing plugin config if necessary + if (!mkdocsYml.plugins) { + mkdocsYml.plugins = [ + 'search', + { + 'exclude-search': { + 'exclude': [ + "index.md", + "cache-ai-results/*.md", + "*package.xml.md", + "package-*items.xml.md" + ] + } + } + ] + } + + // Remove deprecated items if found + mkdocsYml.nav = mkdocsYml.nav.filter(navItem => !navItem["Flows History"]); + mkdocsYml.nav = mkdocsYml.nav.filter(navItem => !navItem["Installed Packages"]); + + // Add root menus + const 
rootSections = [ + { menu: "Automations", subMenus: ["Approval Processes", "Assignment Rules", "AutoResponse Rules", "Escalation Rules", "Flows"] }, + { menu: "Authorizations", subMenus: ["Profiles", "Permission Set Groups", "Permission Sets"] }, + { menu: "Code", subMenus: ["Apex", "Lightning Web Components"] }, + ]; + for (const rootSection of rootSections) { + const navSubmenus: any[] = []; + for (const subMenu of rootSection.subMenus) { + // Find submenu + const subMenuContent = mkdocsYml.nav.find(navItem => Object.keys(navItem)[0] === subMenu); + if (subMenuContent) { + navSubmenus.push(subMenuContent); + } + // Remove sub menus from root menus + mkdocsYml.nav = mkdocsYml.nav.filter(navItem => !navItem[subMenu]); + } + // Check if rootSection.menu already exists in nav + const existingRootMenuIndex = mkdocsYml.nav.findIndex(navItem => Object.keys(navItem)[0] === rootSection.menu); + if (existingRootMenuIndex > -1) { + // Append new submenus to existing root menu + const existingSubMenus = mkdocsYml.nav[existingRootMenuIndex][rootSection.menu]; + const uniqueSubMenus = new Map(); + for (const item of [...existingSubMenus, ...navSubmenus]) { + const key = Object.keys(item)[0]; + if (!uniqueSubMenus.has(key) || navSubmenus.some(navItem => Object.keys(navItem)[0] === key)) { + uniqueSubMenus.set(key, item); + } + } + mkdocsYml.nav[existingRootMenuIndex][rootSection.menu] = Array.from(uniqueSubMenus.values()).sort((a, b) => { + const keyA = Object.keys(a)[0].toLowerCase(); + const keyB = Object.keys(b)[0].toLowerCase(); + return keyA.localeCompare(keyB, 'en', { sensitivity: 'base' }); + }); + } + else { + // Add root menu with submenus + mkdocsYml.nav.push({ [rootSection.menu]: navSubmenus }); + } + } + + // Order nav items with this elements in first + const firstItemsInOrder = [ + "Home", + // "Object Model", + "Objects", + "Automations", + "Authorizations", + "Code", + "Lightning Pages", + "Packages", + "Roles", + "SFDX-Hardis Config", + "Branches & Orgs", + 
"Manifests" + ]; + mkdocsYml.nav = firstItemsInOrder.map(item => mkdocsYml.nav.find(navItem => Object.keys(navItem)[0] === item)).filter(item => item).concat(mkdocsYml.nav.filter(navItem => !firstItemsInOrder.includes(Object.keys(navItem)[0]))); + + + // Update mkdocs file + await writeMkDocsFile(mkdocsYmlFile, mkdocsYml); + uxLog("action", this, c.cyan(`To generate a HTML WebSite with this documentation with a single command, see instructions at ${CONSTANTS.DOC_URL_ROOT}/hardis/doc/project2markdown/`)); + } + + private async generateObjectsDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Objects AI documentation... (if you don't want it, define GENERATE_OBJECTS_DOC=false in your environment variables)")); + + const objectLinksInfo = await this.generateLinksInfo(); + const objectsForMenu: any = { "All objects": "objects/index.md" } + await fs.ensureDir(path.join(this.outputMarkdownRoot, "objects")); + WebSocketClient.sendProgressStartMessage("Generating Objects documentation...", this.objectFiles.length); + let counter = 0; + for (const objectFile of this.objectFiles) { + const objectName = path.basename(objectFile, ".object"); + if ((objectName.endsWith("__dlm") || objectName.endsWith("__dll")) && !(process.env?.INCLUDE_DATA_CLOUD_DOC === "true")) { + uxLog("log", this, c.grey(`Skip Data Cloud Object ${objectName}... 
(use INCLUDE_DATA_CLOUD_DOC=true to enforce it)`)); + counter++; + WebSocketClient.sendProgressStepMessage(counter, this.objectFiles.length); + continue; + } + uxLog("log", this, c.grey(`Generating markdown for Object ${objectName}...`)); + const objectXml = (await fs.readFile(path.join(this.tempDir, objectFile), "utf8")).toString(); + const objectMdFile = path.join(this.outputMarkdownRoot, "objects", objectName + ".md"); + // Build filtered XML + const objectXmlParsed = new XMLParser().parse(objectXml); + // Main AI markdown + await new DocBuilderObject( + objectName, + objectXml, + objectMdFile, { + "ALL_OBJECTS_LIST": this.allObjectsNames.join(","), + "ALL_OBJECT_LINKS": objectLinksInfo + }).generateMarkdownFileFromXml(); + // Fields table + await this.buildAttributesTables(objectName, objectXmlParsed, objectMdFile); + // Mermaid schema + const mermaidSchema = await new ObjectModelBuilder(objectName).buildObjectsMermaidSchema(); + await replaceInFile(objectMdFile, '', '## Schema\n\n```mermaid\n' + mermaidSchema + '\n```\n'); + if (this.withPdf) { + /** Regenerate using Mermaid CLI to convert Mermaid code into SVG */ + await generateMarkdownFileWithMermaid(objectMdFile, objectMdFile, null, true); + } + // Flows Table + const relatedObjectFlowsTable = DocBuilderFlow.buildIndexTable('../flows/', this.flowDescriptions, this.outputMarkdownRoot, objectName); + await replaceInFile(objectMdFile, '', relatedObjectFlowsTable.join("\n")); + // Apex Table + const relatedApexTable = DocBuilderApex.buildIndexTable('../apex/', this.apexDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedApexTable.join("\n")); + // Lightning Pages table + const relatedPages = DocBuilderPage.buildIndexTable('../pages/', this.pageDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedPages.join("\n")); + // Add Profiles table + const relatedProfilesTable = DocBuilderProfile.buildIndexTable('../profiles/', this.profileDescriptions, objectName); + await 
replaceInFile(objectMdFile, '', relatedProfilesTable.join("\n")); + // Add Permission Sets table + const relatedPermissionSetsTable = DocBuilderPermissionSet.buildIndexTable('../permissionsets/', this.permissionSetsDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedPermissionSetsTable.join("\n")); + // Add Approval Processes table + const relatedApprovalProcessTable = DocBuilderApprovalProcess.buildIndexTable('../approvalProcesses/', this.approvalProcessesDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedApprovalProcessTable.join("\n")); + // Assignment Rules table + const relatedAssignmentRulesTable = DocBuilderAssignmentRules.buildIndexTable('../assignmentRules/', this.assignmentRulesDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedAssignmentRulesTable.join("\n")); + // AutoResponse Rules table + const relatedAutoResponseRulesTable = DocBuilderAutoResponseRules.buildIndexTable('../autoResponseRules/', this.autoResponseRulesDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedAutoResponseRulesTable.join("\n")); + // Escalation Rules table + const relatedEscalationRulesTable = DocBuilderEscalationRules.buildIndexTable('../escalationRules/', this.escalationRulesDescriptions, objectName); + await replaceInFile(objectMdFile, '', relatedEscalationRulesTable.join("\n")); + + this.objectDescriptions.push({ + name: objectName, + label: objectXmlParsed?.CustomObject?.label || "", + description: objectXmlParsed?.CustomObject?.description || "", + }); + objectsForMenu[objectName] = "objects/" + objectName + ".md"; + if (this.withPdf) { + await generatePdfFileFromMarkdown(objectMdFile); + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, this.objectFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + this.addNavNode("Objects", objectsForMenu); + + // Write index file for objects folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "objects")); + 
const objectsTableLinesForIndex = DocBuilderObject.buildIndexTable('', this.objectDescriptions); + const objectsIndexFile = path.join(this.outputMarkdownRoot, "objects", "index.md"); + await fs.writeFile(objectsIndexFile, getMetaHideLines() + objectsTableLinesForIndex.join("\n") + `\n${this.footer}\n`); + } + + private async buildAttributesTables(objectName: string, objectXmlParsed: any, objectMdFile: string) { + const fieldsTable = DocBuilderObject.buildCustomFieldsTable(objectXmlParsed?.CustomObject?.fields || []); + const validationRulesTable = DocBuilderObject.buildValidationRulesTable(objectXmlParsed?.CustomObject?.validationRules || []); + const attributesLines = [...fieldsTable, ...validationRulesTable]; + const attributesMarkdown = await completeAttributesDescriptionWithAi(attributesLines.join("\n"), objectName) + await replaceInFile(objectMdFile, '', attributesMarkdown); + } + + private async generateLinksInfo(): Promise { + uxLog("log", this, c.cyan("Generate MasterDetail and Lookup infos to provide context to AI prompt")); + const findFieldsPattern = `**/objects/**/fields/**.field-meta.xml`; + const matchingFieldFiles = (await glob(findFieldsPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })).map(file => file.replace(/\\/g, '/')); + const customFieldsLinks: string[] = []; + for (const fieldFile of matchingFieldFiles) { + const fieldXml = fs.readFileSync(fieldFile, "utf8").toString(); + const fieldDetail = new XMLParser().parse(fieldXml); + if (fieldDetail?.CustomField?.type === "MasterDetail" || fieldDetail?.CustomField?.type === "Lookup") { + const fieldName = path.basename(fieldFile, ".field-meta.xml"); + const objectName = fieldFile.substring(fieldFile.indexOf('objects/')).split("/")[1]; + const linkDescription = `- ${fieldDetail.CustomField.type} field "${fieldName}" defined on object ${objectName}, with target object reference to ${fieldDetail.CustomField.referenceTo} (relationship name: "${fieldDetail.CustomField.relationshipName}", 
label: "${fieldDetail.CustomField.label}", description: "${fieldDetail.CustomField?.description || ''}")`; + customFieldsLinks.push(linkDescription); + } + } + return customFieldsLinks.join("\n") + "\n"; + } + + private async generateFlowsDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Flows Visual documentation... (if you don't want it, define GENERATE_FLOW_DOC=false in your environment variables)")); + const flowsForMenu: any = { "All flows": "flows/index.md" } + await fs.ensureDir(path.join(this.outputMarkdownRoot, "flows")); + const packageDirs = this.project?.getPackageDirectories(); + const updatedFlowNames = !this.diffOnly ? + [] : + (await MetadataUtils.listChangedOrFromCurrentCommitFiles()).filter(f => f?.path?.endsWith(".flow-meta.xml")).map(f => path.basename(f.path, ".flow-meta.xml")); + const flowFiles = await listFlowFiles(packageDirs); + const flowErrors: string[] = []; + const flowWarnings: string[] = []; + const flowSkips: string[] = []; + + // List flows dependencies + const flowDeps: any = {}; + for (const flowFile of flowFiles) { + const flowName = path.basename(flowFile, ".flow-meta.xml"); + const flowXml = (await fs.readFile(flowFile, "utf8")).toString(); + // Find all occurences of .* in flowXml + const regex = /(.*?)<\/flowName>/g; + const extractedNames = [...flowXml.matchAll(regex)].map(match => match[1]); + flowDeps[flowName] = extractedNames; + } + if (flowFiles.length === 0) { + uxLog("log", this, c.yellow("No flow found in the project")); + return; + } + // Generate Flows documentation + WebSocketClient.sendProgressStartMessage("Generating Flows documentation...", flowFiles.length); + let counter = 0; + for (const flowFile of flowFiles) { + const flowName = path.basename(flowFile, ".flow-meta.xml"); + const flowXml = (await fs.readFile(flowFile, "utf8")).toString(); + const flowContent = await parseXmlFile(flowFile); + this.flowDescriptions.push({ + name: flowName, + description: 
flowContent?.Flow?.description?.[0] || "", + type: flowContent?.Flow?.processType?.[0] === "Flow" ? "ScreenFlow" : flowContent?.Flow?.start?.[0]?.triggerType?.[0] ?? (flowContent?.Flow?.processType?.[0] || "ERROR (Unknown)"), + object: flowContent?.Flow?.start?.[0]?.object?.[0] || flowContent?.Flow?.processMetadataValues?.filter(pmv => pmv.name[0] === "ObjectType")?.[0]?.value?.[0]?.stringValue?.[0] || "", + impactedObjects: this.allObjectsNames.filter(objectName => flowXml.includes(`>${objectName}<`)) + }); + flowsForMenu[flowName] = "flows/" + flowName + ".md"; + const outputFlowMdFile = path.join(this.outputMarkdownRoot, "flows", flowName + ".md"); + if (this.diffOnly && !updatedFlowNames.includes(flowName) && fs.existsSync(outputFlowMdFile)) { + flowSkips.push(flowFile); + counter++; + WebSocketClient.sendProgressStepMessage(counter, flowFiles.length); + continue; + } + uxLog("log", this, c.grey(`Generating markdown for Flow ${flowFile}...`)); + const genRes = await generateFlowMarkdownFile(flowName, flowXml, outputFlowMdFile, { collapsedDetails: false, describeWithAi: true, flowDependencies: flowDeps }); + if (!genRes) { + flowErrors.push(flowFile); + counter++; + WebSocketClient.sendProgressStepMessage(counter, flowFiles.length); + continue; + } + if (this.debugMode) { + await fs.copyFile(outputFlowMdFile, outputFlowMdFile.replace(".md", ".mermaid.md")); + } + const gen2res = await generateMarkdownFileWithMermaid(outputFlowMdFile, outputFlowMdFile, null, this.withPdf); + if (!gen2res) { + flowWarnings.push(flowFile); + counter++; + WebSocketClient.sendProgressStepMessage(counter, flowFiles.length); + continue; + } + counter++; + WebSocketClient.sendProgressStepMessage(counter, flowFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + this.flowDescriptions = sortArray(this.flowDescriptions, { by: ['object', 'name'], order: ['asc', 'asc'] }) as any[] + + // History + if (this.withHistory) { + WebSocketClient.sendProgressStartMessage("Generating Flows 
History documentation...", flowFiles.length); + let counter1 = 0; + for (const flowFile of flowFiles) { + const flowName = path.basename(flowFile, ".flow-meta.xml"); + const diffMdFile = path.join("docs", "flows", path.basename(flowFile).replace(".flow-meta.xml", "-history.md")); + if (this.diffOnly && !updatedFlowNames.includes(flowName) && fs.existsSync(diffMdFile)) { + counter1++; + WebSocketClient.sendProgressStepMessage(counter1, flowFiles.length); + continue; + } + try { + await generateHistoryDiffMarkdown(flowFile, this.debugMode); + } catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating history diff markdown: ${e.message}`)); + } + counter1++; + WebSocketClient.sendProgressStepMessage(counter1, flowFiles.length); + } + WebSocketClient.sendProgressEndMessage(); + } + + // Summary + if (flowSkips.length > 0) { + uxLog("warning", this, c.yellow(`Skipped generation for ${flowSkips.length} Flows that have not been updated: ${this.humanDisplay(flowSkips)}`)); + } + uxLog("success", this, c.green(`Successfully generated ${flowFiles.length - flowSkips.length - flowWarnings.length - flowErrors.length} Flows documentation`)); + if (flowWarnings.length > 0) { + uxLog("warning", this, c.yellow(`Partially generated documentation (Markdown with mermaidJs but without SVG) for ${flowWarnings.length} Flows: ${this.humanDisplay(flowWarnings)}`)); + } + if (flowErrors.length > 0) { + uxLog("warning", this, c.yellow(`Error generating documentation for ${flowErrors.length} Flows: ${this.humanDisplay(flowErrors)}`)); + } + + // Write index file for flow folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "flows")); + const flowTableLinesForIndex = DocBuilderFlow.buildIndexTable('', this.flowDescriptions, this.outputMarkdownRoot); + const flowIndexFile = path.join(this.outputMarkdownRoot, "flows", "index.md"); + await fs.writeFile(flowIndexFile, getMetaHideLines() + flowTableLinesForIndex.join("\n") + `\n${this.footer}\n`); + + this.addNavNode("Flows", 
flowsForMenu); + uxLog("success", this, c.green(`Successfully generated doc index for Flows at ${flowIndexFile}`)); + } + + private humanDisplay(flows) { + return flows.map(flow => path.basename(flow, ".flow-meta.xml")).join(", "); + } + + private buildSfdxHardisParams(): string[] { + const sfdxParamsTableLines: string[] = []; + sfdxParamsTableLines.push(...[ + `## ${this.sfdxHardisConfig?.projectName?.toUpperCase() || "SFDX Project"} CI/CD configuration`, + ""]); + sfdxParamsTableLines.push(...[ + "| Sfdx-hardis Parameter | Value | Description & doc link |", + "| :--------- | :---- | :---------- |" + ]); + const installPackagesDuringCheckDeploy = this.sfdxHardisConfig?.installPackagesDuringCheckDeploy ?? false; + sfdxParamsTableLines.push(`| Install Packages During Check Deploy | ${bool2emoji(installPackagesDuringCheckDeploy)} | [Install 1GP & 2GP packages during deployment check CI/CD job](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/#packages-installation) |`); + const useDeltaDeployment = this.sfdxHardisConfig?.useDeltaDeployment ?? false; + sfdxParamsTableLines.push(`| Use Delta Deployment | ${bool2emoji(useDeltaDeployment)} | [Deploys only updated metadatas , only when a MR/PR is from a minor branch to a major branch](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-delta-deployment/#delta-mode) |`); + const useSmartDeploymentTests = this.sfdxHardisConfig?.useSmartDeploymentTests ?? 
false; + sfdxParamsTableLines.push(`| Use Smart Deployment Tests | ${bool2emoji(useSmartDeploymentTests)} | [Skip Apex test cases if delta metadatas can not impact them, only when a MR/PR is from a minor branch to a major branch](https://sfdx-hardis.cloudity.com/hardis/project/deploy/smart/#smart-deployments-tests) |`); + sfdxParamsTableLines.push(""); + sfdxParamsTableLines.push("___"); + sfdxParamsTableLines.push(""); + return sfdxParamsTableLines; + } + + private async buildMajorBranchesAndOrgs() { + const branchesOrgsLines: string[] = []; + const majorOrgs = await listMajorOrgs(); + if (majorOrgs.length > 0) { + + branchesOrgsLines.push(...[ + "## Branches & Orgs strategy", + "", + ]); + const mermaidLines = new BranchStrategyMermaidBuilder(majorOrgs).build({ withMermaidTag: true, format: "list" }); + branchesOrgsLines.push(...mermaidLines); + + branchesOrgsLines.push(...[ + "", + "| Git branch | Salesforce Org | Deployment Username |", + "| :--------- | :------------- | :------------------ |" + ]); + for (const majorOrg of majorOrgs) { + const majorOrgLine = `| ${majorOrg.branchName} | ${majorOrg.instanceUrl} | ${majorOrg.targetUsername} |`; + branchesOrgsLines.push(majorOrgLine); + } + branchesOrgsLines.push(""); + branchesOrgsLines.push("___"); + branchesOrgsLines.push(""); + } + return branchesOrgsLines; + } + + private async manageLocalPackages() { + uxLog("action", this, c.cyan("Generating package.xml files for local packages...")); + const packageDirs = this.project?.getPackageDirectories(); + if (!(packageDirs?.length === 1 && packageDirs[0].name === "force-app" && fs.existsSync("manifest/package.xml"))) { + for (const packageDir of packageDirs || []) { + // Generate manifest from package folder + const packageManifestFile = path.join("manifest", packageDir.name + '-package.xml'); + await fs.ensureDir(path.dirname(packageManifestFile)); + try { + await execSfdxJson("sf project generate manifest" + + ` --source-dir ${packageDir.path}` + + ` --name 
${packageManifestFile}`, this, + { + fail: true, + output: true, + debug: this.debugMode, + } + ); + // Add package in available packages list + this.packageXmlCandidates.push({ + path: packageManifestFile, + name: packageDir.name, + description: `Package.xml generated from content of SFDX package ${packageDir.name} (folder ${packageDir.path})` + }); + } + catch (e: any) { + uxLog("error", this, c.red(`Unable to generate manifest from ${packageDir.path}: it won't appear in the documentation\n${e.message}`)) + } + } + } + } + + private addNavNode(nodeName, nodeValue) { + const nodeIndex = this.mkDocsNavNodes.findIndex(navNode => Object.keys(navNode)[0] === nodeName); + if (nodeIndex > -1) { + this.mkDocsNavNodes[nodeIndex][nodeName] = nodeValue; + } + else { + const nodeMenu = {}; + nodeMenu[nodeName] = nodeValue; + this.mkDocsNavNodes.push(nodeMenu); + } + } + + private async generatePackageXmlMarkdown(packageXmlCandidates, instanceUrl) { + uxLog("action", this, c.cyan("Generating package.xml documentation...")); + // Generate packageXml doc when found + for (const packageXmlCandidate of packageXmlCandidates) { + if (fs.existsSync(packageXmlCandidate.path)) { + // Generate markdown for package.xml + const packageMarkdownFile = await DocBuilderPackageXML.generatePackageXmlMarkdown(packageXmlCandidate.path, null, packageXmlCandidate, instanceUrl); + // Open file in a new VsCode tab if available + WebSocketClient.requestOpenFile(packageMarkdownFile); + packageXmlCandidate.markdownFile = packageMarkdownFile; + this.outputPackageXmlMarkdownFiles.push(packageXmlCandidate); + } + } + } + + private async generateLwcDocumentation() { + uxLog("action", this, c.cyan("Preparing generation of Lightning Web Components documentation... 
" + + "(if you don't want it, define GENERATE_LWC_DOC=false in your environment variables)")); + + const lwcForMenu: any = { "All Lightning Web Components": "lwc/index.md" }; + await fs.ensureDir(path.join(this.outputMarkdownRoot, "lwc")); + + const packageDirs = this.project?.getPackageDirectories() || []; + + // Count total LWC components for progress tracking + let totalLwcComponents = 0; + for (const packageDir of packageDirs) { + const lwcMetaFiles = await glob(`${packageDir.path}/**/lwc/**/*.js-meta.xml`, { + cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS + }); + totalLwcComponents += lwcMetaFiles.length; + } + if (totalLwcComponents === 0) { + uxLog("log", this, c.yellow("No Lightning Web Component found in the project")); + return; + } + + WebSocketClient.sendProgressStartMessage("Generating Lightning Web Components documentation...", totalLwcComponents); + + let counter = 0; + // Find all LWC components in all package directories + for (const packageDir of packageDirs) { + // Find LWC components (directories with .js-meta.xml files) + const lwcMetaFiles = await glob(`${packageDir.path}/**/lwc/**/*.js-meta.xml`, { + cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS + }); + + for (const lwcMetaFile of lwcMetaFiles) { + counter++; + WebSocketClient.sendProgressStepMessage(counter); + + const lwcDirPath = path.dirname(lwcMetaFile); + const lwcName = path.basename(lwcDirPath); + const mdFile = path.join(this.outputMarkdownRoot, "lwc", lwcName + ".md"); + + lwcForMenu[lwcName] = "lwc/" + lwcName + ".md"; + + // Read XML metadata for information about the component + const lwcMetaXml = await fs.readFile(lwcMetaFile, "utf8"); + const lwcMetaXmlParsed = new XMLParser().parse(lwcMetaXml); + + // Read JS file to get a better idea of what objects this component works with + const jsFile = path.join(lwcDirPath, `${lwcName}.js`); + let jsContent = "none"; + if (fs.existsSync(jsFile)) { + jsContent = await fs.readFile(jsFile, "utf8"); + } + + // Read HTML template 
file + const htmlFile = path.join(lwcDirPath, `${lwcName}.html`); + let htmlContent = "none"; + if (fs.existsSync(htmlFile)) { + htmlContent = await fs.readFile(htmlFile, "utf8"); + } + + // Track this LWC in our descriptions array + this.lwcDescriptions.push({ + name: lwcName, + description: lwcMetaXmlParsed?.LightningComponentBundle?.description || + lwcMetaXmlParsed?.LightningComponentBundle?.masterLabel || "", + targets: Array.isArray(lwcMetaXmlParsed?.LightningComponentBundle?.targets?.target) + ? lwcMetaXmlParsed?.LightningComponentBundle?.targets?.target.join(", ") + : lwcMetaXmlParsed?.LightningComponentBundle?.targets?.target || "", + isExposed: lwcMetaXmlParsed?.LightningComponentBundle?.isExposed, + impactedObjects: this.allObjectsNames.filter(objectName => + lwcMetaXml.includes(`${objectName}`) || + jsContent.includes(`${objectName}`) + ) + }); + + // Generate the documentation file + await new DocBuilderLwc(lwcName, "", mdFile, { + LWC_PATH: lwcDirPath, + LWC_NAME: lwcName, + LWC_JS_CODE: jsContent, + LWC_HTML_CODE: htmlContent, + LWC_JS_META: lwcMetaXml + }).generateMarkdownFileFromXml(); + + if (this.withPdf) { + await generatePdfFileFromMarkdown(mdFile); + } + } + } + + WebSocketClient.sendProgressEndMessage(); + + this.addNavNode("Lightning Web Components", lwcForMenu); + + // Write index file for LWC folder + await fs.ensureDir(path.join(this.outputMarkdownRoot, "lwc")); + const lwcIndexFile = path.join(this.outputMarkdownRoot, "lwc", "index.md"); + await fs.writeFile( + lwcIndexFile, + getMetaHideLines() + + DocBuilderLwc.buildIndexTable('', this.lwcDescriptions).join("\n") + + `\n\n${this.footer}\n` + ); + + uxLog("success", this, c.green(`Successfully generated documentation for Lightning Web Components at ${lwcIndexFile}`)); + } +} \ No newline at end of file diff --git a/src/commands/hardis/git/pull-requests/extract.ts b/src/commands/hardis/git/pull-requests/extract.ts new file mode 100644 index 000000000..b3369f316 --- /dev/null +++ 
b/src/commands/hardis/git/pull-requests/extract.ts @@ -0,0 +1,177 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, selectGitBranch, uxLog } from '../../../../common/utils/index.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { GitProvider } from '../../../../common/gitProvider/index.js'; +import moment from 'moment'; +import { prompts } from '../../../../common/utils/prompts.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class GitPullRequestsExtract extends SfCommand { + public static title = 'Extract pull requests'; + + public static description = ` +## Command Behavior + +**Extracts pull request information from your Git server based on specified filtering criteria.** + +This command provides a powerful way to query and retrieve details about pull requests (or merge requests, depending on your Git provider) in your repository. It's highly useful for reporting, auditing, and analyzing development workflows. + +Key functionalities include: + +- **Target Branch Filtering:** You can filter pull requests by their target branch using the \`--target-branch\` flag. If not specified, the command will prompt you to select one. +- **Status Filtering:** Filter pull requests by their status: \`open\`, \`merged\`, or \`abandoned\` using the \`--status\` flag. An interactive prompt is provided if no status is specified. +- **Minimum Date Filtering:** Use the \`--min-date\` flag to retrieve pull requests created or updated after a specific date. +- **CSV Output:** The extracted pull request data is generated into a CSV file, which can be used for further analysis in spreadsheet software. + +
+Technical explanations + +The command's technical implementation involves interacting with a Git provider's API: + +- **Git Provider Abstraction:** It uses the \`GitProvider.getInstance(true)\` to abstract away the specifics of different Git platforms (e.g., GitHub, GitLab, Azure DevOps). This ensures the command can work across various environments. +- **API Calls:** The \`gitProvider.listPullRequests()\` method is called with a \`prConstraint\` object that encapsulates the filtering criteria (target branch, minimum date, status). +- **Interactive Prompts:** The \`prompts\` library is used to interactively gather input from the user for the target branch and pull request status if they are not provided as command-line flags. +- **Date Handling:** The \`moment\` library is used to parse and handle date inputs for the \`--min-date\` flag. +- **CSV Generation:** The \`generateCsvFile\` utility is responsible for converting the retrieved pull request data into a CSV format, and \`generateReportPath\` determines the output file location. +- **Error Handling:** It includes error handling for cases where a Git provider cannot be identified. +
+`; + + public static examples = [ + '$ sf hardis:git:pull-requests:extract', + '$ sf hardis:git:pull-requests:extract --target-branch main --status merged', + ]; + + public static flags: any = { + "target-branch": Flags.string({ + char: 't', + description: 'Target branch of PRs', + }), + "status": Flags.string({ + char: 'x', + options: [ + "open", + "merged", + "abandoned" + ], + description: 'Status of the PR', + }), + "min-date": Flags.string({ + char: 'm', + description: 'Minimum date for PR', + }), + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }) + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected outputFile; + protected outputFilesRes: any = {}; + protected pullRequests: any[]; + protected targetBranch: string | null = null; + protected minDateStr: Date | null = null; + protected minDate: Date | null = null; + protected prStatus: string | null = null; + protected debugMode = false; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(GitPullRequestsExtract); + this.targetBranch = flags["target-branch"] || null; + this.minDateStr = flags["min-date"] || null; + this.prStatus = flags["status"] || null; + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; + if (this.minDateStr) { + this.minDate = moment(this.minDateStr).toDate() + } + + // Startup + uxLog("action", this, c.cyan(`This command will extract pull request from Git Server`)); + + const gitProvider = await 
GitProvider.getInstance(true); + if (gitProvider == null) { + throw new SfError("Unable to identify a GitProvider") + } + + // Prompt branch & PR status if not sent + await this.handleUserInput(); + + // Build constraint + const prConstraint: any = {}; + if (this.targetBranch) { + prConstraint.targetBranch = this.targetBranch; + } + if (this.minDate) { + prConstraint.minDate = this.minDate; + } + if (this.prStatus) { + prConstraint.pullRequestStatus = this.prStatus; + } + + // Process call to git provider API + this.pullRequests = await gitProvider.listPullRequests(prConstraint, { formatted: true }); + + this.outputFile = await generateReportPath('pull-requests', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.pullRequests, this.outputFile, { fileTitle: 'Pull Requests' }); + + return { + outputString: `Extracted ${this.pullRequests.length} Pull Requests`, + pullRequests: this.pullRequests, + }; + } + + private async handleUserInput() { + if (!isCI && !this.targetBranch) { + const gitBranch = await selectGitBranch({ + remote: true, + checkOutPull: false, + allowAll: true, + message: "Please select the target branch of PUll Requests" + }); + if (gitBranch && gitBranch !== "ALL BRANCHES") { + this.targetBranch = gitBranch; + } + } + + if (!isCI && !this.prStatus) { + const statusRes = await prompts({ + message: "Please select a status criteria, or all", + type: "select", + description: "Choose which pull request status to filter by", + placeholder: "Select status", + choices: [ + { title: "All status", value: "all" }, + { title: "Merged", value: "merged" }, + { title: "Open", value: "open" }, + { title: "Abandoned", value: "abandoned" } + ] + }); + if (statusRes && statusRes.value !== "all") { + this.prStatus = statusRes.value; + } + } + } +} diff --git a/src/commands/hardis/lint/access.ts b/src/commands/hardis/lint/access.ts index a6c85e72f..3513e37ca 100644 --- a/src/commands/hardis/lint/access.ts +++ b/src/commands/hardis/lint/access.ts @@ 
-1,91 +1,111 @@ /* jscpd:ignore-start */ // External Libraries -import * as c from "chalk"; -import { glob } from "glob"; -import * as path from "path"; -import * as sortArray from "sort-array"; +import c from 'chalk'; +import { glob } from 'glob'; +import * as path from 'path'; +import sortArray from 'sort-array'; // Salesforce Specific -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import * as fs from "fs-extra"; -import { AnyJson } from "@salesforce/ts-types"; +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import fs from 'fs-extra'; +import { AnyJson } from '@salesforce/ts-types'; // Common Utilities -import { isCI, uxLog } from "../../../common/utils"; -import { prompts } from "../../../common/utils/prompts"; -import { parseXmlFile, writeXmlFile } from "../../../common/utils/xmlUtils"; -import { generateCsvFile, generateReportPath } from "../../../common/utils/filesUtils"; -import { NotifProvider, NotifSeverity } from "../../../common/notifProvider"; -import { Parser } from "xml2js"; +import { isCI, uxLog, uxLogTable } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { parseXmlFile, writeXmlFile } from '../../../common/utils/xmlUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../common/utils/filesUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../common/notifProvider/index.js'; +import { Parser } from 'xml2js'; // Config -import { getConfig } from "../../../config"; -import { getBranchMarkdown, getNotificationButtons, getSeverityIcon } from "../../../common/utils/notifUtils"; +import { CONSTANTS, getConfig } from '../../../config/index.js'; +import { getBranchMarkdown, getNotificationButtons, getSeverityIcon } from '../../../common/utils/notifUtils.js'; +import { GLOB_IGNORE_PATTERNS } from 
'../../../common/utils/projectUtils.js'; +import { setConnectionVariables } from '../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class LintAccess extends SfCommand { + public static title = 'check permission access'; -export default class Access extends SfdxCommand { - public static title = "check permission access"; + public static description = ` +## Command Behavior - public static description = `Check if elements(apex class and field) are at least in one permission set - -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-missing-access/) and can output Grafana, Slack and MsTeams Notifications. +**Checks if specified Salesforce metadata elements (Apex classes and custom fields) have at least one permission defined in any Permission Set or Profile.** + +This command is crucial for maintaining proper access control and identifying potential security vulnerabilities or misconfigurations in your Salesforce project. It helps ensure that all custom elements are accessible to the intended users through appropriate permission assignments. + +Key functionalities: + +- **Element Validation:** Verifies that Apex classes and custom fields have \`enabled\` (for Apex classes) or \`readable\`/\`editable\` (for custom fields) access in at least one Permission Set or Profile. 
+- **Configurable Ignores:** Allows you to ignore specific elements or entire types of elements (e.g., all Apex classes, a particular custom field) using the \`--elementsignored\` flag or project configuration. +- **Permission Set/Profile Filtering:** You can specify Permission Sets or Profiles to ignore during the access check using the \`--ignorerights\` flag. +- **Reporting:** Generates a CSV report of all missing access elements, which can be used for auditing or further analysis. +- **Notifications:** Integrates with notification providers (Grafana, Slack, MS Teams) to alert about missing access issues, making it suitable for CI/CD monitoring. +- **Interactive Fix:** In non-CI environments, it offers an interactive prompt to automatically add missing accesses to selected Permission Sets. + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-missing-access/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **File System Traversal:** Uses \`glob\` to find all Apex class (\`.cls\`) and custom field (\`.field-meta.xml\`) files within the specified root folder. +- **XML Parsing:** Parses the XML content of Permission Set (\`.permissionset-meta.xml\`) and Profile (\`.profile-meta.xml\`) files to extract access configurations. +- **Element Filtering:** Filters out elements that are explicitly ignored (via flags or configuration) or are not subject to access checks (e.g., Master-Detail fields, required fields, Custom Metadata Types, Custom Settings). +- **Access Verification Logic:** Iterates through each element to check and verifies if it has the necessary access enabled in any of the non-ignored Permission Sets or Profiles. +- **Data Aggregation:** Collects all elements with missing access into a \`missingElements\` array and \`missingElementsMap\` for reporting and notification purposes. +
`; public static examples = [ - "$ sfdx hardis:lint:access", - '$ sfdx hardis:lint:access -e "ApexClass:ClassA, CustomField:Account.CustomField"', - '$ sfdx hardis:lint:access -i "PermissionSet:permissionSetA, Profile"', + '$ sf hardis:lint:access', + '$ sf hardis:lint:access -e "ApexClass:ClassA, CustomField:Account.CustomField"', + '$ sf hardis:lint:access -i "PermissionSet:permissionSetA, Profile"', ]; - protected static flagsConfig = { - elementsignored: flags.string({ - char: "e", - default: "", - description: "Ignore specific elements separated by commas", + public static flags: any = { + elementsignored: Flags.string({ + char: 'e', + default: '', + description: 'Ignore specific elements separated by commas', }), - ignorerights: flags.string({ - char: "i", - default: "", - description: "Ignore permission sets or profiles", + ignorerights: Flags.string({ + char: 'i', + default: '', + description: 'Ignore permission sets or profiles', }), - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + outputfile: Flags.string({ + char: 'x', + description: 'Force the path and name of output report file. 
Must end with .csv', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': optionalOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - protected static supportsUsername = true; - - // Comment this out if your command does not support a hub org username protected static supportsDevhubUsername = false; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected customSettingsNames: string[] = []; @@ -94,13 +114,13 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co protected outputFile; protected outputFilesRes: any = {}; - protected static sourceElements = [ + protected static sourceElements: any[] = [ { regex: `/**/*.cls`, - type: "ApexClass", - xmlField: "apexClass", - xmlChildren: "classAccesses", - xmlAccessField: "enabled", + type: 'ApexClass', + xmlField: 'apexClass', + xmlChildren: 'classAccesses', + xmlAccessField: 'enabled', ignore: { all: false, elements: [], @@ -108,10 +128,10 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co }, { regex: `/**/objects/**/fields/*__c.field-meta.xml`, - type: "CustomField", - xmlField: "field", - xmlChildren: "fieldPermissions", - xmlAccessField: "readable", + type: 'CustomField', 
+ xmlField: 'field', + xmlChildren: 'fieldPermissions', + xmlAccessField: 'readable', ignore: { all: false, elements: [], @@ -119,26 +139,26 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co }, ]; - private permissionSet = { + private permissionSet: any = { regex: `/**/permissionsets/*.permissionset-meta.xml`, - type: "Permission sets", - name: "PermissionSet", + type: 'Permission sets', + name: 'PermissionSet', isIgnoredAll: false, elementsIgnored: [], }; - private profiles = { + private profiles: any = { regex: `/**/profiles/*.profile-meta.xml`, - type: "Profiles", - name: "Profile", + type: 'Profiles', + name: 'Profile', isIgnoredAll: false, elementsIgnored: [], }; private static messages = { - header: "Check if elements(apex class and field) are at least in one permission set", - allElementsHaveRights: "All elements are included in at least one Permission set or Profile", - someElementsDontHaveRights: "Some elements are not included in at least one Permission set or Profile", + header: 'Check if elements(apex class and field) are at least in one permission set', + allElementsHaveRights: 'All elements are included in at least one Permission set or Profile', + someElementsDontHaveRights: 'Some elements are not included in at least one Permission set or Profile', }; private hasElementsWithNoRights = false; @@ -146,38 +166,47 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co private hasToDisplayJsonOnly = false; public async run(): Promise { - const config = await getConfig("user"); - this.folder = this.flags.folder || "./force-app"; - this.hasToDisplayJsonOnly = process.argv.includes("--json"); + const { flags } = await this.parse(LintAccess); + const config = await getConfig('user'); + this.folder = flags.folder || './force-app'; + this.hasToDisplayJsonOnly = this.argv.includes('--json'); - this.ignoreSourceElementsIfDefined(); - this.ignoreRightElementsIfDefined(config); + 
this.ignoreSourceElementsIfDefined(flags); + this.ignoreRightElementsIfDefined(config, flags); this.customSettingsNames = (await this.listLocalCustomSettings()).map((cs) => cs.name); - uxLog(this, c.green(Access.messages.header)); + uxLog("action", this, c.cyan(LintAccess.messages.header)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); - const elementsToCheckByType = { apexClass: [], field: [] }; + const elementsToCheckByType: any = { apexClass: [], field: [] }; /* ELEMENTS TO CHECK */ - for (const sourceElement of Access.sourceElements) { + for (const sourceElement of LintAccess.sourceElements) { //if the type(apex class, field) is ignored we pass to the next type if (sourceElement.ignore.all) { continue; } - const findManagedPattern = rootFolder + sourceElement["regex"]; - const matchedElements = await glob(findManagedPattern, { cwd: process.cwd() }); + const findManagedPattern = rootFolder + sourceElement['regex']; + const matchedElements = await glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); switch (sourceElement.type) { - case "CustomField": - elementsToCheckByType.field = await this.retrieveElementToCheck(matchedElements, sourceElement.xmlField, sourceElement.ignore.elements); + case 'CustomField': + elementsToCheckByType.field = await this.retrieveElementToCheck( + matchedElements, + sourceElement.xmlField, + sourceElement.ignore.elements + ); break; - case "ApexClass": - elementsToCheckByType.apexClass = await this.retrieveElementToCheck(matchedElements, sourceElement.xmlField, sourceElement.ignore.elements); + case 'ApexClass': + elementsToCheckByType.apexClass = await this.retrieveElementToCheck( + matchedElements, + sourceElement.xmlField, + sourceElement.ignore.elements + ); break; default: @@ -186,99 +215,106 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } const remainingElements = await this.listElementIfNotInProfileOrPermission(rootFolder, 
elementsToCheckByType); - await this.verifyMultipleObjectsInPermissionSets(path.join(process.cwd(), this.folder, "**/permissionsets/*.permissionset-meta.xml")); + await this.verifyMultipleObjectsInPermissionSets( + path.join(process.cwd(), this.folder, '**/permissionsets/*.permissionset-meta.xml') + ); // Write report await this.writeOutputFile(); // Send notification - await this.manageNotification(); + await this.manageNotification(flags); // Prompt user if he/she wants to update a Permission set with missing elements await this.handleFixIssues(); // Handle output status & exitCode const statusCode = this.hasElementsWithNoRights ? 1 : 0; - if ((this.argv || []).includes("audittrail")) { + if ((this.argv || []).includes('audittrail')) { process.exitCode = statusCode; } return { statusCode: statusCode, outputString: remainingElements }; } - private ignoreSourceElementsIfDefined() { - const ignoreElements = this.flags.elementsignored; + private ignoreSourceElementsIfDefined(flags) { + const ignoreElements = flags.elementsignored; - for (const ignoredElement of ignoreElements.split(",")) { - const elementTrimmed = ignoredElement.trim(); + for (const ignoredElement of ignoreElements.split(',')) { + const elementTrimmed: string = ignoredElement.trim(); //check if all elements of a type are ignored - if (elementTrimmed === "ApexClass") { - Access.sourceElements[0].ignore.all = true; - } else if (elementTrimmed === "CustomField") { - Access.sourceElements[1].ignore.all = true; + if (elementTrimmed === 'ApexClass') { + LintAccess.sourceElements[0].ignore.all = true; + } else if (elementTrimmed === 'CustomField') { + LintAccess.sourceElements[1].ignore.all = true; } //check individual elements (ex : ApexClass:ClassB) - else if (elementTrimmed.startsWith("ApexClass")) { - Access.sourceElements[0].ignore.elements.push(elementTrimmed.substring(elementTrimmed.indexOf(":") + 1).trim()); - } else if (elementTrimmed.startsWith("CustomField")) { - 
Access.sourceElements[1].ignore.elements.push(elementTrimmed.substring(elementTrimmed.indexOf(":") + 1).trim()); + else if (elementTrimmed.startsWith('ApexClass')) { + LintAccess.sourceElements[0].ignore.elements.push( + elementTrimmed.substring(elementTrimmed.indexOf(':') + 1).trim() + ); + } else if (elementTrimmed.startsWith('CustomField')) { + LintAccess.sourceElements[1].ignore.elements.push( + elementTrimmed.substring(elementTrimmed.indexOf(':') + 1).trim() + ); } } } - private ignoreRightElementsIfDefined(projectConfig) { - const ignoreElements = this.flags.ignorerights ? this.flags.ignorerights : projectConfig.linterIgnoreRightMetadataFile; + private ignoreRightElementsIfDefined(projectConfig, flags) { + const ignoreElements = flags.ignorerights ? flags.ignorerights : projectConfig.linterIgnoreRightMetadataFile; if (!ignoreElements) { return; } - for (const ignoredElement of ignoreElements.split(",")) { - const elementTrimmed = ignoredElement.trim(); + for (const ignoredElement of ignoreElements.split(',')) { + const elementTrimmed: string = ignoredElement.trim(); if (elementTrimmed === this.profiles.name) { this.profiles.isIgnoredAll = true; } else if (elementTrimmed.startsWith(this.profiles.name)) { - this.profiles.elementsIgnored.push(elementTrimmed.substring(elementTrimmed.indexOf(":") + 1).trim()); + this.profiles.elementsIgnored.push(elementTrimmed.substring(elementTrimmed.indexOf(':') + 1).trim()); } if (elementTrimmed === this.permissionSet.name) { this.permissionSet.isIgnoredAll = true; } else if (elementTrimmed.startsWith(this.permissionSet.name)) { - this.permissionSet.elementsIgnored.push(elementTrimmed.substring(elementTrimmed.indexOf(":") + 1).trim()); + this.permissionSet.elementsIgnored.push(elementTrimmed.substring(elementTrimmed.indexOf(':') + 1).trim()); } } } private formatElementNameFromPath(path, type): string { - if (type === "field") { - const fieldRoute = path.substring(path.indexOf("objects/")); + if (type === 'field') { + const 
fieldRoute = path.substring(path.indexOf('objects/')); const objectField = fieldRoute - .substring(fieldRoute.indexOf("/") + 1) - .replace("/fields/", ".") - .replace(".field-meta.xml", ""); + .substring(fieldRoute.indexOf('/') + 1) + .replace('/fields/', '.') + .replace('.field-meta.xml', ''); return objectField; - } else if (type === "apexClass") { - return path.substring(path.indexOf("classes/")).replace("classes/", "").replace(".cls", "").split("/").pop(); + } else if (type === 'apexClass') { + return path.substring(path.indexOf('classes/')).replace('classes/', '').replace('.cls', '').split('/').pop(); } - return ""; + return ''; } private async retrieveElementToCheck(elements, xmlField, excludedElements): Promise> { - let fieldsToSearch = []; + let fieldsToSearch: any[] = []; - for (const element of elements) { + for (let element of elements) { + element = element.replace(/\\/g, '/'); // Exclude mandatory fields - if (element.endsWith(".field-meta.xml")) { + if (element.endsWith('.field-meta.xml')) { const fieldXml = await parseXmlFile(element); // Mater detail - if (fieldXml?.CustomField?.type && fieldXml?.CustomField?.type[0] === "MasterDetail") { + if (fieldXml?.CustomField?.type && fieldXml?.CustomField?.type[0] === 'MasterDetail') { continue; } // Required - if (fieldXml?.CustomField?.required && fieldXml?.CustomField?.required[0] === "true") { + if (fieldXml?.CustomField?.required && fieldXml?.CustomField?.required[0] === 'true') { continue; } // Check Parent is not eligible to fields access - const parentObject = element.substring(element.indexOf("objects/")).split("/")[1]; + const parentObject = element.substring(element.indexOf('objects/')).split('/')[1]; // Custom Metadata or DataCloud - if (parentObject.endsWith("__mdt") || parentObject.endsWith("__dll") || parentObject.endsWith("__dlm")) { + if (parentObject.endsWith('__mdt') || parentObject.endsWith('__dll') || parentObject.endsWith('__dlm')) { continue; } // Custom Setting @@ -303,73 +339,93 @@ 
This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private ruleBasedCheckForFields(el: string): Array { - const otherElementsToCheck = []; + const otherElementsToCheck: any[] = []; // Activity is the parent object of Task and Event: check also rights to avoid false positives - if (el.startsWith("Activity.")) { - const field = el.split(".")[1]; - otherElementsToCheck.push("Task." + field); - otherElementsToCheck.push("Event." + field); + if (el.startsWith('Activity.')) { + const field = el.split('.')[1]; + otherElementsToCheck.push('Task.' + field); + otherElementsToCheck.push('Event.' + field); } return otherElementsToCheck; } private async listElementIfNotInProfileOrPermission(rootFolder, elementsToCheckByType) { - const profilesFiles = await glob(rootFolder + this.profiles["regex"], { cwd: process.cwd() }); + const profilesFiles = await glob(rootFolder + this.profiles['regex'], { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); let remainingElements = elementsToCheckByType; //CHECK PROFILES FIRST if (!this.profiles.isIgnoredAll) { - remainingElements = await this.retrieveElementsWithoutRights(this.profiles.name, profilesFiles, elementsToCheckByType); + remainingElements = await this.retrieveElementsWithoutRights( + this.profiles.name, + profilesFiles, + elementsToCheckByType + ); } if (this.hasRemainingElementsToCheck(remainingElements) && !this.permissionSet.isIgnoredAll) { - const permissionSetFiles = await glob(rootFolder + this.permissionSet["regex"], { cwd: process.cwd() }); - remainingElements = await this.retrieveElementsWithoutRights(this.permissionSet.name, permissionSetFiles, remainingElements); + const permissionSetFiles = await glob(rootFolder + this.permissionSet['regex'], { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); + remainingElements = await this.retrieveElementsWithoutRights( + this.permissionSet.name, + permissionSetFiles, + remainingElements + ); } if 
(!this.hasRemainingElementsToCheck(remainingElements)) { - uxLog(this, c.green(Access.messages.allElementsHaveRights)); - return Access.messages.allElementsHaveRights; + uxLog("success", this, c.green(LintAccess.messages.allElementsHaveRights)); + return LintAccess.messages.allElementsHaveRights; } else { //list remaining elements after checking on profiles and permissions sets this.missingElementsMap = Object.assign({}, remainingElements); this.missingElements = []; - const severityIcon = getSeverityIcon("warning"); + const severityIcon = getSeverityIcon('warning'); for (const missingType of Object.keys(this.missingElementsMap)) { for (const missingItem of this.missingElementsMap[missingType]) { - this.missingElements.push({ type: missingType, element: missingItem, severity: "warning", severityIcon: severityIcon }); + this.missingElements.push({ + type: missingType, + element: missingItem, + severity: 'warning', + severityIcon: severityIcon, + }); } } remainingElements = this.constructLogAndDisplayTable(remainingElements); } - return this.hasToDisplayJsonOnly ? remainingElements : ""; + return this.hasToDisplayJsonOnly ? 
remainingElements : ''; } private formatPathPermissionSetOrProfile(typeFile, path) { if (typeFile == this.profiles.name) { - return path.substring(path.indexOf("profiles/")).replace("profiles/", "").replace(".profile-meta.xml", ""); + return path.substring(path.indexOf('profiles/')).replace('profiles/', '').replace('.profile-meta.xml', ''); } else if (typeFile == this.permissionSet.name) { - return path.substring(path.indexOf("permissionsets/")).replace("permissionsets/", "").replace(".permissionset-meta.xml", ""); + return path + .substring(path.indexOf('permissionsets/')) + .replace('permissionsets/', '') + .replace('.permissionset-meta.xml', ''); } - return ""; + return ''; } private async retrieveElementsWithoutRights(typeFile, files, elementsToCheckByType) { const remainingElements = elementsToCheckByType; if (typeFile == this.profiles.name) { - files = files.filter((e) => !this.profiles.elementsIgnored.includes(this.formatPathPermissionSetOrProfile(typeFile, e))); + files = files.filter( + (e) => !this.profiles.elementsIgnored.includes(this.formatPathPermissionSetOrProfile(typeFile, e)) + ); } else if (typeFile === this.permissionSet.name) { - files = files.filter((e) => !this.permissionSet.elementsIgnored.includes(this.formatPathPermissionSetOrProfile(typeFile, e))); + files = files.filter( + (e) => !this.permissionSet.elementsIgnored.includes(this.formatPathPermissionSetOrProfile(typeFile, e)) + ); } for (const file of files) { const fileXml = await parseXmlFile(file); //checking all elements in the current type - for (const currentType of Access.sourceElements) { + for (const currentType of LintAccess.sourceElements) { //checking if current type is at least once in the current profile or permission set if (!(currentType.xmlChildren in fileXml[typeFile]) || fileXml[typeFile][currentType.xmlChildren].length == 0) { continue; @@ -379,11 +435,11 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co //only readable(for fields) or 
enabled(apex class) rights are relevant if ( permission && - permission[currentType.xmlAccessField][0] == "true" && - elementsToCheckByType[currentType.xmlField].includes(permission[currentType.xmlField][0]) + permission[currentType.xmlAccessField]?.[0] == 'true' && + elementsToCheckByType[currentType.xmlField].includes(permission[currentType.xmlField]?.[0]) ) { remainingElements[currentType.xmlField] = remainingElements[currentType.xmlField].filter( - (e) => e !== permission[currentType.xmlField][0], + (e) => e !== permission[currentType.xmlField]?.[0] ); } } @@ -402,17 +458,17 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private constructLogAndDisplayTable(remainingElements) { - const remainingElementsTable = []; + const remainingElementsTable: any[] = []; let counterTable = 0; - for (const currentType of Access.sourceElements) { + for (const currentType of LintAccess.sourceElements) { for (const e of remainingElements[currentType.xmlField]) { if (!remainingElementsTable[counterTable]) { remainingElementsTable[counterTable] = {}; } - remainingElementsTable[counterTable]["Type"] = currentType.type; - remainingElementsTable[counterTable]["Element"] = e; + remainingElementsTable[counterTable]['Type'] = currentType.type; + remainingElementsTable[counterTable]['Element'] = e; counterTable++; this.hasElementsWithNoRights = true; } @@ -420,8 +476,8 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co //we create an object to have a custom header in the table if (!this.hasToDisplayJsonOnly) { - uxLog(this, c.red(Access.messages.someElementsDontHaveRights)); - console.table(remainingElementsTable); + uxLog("action", this, c.cyan(LintAccess.messages.someElementsDontHaveRights)); + uxLogTable(this, remainingElementsTable); } return remainingElements; @@ -431,19 +487,19 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co if (this.missingElements.length === 0) { return; } - 
this.outputFile = await generateReportPath("lint-access", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.missingElements, this.outputFile); + this.outputFile = await generateReportPath('lint-access', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.missingElements, this.outputFile, { fileTitle: 'Missing Access Elements' }); } - private async manageNotification() { + private async manageNotification(flags) { const branchMd = await getBranchMarkdown(); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No custom elements have no access defined in any Profile or Permission set in ${branchMd}`; - let attachments = []; + let attachments: any[] = []; // Manage detail in case there are issues if (this.missingElements.length > 0) { - notifSeverity = "warning"; + notifSeverity = 'warning'; notifText = `${this.missingElements.length} custom elements have no access defined in any Profile or Permission set in ${branchMd}`; let notifDetailText = ``; for (const missingType of Object.keys(this.missingElementsMap)) { @@ -457,9 +513,9 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co attachments = [{ text: notifDetailText }]; } - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "LINT_ACCESS", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'LINT_ACCESS', text: notifText, attachments: attachments, buttons: notifButtons, @@ -474,37 +530,42 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async handleFixIssues() { - if (!isCI && this.missingElements.length > 0 && this.argv.includes("--websocket")) { + if (!isCI && 
this.missingElements.length > 0 && this.argv.includes('--websocket')) { const promptUpdate = await prompts({ - type: "confirm", - message: c.cyanBright("Do you want to add the missing accesses in permission sets ?"), + type: 'confirm', + message: c.cyanBright('Do you want to add the missing accesses in permission sets ?'), + description: 'Confirm if you want to automatically fix missing access issues', }); if (promptUpdate.value === true) { const availablePermissionSets = await this.listLocalPermissionSets(); const promptsElementsPs = await prompts([ { - type: "multiselect", - name: "elements", - message: "Please select the elements you want to add in Permission Set(s)", + type: 'multiselect', + name: 'elements', + message: 'Please select the elements you want to add in Permission Set(s)', + description: 'Choose which missing access elements to add to permission sets', choices: this.missingElements.map((elt) => { return { title: `${elt.type}: ${elt.element}`, value: elt }; }), }, { - type: "multiselect", - name: "permissionSets", - message: "Please select the permission sets you want to update with selected elements", + type: 'multiselect', + name: 'permissionSets', + message: 'Please select the permission sets you want to update with selected elements', + description: 'Choose which permission sets should receive the selected access elements', choices: availablePermissionSets.map((elt) => { return { title: elt.name, value: elt.filePath }; }), }, { - type: "select", - name: "access", - message: "Please select the accesses to set for the custom fields", + type: 'select', + name: 'access', + message: 'Please select the accesses to set for the custom fields', + description: 'Choose the level of access to grant for custom fields', + placeholder: 'Select access level', choices: [ - { title: "Readable", value: "readable" }, - { title: "Readable & Editable", value: "editable" }, + { title: 'Readable', value: 'readable' }, + { title: 'Readable & Editable', value: 'editable' 
}, ], }, ]); @@ -513,24 +574,26 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co await this.updatePermissionSets( promptsElementsPs.permissionSets, promptsElementsPs.elements, - promptsElementsPs.access === "editable" ? { readable: true, editable: true } : { readable: true, editable: false }, + promptsElementsPs.access === 'editable' + ? { readable: true, editable: true } + : { readable: true, editable: false } ); } } } else if (this.missingElements.length > 0) { - uxLog(this, c.yellow("Please add missing access on permission set(s)")); - uxLog(this, c.yellow("You can do it by running VsCode SFDX Hardis command Audit -> Detect missing permissions")); + uxLog("warning", this, c.yellow('Please add missing access on permission set(s)')); + uxLog("warning", this, c.yellow('You can do it by running VsCode SFDX Hardis command Audit -> Detect missing permissions')); } } private async listLocalCustomSettings() { const globPatternObjects = process.cwd() + `/**/*.object-meta.xml`; - const objectFiles = await glob(globPatternObjects); - const csList = []; + const objectFiles = await glob(globPatternObjects, { ignore: GLOB_IGNORE_PATTERNS }); + const csList: any[] = []; for (const objectFile of objectFiles) { const objectXml = await parseXmlFile(objectFile); if (objectXml?.CustomObject?.customSettingsType?.length > 0) { - csList.push({ name: path.basename(objectFile).replace(".object-meta.xml", ""), filePath: objectFile }); + csList.push({ name: path.basename(objectFile).replace('.object-meta.xml', ''), filePath: objectFile }); } } return csList; @@ -538,10 +601,10 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co private async listLocalPermissionSets() { const globPatternPS = process.cwd() + `/**/*.permissionset-meta.xml`; - const psFiles = await glob(globPatternPS); - const psList = []; + const psFiles = await glob(globPatternPS, { ignore: GLOB_IGNORE_PATTERNS }); + const psList: any[] = []; for (const ps 
of psFiles) { - psList.push({ name: path.basename(ps).replace(".permissionset-meta.xml", ""), filePath: ps }); + psList.push({ name: path.basename(ps).replace('.permissionset-meta.xml', ''), filePath: ps }); } return psList; } @@ -550,9 +613,10 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co for (const permissionSetFile of permissionSetFiles) { const psFileXml = await parseXmlFile(permissionSetFile); for (const element of elements) { + element.element = element.element.replace(/\\/g, '/'); // Apex class access - if (element.type === "apexClass") { - const className = element.element.split("/").pop(); + if (element.type === 'apexClass') { + const className = element.element.split('/').pop(); let classAccesses = psFileXml.PermissionSet?.classAccesses || []; let updated = false; classAccesses = classAccesses.map((item) => { @@ -569,12 +633,12 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co }); } psFileXml.PermissionSet.classAccesses = sortArray(classAccesses, { - by: ["apexClass"], - order: ["asc"], + by: ['apexClass'], + order: ['asc'], }); } // Custom field permission - else if (element.type === "field") { + else if (element.type === 'field') { let fieldPermissions = psFileXml.PermissionSet?.fieldPermissions || []; let updated = false; fieldPermissions = fieldPermissions.map((item) => { @@ -593,18 +657,20 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co }); } psFileXml.PermissionSet.fieldPermissions = sortArray(fieldPermissions, { - by: ["field"], - order: ["asc"], + by: ['field'], + order: ['asc'], }); } } await writeXmlFile(permissionSetFile, psFileXml); } - throw new SfdxError(c.red("Your permission sets has been updated: please CHECK THE UPDATES then commit and push !")); + uxLog("action", this, c.cyan('Permission sets updated successfully!')); + uxLog("warning", this, c.yellow('Please commit and push your changes to the repository!')); + throw new 
SfError(c.red('Your permission sets has been updated: please CHECK THE UPDATES then commit and push !')); } private async readFile(filePath: string): Promise { - return fs.readFile(filePath, "utf8"); + return fs.readFile(filePath, 'utf8'); } private async parseString(xml: string): Promise { @@ -613,7 +679,7 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async verifyMultipleObjectsInPermissionSets(permissionsetsDirectory: string): Promise { - const permissionFiles = await glob(permissionsetsDirectory, { cwd: process.cwd() }); + const permissionFiles = await glob(permissionsetsDirectory, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const permissionFile of permissionFiles) { const content = await this.readFile(permissionFile); @@ -635,11 +701,11 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co this.hasElementsWithNoRights = true; const permissionSetName = path.basename(permissionFile); for (const obj of multipleOccurrences) { - this.missingElements.push({ type: "MultipleObjectPermissions", element: `${obj} in ${permissionSetName}` }); - if (!this.missingElementsMap["MultipleObjectPermissions"]) { - this.missingElementsMap["MultipleObjectPermissions"] = []; + this.missingElements.push({ type: 'MultipleObjectPermissions', element: `${obj} in ${permissionSetName}` }); + if (!this.missingElementsMap['MultipleObjectPermissions']) { + this.missingElementsMap['MultipleObjectPermissions'] = []; } - this.missingElementsMap["MultipleObjectPermissions"].push(`${obj} in ${permissionSetName}`); + this.missingElementsMap['MultipleObjectPermissions'].push(`${obj} in ${permissionSetName}`); } } } diff --git a/src/commands/hardis/lint/metadatastatus.ts b/src/commands/hardis/lint/metadatastatus.ts index b8e4fd4c0..063188443 100644 --- a/src/commands/hardis/lint/metadatastatus.ts +++ b/src/commands/hardis/lint/metadatastatus.ts @@ -1,104 +1,193 @@ /* jscpd:ignore-start */ // External 
Libraries and Node.js Modules -import { glob } from "glob"; -import * as fs from "fs-extra"; -import * as path from "path"; +import { glob } from 'glob'; +import fs from 'fs-extra'; +import * as path from 'path'; // Salesforce Specific -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; // Project Specific Utilities -import { uxLog } from "../../../common/utils"; -import { NotifProvider, NotifSeverity } from "../../../common/notifProvider"; -import { MessageAttachment } from "@slack/types"; -import { getBranchMarkdown, getNotificationButtons, getSeverityIcon } from "../../../common/utils/notifUtils"; -import { generateCsvFile, generateReportPath } from "../../../common/utils/filesUtils"; -import { GLOB_IGNORE_PATTERNS } from "../../../common/utils/projectUtils"; - -// Initialize and Load Messages -Messages.importMessagesDirectory(__dirname); -const messages = Messages.loadMessages("sfdx-hardis", "org"); +import { uxLog } from '../../../common/utils/index.js'; +import { NotifProvider, NotifSeverity } from '../../../common/notifProvider/index.js'; +import { MessageAttachment } from '@slack/types'; +import { getBranchMarkdown, getNotificationButtons, getSeverityIcon } from '../../../common/utils/notifUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../common/utils/filesUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; +import { CONSTANTS } from '../../../config/index.js'; +import { setConnectionVariables } from '../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + /* jscpd:ignore-end */ -export default 
class metadatastatus extends SfdxCommand { - public static title = "check inactive metadatas"; - public static description = `Check if elements (flows and validation rules) are inactive in the project +export default class LintMetadataStatus extends SfCommand { + public static title = 'check inactive metadatas'; + public static description = ` +## Command Behavior + +**Checks for inactive metadata elements within your Salesforce DX project, helping to maintain a clean and efficient codebase.** + +This command identifies various types of metadata components that are marked as inactive in your local project files. Keeping metadata active and relevant is crucial for deployment success, performance, and avoiding confusion. This tool helps you pinpoint and address such inactive elements. + +It specifically checks for the inactive status of: + +- **Approval Processes** +- **Assignment Rules** +- **Auto Response Rules** +- **Escalation Rules** +- **Flows** (specifically those in 'Draft' status) +- **Forecasting Types** +- **Record Types** +- **Validation Rules** +- **Workflow Rules** -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-inactive-metadata/) and can output Grafana, Slack and MsTeams Notifications. +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/detect-inactive-metadata.gif) + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-inactive-metadata/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses \`glob\` patterns (e.g., \`**/flows/**/*.flow-meta.xml\`, \`**/objects/**/validationRules/*.validationRule-meta.xml\`) to locate relevant metadata files within your project. +- **XML Parsing:** For each identified metadata file, it reads the XML content and parses it to extract the \`active\` or \`status\` flag (e.g., \`false\`, \`Draft\`). +- **Status Verification:** It checks the value of these flags to determine if the metadata component is inactive. +- **Data Aggregation:** All detected inactive items are collected into a list, including their type, name, and a severity level. +- **Report Generation:** It generates a CSV report (\`lint-metadatastatus.csv\`) containing details of all inactive metadata elements, which can be used for further analysis or record-keeping. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications (e.g., to Slack, MS Teams, Grafana) about the presence and count of inactive metadata, making it suitable for automated monitoring in CI/CD pipelines. +- **Error Handling:** It includes basic error handling for file operations and ensures that the process continues even if some files cannot be read. +
`; - public static examples = ["$ sfdx hardis:lint:metadatastatus"]; + + public static examples = ['$ sf hardis:lint:metadatastatus']; /* jscpd:ignore-start */ - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': optionalOrgFlagWithDeprecations, }; /* jscpd:ignore-end */ - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - protected static supportsUsername = true; - // Comment this out if your command does not support a hub org username protected static supportsDevhubUsername = false; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; - private flowFilePattern = "**/flows/**/*.flow-meta.xml"; - private validationRuleFilePattern = "**/objects/**/validationRules/*.validationRule-meta.xml"; + public static requiresProject = true; + private flowFilePattern = '**/flows/**/*.flow-meta.xml'; + private validationRuleFilePattern = '**/objects/**/validationRules/*.validationRule-meta.xml'; private ignorePatterns: string[] = GLOB_IGNORE_PATTERNS; - protected inactiveItems = []; 
+ protected inactiveItems: any[] = []; protected outputFile: string; protected outputFilesRes: any = {}; public async run(): Promise { + const { flags } = await this.parse(LintMetadataStatus); + + const inactiveApprovalProcesses = await this.verifyApprovalProcesses(); + const inactiveAssignmentRules = await this.verifyAssignmentRules(); + const inactiveAutoResponseRules = await this.verifyAutoResponseRules(); + const inactiveEscalationRules = await this.verifyEscalationRules(); const draftFlows = await this.verifyFlows(); + const inactiveForecastingTypes = await this.verifyForecastingTypes(); + const inactiveRecordTypes = await this.verifyRecordTypes(); const inactiveValidationRules = await this.verifyValidationRules(); + const inactiveWorkflows = await this.verifyWorkflowRules(); + this.inactiveItems = [ + ...inactiveApprovalProcesses, + ...inactiveAssignmentRules, + ...inactiveAutoResponseRules, + ...draftFlows, + ...inactiveEscalationRules, + ...inactiveForecastingTypes, + ...inactiveRecordTypes, + ...inactiveValidationRules, + ...inactiveWorkflows, + ]; // Prepare notifications const branchMd = await getBranchMarkdown(); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No inactive configuration elements has been found in ${branchMd}`; const attachments: MessageAttachment[] = []; - if (draftFlows.length > 0 || inactiveValidationRules.length > 0) { - notifSeverity = "warning"; + if (this.inactiveItems.length > 0) { + notifSeverity = 'warning'; + if (inactiveApprovalProcesses.length > 0) { + attachments.push({ + text: `*Inactive Approval Processes*\n${inactiveApprovalProcesses.map((file) => `• ${file.name}`).join('\n')}`, + }); + } + if (inactiveAssignmentRules.length > 0) { + attachments.push({ + text: `*Inactive Assignment Rules*\n${inactiveAssignmentRules.map((file) => `• ${file.name}`).join('\n')}`, + }); + } + if (inactiveAutoResponseRules.length > 0) 
{ + attachments.push({ + text: `*Inactive Auto Response Rules*\n${inactiveAutoResponseRules.map((file) => `• ${file.name}`).join('\n')}`, + }); + } + if (inactiveEscalationRules.length > 0) { + attachments.push({ + text: `*Inactive Escalation Rules*\n${inactiveEscalationRules.map((file) => `• ${file.name}`).join('\n')}`, + }); + } if (draftFlows.length > 0) { attachments.push({ - text: `*Inactive Flows*\n${draftFlows.map((file) => `• ${file.name}`).join("\n")}`, + text: `*Inactive Flows*\n${draftFlows.map((file) => `• ${file.name}`).join('\n')}`, + }); + } + if (inactiveForecastingTypes.length > 0) { + attachments.push({ + text: `*Inactive Forecasting Types*\n${inactiveForecastingTypes.map((file) => `• ${file.name}`).join('\n')}`, + }); + } + if (inactiveRecordTypes.length > 0) { + attachments.push({ + text: `*Inactive Record Types*\n${inactiveRecordTypes.map((file) => `• ${file.name}`).join('\n')}`, }); } if (inactiveValidationRules.length > 0) { attachments.push({ - text: `*Inactive Validation Rules*\n${inactiveValidationRules.map((file) => `• ${file.name}`).join("\n")}`, + text: `*Inactive Validation Rules*\n${inactiveValidationRules.map((file) => `• ${file.name}`).join('\n')}`, + }); + } + if (inactiveWorkflows.length > 0) { + attachments.push({ + text: `*Inactive Workflow Rules*\n${inactiveWorkflows.map((file) => `• ${file.name}`).join('\n')}`, }); } - const numberInactive = draftFlows.length + inactiveValidationRules.length; - notifText = `${numberInactive} inactive configuration elements have been found in ${branchMd}`; - await this.buildCsvFile(draftFlows, inactiveValidationRules); + + notifText = `${this.inactiveItems.length} inactive configuration elements have been found in ${branchMd}`; + // Build result file + await this.buildCsvFile(); } else { - uxLog(this, "No draft flow or validation rule files detected."); + uxLog("other", this, 'No draft flow or validation rule files detected.'); } // Post notifications - globalThis.jsForceConn = 
this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "METADATA_STATUS", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'METADATA_STATUS', text: notifText, attachments: attachments, buttons: notifButtons, severity: notifSeverity, - sideImage: "flow", + sideImage: 'flow', attachedFiles: this.outputFilesRes.xlsxFile ? [this.outputFilesRes.xlsxFile] : [], logElements: this.inactiveItems, data: { metric: this.inactiveItems.length }, @@ -120,16 +209,16 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co private async verifyFlows(): Promise { const draftFiles: any[] = []; const flowFiles: string[] = await glob(this.flowFilePattern, { ignore: this.ignorePatterns }); - const severityIcon = getSeverityIcon("warning"); + const severityIcon = getSeverityIcon('warning'); for (const file of flowFiles) { - const flowContent: string = await fs.readFile(file, "utf-8"); - if (flowContent.includes("Draft")) { - const fileName = path.basename(file, ".flow-meta.xml"); - draftFiles.push({ type: "Draft Flow", name: fileName, severity: "warning", severityIcon: severityIcon }); + const flowContent: string = await fs.readFile(file, 'utf-8'); + if (flowContent.includes('Draft')) { + const fileName = path.basename(file, '.flow-meta.xml'); + draftFiles.push({ type: 'Flow (draft)', name: fileName, severity: 'warning', severityIcon: severityIcon }); } } - return draftFiles; + return draftFiles.sort((a, b) => a.name.localeCompare(b.name)); } /** @@ -142,17 +231,190 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co private async verifyValidationRules(): Promise { const inactiveRules: any[] = []; const validationRuleFiles: string[] = await glob(this.validationRuleFilePattern, { ignore: this.ignorePatterns }); - const severityIcon = 
getSeverityIcon("warning"); + const severityIcon = getSeverityIcon('warning'); for (const file of validationRuleFiles) { - const ruleContent: string = await fs.readFile(file, "utf-8"); - if (ruleContent.includes("false")) { - const ruleName = path.basename(file, ".validationRule-meta.xml"); + // Skip if validation rule is from a managed package + if (path.basename(file).includes('__')) { + continue; + } + const ruleContent: string = await fs.readFile(file, 'utf-8'); + if (ruleContent.includes('false')) { + const ruleName = path.basename(file, '.validationRule-meta.xml'); const objectName = path.basename(path.dirname(path.dirname(file))); - inactiveRules.push({ type: "Inactive VR", name: `${objectName} - ${ruleName}`, severity: "warning", severityIcon: severityIcon }); + inactiveRules.push({ + type: 'Validation Rule (inactive)', + name: `${objectName} - ${ruleName}`, + severity: 'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveRules.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async verifyRecordTypes(): Promise { + const inactiveRecordTypes: any[] = []; + const recordTypeFiles: string[] = await glob('**/objects/**/recordTypes/*.recordType-meta.xml', { + ignore: this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of recordTypeFiles) { + const recordTypeName = path.basename(file, '.recordType-meta.xml'); + const objectName = path.basename(path.dirname(path.dirname(file))); + // Skip if record type is from a managed package + if (path.basename(recordTypeName).includes('__')) { + continue; + } + const recordTypeXml: string = await fs.readFile(file, 'utf-8'); + if (recordTypeXml.includes('false')) { + inactiveRecordTypes.push({ + type: 'Record Type (inactive)', + name: `${objectName} - ${recordTypeName}`, + severity: 'warning', + severityIcon: severityIcon, + }); } } - return inactiveRules; + return inactiveRecordTypes.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async 
verifyApprovalProcesses(): Promise { + const inactiveApprovalProcesses: any[] = []; + const approvalProcessFiles: string[] = await glob('**/approvalProcesses/**/*.approvalProcess-meta.xml', { + ignore: this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of approvalProcessFiles) { + const approvalProcessFullName = path.basename(file, '.approvalProcess-meta.xml'); + const [objectName, approvalProcessName] = approvalProcessFullName.split('.'); + // Skip if approval process is from a managed package + if (path.basename(approvalProcessName).includes('__')) { + continue; + } + const approvalProcessXml: string = await fs.readFile(file, 'utf-8'); + if (approvalProcessXml.includes('false')) { + inactiveApprovalProcesses.push({ + type: 'Approval Process (inactive)', + name: `${objectName} - ${approvalProcessName}`, + severity: 'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveApprovalProcesses.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async verifyForecastingTypes(): Promise { + const inactiveForecastTypes: any[] = []; + const forecastTypeFiles: string[] = await glob('**/forecastingTypes/**/*.forecastingType-meta.xml', { + ignore: this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of forecastTypeFiles) { + const forecastingTypeName = path.basename(file, '.forecastingType-meta.xml'); + const forecastTypeXml: string = await fs.readFile(file, 'utf-8'); + if (forecastTypeXml.includes('false')) { + inactiveForecastTypes.push({ + type: 'Forecasting Type (inactive)', + name: forecastingTypeName, + severity: 'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveForecastTypes.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async verifyWorkflowRules(): Promise { + const inactiveWorkflowRules: any[] = []; + const workflowRuleFiles: string[] = await glob('**/workflows/**/*.workflow-meta.xml', { + ignore: 
this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of workflowRuleFiles) { + const workflowRuleName = path.basename(file, '.workflow-meta.xml'); + const workflowRuleXml: string = await fs.readFile(file, 'utf-8'); + if (workflowRuleXml.includes('false')) { + inactiveWorkflowRules.push({ + type: 'Workflow Rule (inactive)', + name: workflowRuleName, + severity: 'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveWorkflowRules.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async verifyAssignmentRules(): Promise { + const inactiveAssignmentRules: any[] = []; + const assignmentRuleFiles: string[] = await glob('**/assignmentRules/**/*.assignmentRules-meta.xml', { + ignore: this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of assignmentRuleFiles) { + const assignmentRuleName = path.basename(file, '.assignmentRules-meta.xml'); + const assignmentRuleXml: string = await fs.readFile(file, 'utf-8'); + if (assignmentRuleXml.includes('false')) { + inactiveAssignmentRules.push({ + type: 'Assignment Rule (inactive)', + name: assignmentRuleName, + severity: 'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveAssignmentRules.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async verifyAutoResponseRules(): Promise { + const inactiveAutoResponseRules: any[] = []; + const autoResponseRuleFiles: string[] = await glob('**/autoResponseRules/**/*.autoResponseRules-meta.xml', { + ignore: this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of autoResponseRuleFiles) { + const autoResponseRuleName = path.basename(file, '.autoResponseRules-meta.xml'); + const autoResponseRuleXml: string = await fs.readFile(file, 'utf-8'); + if (autoResponseRuleXml.includes('false')) { + inactiveAutoResponseRules.push({ + type: 'Auto Response Rule (inactive)', + name: autoResponseRuleName, + severity: 
'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveAutoResponseRules.sort((a, b) => a.name.localeCompare(b.name)); + } + + private async verifyEscalationRules(): Promise { + const inactiveEscalationRules: any[] = []; + const escalationRuleFiles: string[] = await glob('**/escalationRules/**/*.escalationRules-meta.xml', { + ignore: this.ignorePatterns, + }); + const severityIcon = getSeverityIcon('warning'); + for (const file of escalationRuleFiles) { + const escalationRuleName = path.basename(file, '.escalationRules-meta.xml'); + const escalationRuleXml: string = await fs.readFile(file, 'utf-8'); + if (escalationRuleXml.includes('false')) { + inactiveEscalationRules.push({ + type: 'Escalation Rule (inactive)', + name: escalationRuleName, + severity: 'warning', + severityIcon: severityIcon, + }); + } + } + + return inactiveEscalationRules.sort((a, b) => a.name.localeCompare(b.name)); } /** @@ -161,14 +423,10 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co * It then maps the draft flows and inactive validation rules into an array of objects, each with a 'type' property set to either "Draft Flow" or "Inactive VR" and a 'name' property set to the file or rule name. * Finally, it generates a CSV file from this array and writes it to the output file. * - * @param {string[]} draftFlows - An array of draft flow names. - * @param {string[]} inactiveValidationRules - An array of inactive validation rule names. * @returns {Promise} - A Promise that resolves when the CSV file has been successfully generated. 
*/ - private async buildCsvFile(draftFlows: string[], inactiveValidationRules: string[]): Promise { - this.outputFile = await generateReportPath("lint-metadatastatus", this.outputFile); - this.inactiveItems = [...draftFlows, ...inactiveValidationRules]; - - this.outputFilesRes = await generateCsvFile(this.inactiveItems, this.outputFile); + private async buildCsvFile(): Promise { + this.outputFile = await generateReportPath('lint-metadatastatus', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.inactiveItems, this.outputFile, { fileTitle: 'Inactive Metadata Elements' }); } } diff --git a/src/commands/hardis/lint/missingattributes.ts b/src/commands/hardis/lint/missingattributes.ts index 9a23bf8d9..c5e415a5b 100644 --- a/src/commands/hardis/lint/missingattributes.ts +++ b/src/commands/hardis/lint/missingattributes.ts @@ -1,96 +1,120 @@ /* jscpd:ignore-start */ // External Libraries and Node.js Modules -import * as fs from "fs-extra"; -import * as xml2js from "xml2js"; -import { glob } from "glob"; -import * as path from "path"; +import fs from 'fs-extra'; +import * as xml2js from 'xml2js'; +import { glob } from 'glob'; +import * as path from 'path'; // Salesforce Specific -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; // Project Specific Utilities -import { uxLog } from "../../../common/utils"; -import { NotifProvider, NotifSeverity } from "../../../common/notifProvider"; -import { MessageAttachment } from "@slack/types"; -import { getBranchMarkdown, getNotificationButtons, getSeverityIcon } from "../../../common/utils/notifUtils"; -import { generateCsvFile, generateReportPath } from "../../../common/utils/filesUtils"; -import { GLOB_IGNORE_PATTERNS } 
from "../../../common/utils/projectUtils"; - -// Initialize and Load Messages -Messages.importMessagesDirectory(__dirname); -const messages = Messages.loadMessages("sfdx-hardis", "org"); +import { uxLog } from '../../../common/utils/index.js'; +import { NotifProvider, NotifSeverity } from '../../../common/notifProvider/index.js'; +import { MessageAttachment } from '@slack/types'; +import { getBranchMarkdown, getNotificationButtons, getSeverityIcon } from '../../../common/utils/notifUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../common/utils/filesUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; +import { setConnectionVariables } from '../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); /* jscpd:ignore-end */ -export default class metadatastatus extends SfdxCommand { - public static title = "check missing description on custom fields"; - public static description = "Check if elements(custom fields) aren't description"; - public static examples = ["$ sfdx hardis:lint:missingattributes"]; +export default class MetadataStatus extends SfCommand { + public static title = 'check missing description on custom fields'; + public static description = ` +## Command Behavior + +**Checks for missing descriptions on custom fields within your Salesforce DX project.** + +This command helps enforce documentation standards by identifying custom fields that lack a descriptive explanation. Comprehensive field descriptions are crucial for: + +- **Maintainability:** Making it easier for developers and administrators to understand the purpose and usage of each field. +- **Data Governance:** Ensuring data quality and consistency. +- **User Adoption:** Providing clear guidance to end-users on how to interact with fields. 
+ +It specifically targets custom fields (ending with \`__c\`) and excludes standard fields, managed package fields, and fields on Custom Settings or Data Cloud objects. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses \`glob\` to find all custom field metadata files (\`.field-meta.xml\`) within your project. +- **Custom Setting Exclusion:** It first filters out fields belonging to Custom Settings by reading the corresponding object metadata files (\`.object-meta.xml\`) and checking for the \`\` tag. It also excludes Data Cloud objects (\`__dlm\`, \`__dll\`) and managed package fields. +- **XML Parsing:** For each remaining custom field file, it reads the XML content and parses it using \`xml2js\` to extract the \`fullName\` and \`description\` attributes. +- **Description Check:** It verifies if the \`description\` attribute is present and not empty for each custom field. +- **Data Aggregation:** All custom fields found to be missing a description are collected into a list, along with their object and field names. +- **Report Generation:** It generates a CSV report (\`lint-missingattributes.csv\`) containing details of all fields with missing descriptions. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications (e.g., to Slack, MS Teams, Grafana) about the presence and count of fields with missing descriptions, making it suitable for automated quality checks in CI/CD pipelines. +
+`; + public static examples = ['$ sf hardis:lint:missingattributes']; /* jscpd:ignore-start */ - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': optionalOrgFlagWithDeprecations, }; /* jscpd:ignore-end */ - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - protected static supportsUsername = true; - // Comment this out if your command does not support a hub org username protected static supportsDevhubUsername = false; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; - private objectFileDirectory = "**/objects/**/fields/*.*"; - protected fieldsWithoutDescription = []; + public static requiresProject = true; + private objectFileDirectory = '**/objects/**/fields/*.*'; + protected fieldsWithoutDescription: any[] = []; protected outputFile: string; protected outputFilesRes: any = {}; private nonCustomSettingsFieldDirectories: string[] = []; private ignorePatterns: string[] = GLOB_IGNORE_PATTERNS; public async run(): Promise { + const { flags } = await 
this.parse(MetadataStatus); await this.filterOutCustomSettings(); this.fieldsWithoutDescription = await this.verifyFieldDescriptions(); // Build notifications const branchMd = await getBranchMarkdown(); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No missing descriptions on fields has been found in ${branchMd}`; let attachments: MessageAttachment[] = []; if (this.fieldsWithoutDescription.length > 0) { - notifSeverity = "warning"; + notifSeverity = 'warning'; notifText = `${this.fieldsWithoutDescription.length} missing descriptions on fields have been found in ${branchMd}`; await this.buildCsvFile(this.fieldsWithoutDescription); attachments = [ { - text: `*Missing descriptions*\n${this.fieldsWithoutDescription.map((file) => `• ${file.name}`).join("\n")}`, + text: `*Missing descriptions*\n${this.fieldsWithoutDescription.map((file) => `• ${file.name}`).join('\n')}`, }, ]; } else { - uxLog(this, "No missing descriptions on fields have been found"); + uxLog("other", this, 'No missing descriptions on fields have been found'); } // Post notifications - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "MISSING_ATTRIBUTES", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'MISSING_ATTRIBUTES', text: notifText, attachments: attachments, buttons: notifButtons, severity: notifSeverity, - sideImage: "flow", + sideImage: 'flow', logElements: this.fieldsWithoutDescription, data: { metric: this.fieldsWithoutDescription.length }, metrics: { @@ -105,11 +129,15 @@ export default class metadatastatus extends SfdxCommand { const objectDirectories: string[] = await glob(this.objectFileDirectory, { ignore: this.ignorePatterns }); for (const directory 
of objectDirectories) { const objectName = path.basename(path.dirname(path.dirname(directory))); + // Filter Data Cloud & managed items + if (objectName.endsWith("__dlm") || objectName.endsWith("__dll") || objectName.split('__').length > 2) { + continue; + } const objectMetaFilePath = path.join(path.dirname(path.dirname(directory)), `${objectName}.object-meta.xml`); if (fs.existsSync(objectMetaFilePath)) { try { - const objectMetaFileContent = fs.readFileSync(objectMetaFilePath, "utf8"); + const objectMetaFileContent = fs.readFileSync(objectMetaFilePath, 'utf8'); let isCustomSettingsObject = false; const result = await parserCS.parseStringPromise(objectMetaFileContent); @@ -135,22 +163,23 @@ export default class metadatastatus extends SfdxCommand { this.nonCustomSettingsFieldDirectories.map(async (fieldFile) => { const fieldContent = await this.readFileAsync(fieldFile); return await this.parseXmlStringAsync(fieldContent); - }), + }) ); - const severityIconInfo = getSeverityIcon("info"); + const severityIconInfo = getSeverityIcon('info'); for (let i = 0; i < fieldResults.length; i++) { const fieldResult = fieldResults[i]; if (fieldResult && fieldResult.CustomField) { const fieldName = fieldResult.CustomField.fullName[0]; - if (fieldName.endsWith("__c") && !fieldResult.CustomField.description) { - const fieldFile = this.nonCustomSettingsFieldDirectories[i]; - const objectName = fieldFile.split("/").slice(-3, -2)[0]; + // Skip standard and managed fields + if (fieldName.endsWith('__c') && !fieldResult.CustomField.description && (fieldName.match(/__/g) || []).length < 2) { + const fieldFile = this.nonCustomSettingsFieldDirectories[i].replace(/\\/g, '/'); + const objectName = fieldFile.split('/').slice(-3, -2)[0]; const fullFieldName = `${objectName}.${fieldName}`; fieldsWithoutDescription.push({ name: fullFieldName, object: objectName, field: fieldName, - severity: "info", + severity: 'info', severityIcon: severityIconInfo, }); } @@ -173,7 +202,7 @@ export default 
class metadatastatus extends SfdxCommand { private readFileAsync(filePath: string): Promise { return new Promise((resolve, reject) => { - fs.readFile(filePath, "utf8", (err, data) => { + fs.readFile(filePath, 'utf8', (err, data) => { if (err) { reject(err); } else { @@ -183,9 +212,9 @@ export default class metadatastatus extends SfdxCommand { }); } - private async buildCsvFile(fieldsWithoutDescription: string[]): Promise { - this.outputFile = await generateReportPath("lint-missingattributes", this.outputFile); - const csvData = fieldsWithoutDescription.map((field) => ({ type: "Field", name: field })); - this.outputFilesRes = await generateCsvFile(csvData, this.outputFile); + private async buildCsvFile(fieldsWithoutDescription: any[]): Promise { + this.outputFile = await generateReportPath('lint-missingattributes', this.outputFile); + const csvData = fieldsWithoutDescription.map((field) => ({ type: 'Field', name: field.name })); + this.outputFilesRes = await generateCsvFile(csvData, this.outputFile, { fileTitle: 'Missing Attributes' }); } } diff --git a/src/commands/hardis/lint/unusedmetadatas.ts b/src/commands/hardis/lint/unusedmetadatas.ts index 417fa7985..cf490ce80 100644 --- a/src/commands/hardis/lint/unusedmetadatas.ts +++ b/src/commands/hardis/lint/unusedmetadatas.ts @@ -1,107 +1,144 @@ /* jscpd:ignore-start */ // External Libraries -import { glob } from "glob"; -import * as fs from "fs-extra"; -import * as xml2js from "xml2js"; -import * as path from "path"; +import { glob } from 'glob'; +import fs from 'fs-extra'; +import * as xml2js from 'xml2js'; +import * as path from 'path'; +import c from 'chalk'; // Salesforce Specific -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from 
'@salesforce/ts-types'; // Project Specific Utilities -import { NotifProvider, NotifSeverity } from "../../../common/notifProvider"; -import { MessageAttachment } from "@slack/types"; -import { getNotificationButtons, getBranchMarkdown, getSeverityIcon } from "../../../common/utils/notifUtils"; -import { generateCsvFile, generateReportPath } from "../../../common/utils/filesUtils"; -import { uxLog } from "../../../common/utils"; -import { GLOB_IGNORE_PATTERNS } from "../../../common/utils/projectUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); -// Load Messages -const messages = Messages.loadMessages("sfdx-hardis", "org"); +import { NotifProvider, NotifSeverity } from '../../../common/notifProvider/index.js'; +import { MessageAttachment } from '@slack/types'; +import { getNotificationButtons, getBranchMarkdown, getSeverityIcon } from '../../../common/utils/notifUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../common/utils/filesUtils.js'; +import { uxLog } from '../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; +import { CONSTANTS } from '../../../config/index.js'; +import { setConnectionVariables } from '../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); /* jscpd:ignore-end */ -export default class UnusedMetadatas extends SfdxCommand { - public static title = "check unused labels and custom permissions"; - public static description = `Check if elements (custom labels and custom permissions) are used in the project +export default class UnusedMetadatas extends SfCommand { + public static title = 'check unused labels and custom permissions'; + public static description = ` +## Command Behavior + +**Checks for unused custom labels and custom permissions within your Salesforce DX project.** + +This command 
helps identify and report on custom labels and custom permissions that are defined in your project but do not appear to be referenced anywhere in your codebase. Identifying unused metadata is crucial for: + +- **Code Cleanliness:** Removing dead code and unnecessary metadata improves project maintainability. +- **Performance:** Reducing the overall size of your metadata, which can positively impact deployment times and org performance. +- **Clarity:** Ensuring that all defined components serve a purpose, making the codebase easier to understand. + +It specifically scans for references to custom labels (e.g., \`$Label.MyLabel\`) and custom permissions (by their API name or label) across various file types (Apex, JavaScript, HTML, XML, etc.). + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-unused-metadata/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-unused-metadata/) and can output Grafana, Slack and MsTeams Notifications. - `; - public static examples = ["$ sfdx hardis:lint:unusedmetadatas"]; +The command's technical implementation involves: + +- **File Discovery:** It uses \`glob\` to find all relevant project files (Apex classes, triggers, JavaScript, HTML, XML, Aura components, Visualforce pages) and custom label (\`CustomLabels.labels-meta.xml\`) and custom permission (\`.customPermission-meta.xml\`) definition files. +- **XML Parsing:** It uses \`xml2js\` to parse the XML content of \`CustomLabels.labels-meta.xml\` and custom permission files to extract the full names of labels and permissions. +- **Content Scanning:** For each label and custom permission, it iterates through all other project files and checks if their names or associated labels are present in the file content. It performs case-insensitive checks for labels. +- **Usage Tracking:** It maintains a count of how many times each custom permission is referenced. Labels are checked for any inclusion. +- **Unused Identification:** Elements with no or very few references (for custom permissions, less than 2 to account for their own definition file) are flagged as unused. +- **Data Aggregation:** All identified unused labels and custom permissions are collected into a list. +- **Report Generation:** It generates a CSV report (\`lint-unusedmetadatas.csv\`) containing details of all unused metadata elements. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications (e.g., to Slack, MS Teams, Grafana) about the presence and count of unused metadata, making it suitable for automated monitoring in CI/CD pipelines. +
+`; + public static examples = ['$ sf hardis:lint:unusedmetadatas']; /* jscpd:ignore-start */ - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': optionalOrgFlagWithDeprecations, }; /* jscpd:ignore-end */ - protected unusedData = []; + protected unusedData: any[] = []; protected outputFile: string; protected outputFilesRes: any = {}; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - protected static supportsUsername = true; - // Comment this out if your command does not support a hub org username + protected static supportsDevhubUsername = false; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; private ignorePatterns: string[] = GLOB_IGNORE_PATTERNS; private projectFiles: string[]; - private labelFilePattern = "**/CustomLabels.labels-meta.xml"; - private customPermissionFilePattern = "**/customPermissions/*.xml"; + private labelFilePattern = '**/CustomLabels.labels-meta.xml'; + private customPermissionFilePattern = 
'**/customPermissions/*.xml'; public async run(): Promise { + const { flags } = await this.parse(UnusedMetadatas); await this.setProjectFiles(); + uxLog("action", this, c.cyan('Checking for unused labels...')); const unusedLabels = await this.verifyLabels(); + uxLog("action", this, c.cyan('Checking for unused custom permissions...')); const unusedCustomPermissions = await this.verifyCustomPermissions(); // Build notification const branchMd = await getBranchMarkdown(); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No unused metadatas has been detected in ${branchMd}`; const attachments: MessageAttachment[] = []; if (unusedLabels.length > 0) { attachments.push({ - text: `*Unused Labels*\n${unusedLabels.map((label) => `• ${label.name}`).join("\n")}`, + text: `*Unused Labels*\n${unusedLabels.map((label) => `• ${label.name}`).join('\n')}`, }); } if (unusedCustomPermissions.length > 0) { attachments.push({ - text: `*Unused Custom Permissions*\n${unusedCustomPermissions.map((permission) => `• ${permission.name}`).join("\n")}`, + text: `*Unused Custom Permissions*\n${unusedCustomPermissions + .map((permission) => `• ${permission.name}`) + .join('\n')}`, }); } + uxLog("action", this, c.cyan("Summary")); if (unusedLabels.length > 0 || unusedCustomPermissions.length > 0) { - notifSeverity = "warning"; + notifSeverity = 'warning'; notifText = `${this.unusedData.length} unused metadatas have been detected in ${branchMd}`; + if (unusedLabels.length > 0) { + uxLog("warning", this, c.yellow(`Unused Labels: ${unusedLabels.length}`)); + } + if (unusedCustomPermissions.length > 0) { + uxLog("warning", this, c.yellow(`Unused Custom Permissions: ${unusedCustomPermissions.length}`)); + } await this.buildCsvFile(unusedLabels, unusedCustomPermissions); } else { - uxLog(this, "No unused labels or custom permissions detected."); + uxLog("success", this, c.green('No unused labels 
or custom permissions detected.')); } // Post notification - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "UNUSED_METADATAS", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'UNUSED_METADATAS', text: notifText, attachments: attachments, buttons: notifButtons, severity: notifSeverity, - sideImage: "flow", + sideImage: 'flow', logElements: this.unusedData, data: { metric: this.unusedData.length }, metrics: { @@ -121,45 +158,61 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co const labelFilePath = labelFiles[0]; if (!labelFilePath) { - console.warn("No label file found."); + uxLog("warning", this, c.yellow('No label file found.')); return []; } return new Promise((resolve, reject) => { - fs.readFile(labelFilePath, "utf-8", (errorReadingFile, data) => { - if (errorReadingFile) { - reject(errorReadingFile); - return; - } - - xml2js.parseString(data, (errorParseString, result: any) => { - if (errorParseString) { - reject(errorParseString); + try { + fs.readFile(labelFilePath, 'utf-8', (errorReadingFile, data) => { + if (errorReadingFile) { + reject(errorReadingFile); return; } - const severityIconInfo = getSeverityIcon("info"); - const labelsArray: string[] = result.CustomLabels.labels.map((label: any) => label.fullName[0]); - const unusedLabels: any[] = labelsArray - .filter((label) => { - const labelLower = `label.${label.toLowerCase()}`; - const cLower = `c.${label.toLowerCase()}`; - const auraPattern = `{!$Label.c.${label.toLowerCase()}}`; - return !this.projectFiles.some((filePath) => { - const fileContent = fs.readFileSync(filePath, "utf-8").toLowerCase(); - return fileContent.includes(labelLower) || fileContent.includes(cLower) || fileContent.includes(auraPattern); + + xml2js.parseString(data, 
(errorParseString, result: any) => { + if (errorParseString) { + reject(errorParseString); + return; + } + const severityIconInfo = getSeverityIcon('info'); + const labelsArray: string[] = result.CustomLabels.labels.map((label: any) => label.fullName[0]); + const unusedLabels: any[] = labelsArray + .filter((label) => { + const labelLower = `label.${label.toLowerCase()}`; + const cLower = `c.${label.toLowerCase()}`; + const auraPattern = `{!$Label.c.${label.toLowerCase()}}`; + return !this.projectFiles.some((filePath) => { + if (!fs.existsSync(filePath)) { + uxLog("warning", this, c.yellow(`File not found: ${filePath}`)); + return false; + } + try { + const fileContent = fs.readFileSync(filePath, 'utf-8').toLowerCase(); + return ( + fileContent.includes(labelLower) || fileContent.includes(cLower) || fileContent.includes(auraPattern) + ); + } catch (error) { + uxLog("warning", this, c.yellow(`Error reading file ${filePath}: ${error}`)); + return false; + } + }); + }) + .map((label) => { + return { + name: label, + severity: 'info', + severityIcon: severityIconInfo, + }; }); - }) - .map((label) => { - return { - name: label, - severity: "info", - severityIcon: severityIconInfo, - }; - }); - - resolve(unusedLabels); + + resolve(unusedLabels); + }); }); - }); + } catch (error) { + uxLog("warning", this, c.yellow(`Error processing label file: ${error}`)); + reject(error); + } }); } @@ -169,40 +222,46 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co */ private async verifyCustomPermissions(): Promise { const foundLabels = new Map(); - const customPermissionFiles: string[] = await glob(this.customPermissionFilePattern, { ignore: this.ignorePatterns }); + const customPermissionFiles: string[] = await glob(this.customPermissionFilePattern, { + ignore: this.ignorePatterns, + }); if (!customPermissionFiles) { - console.warn("No custom permission file found."); + uxLog("warning", this, c.yellow('No custom permission file found.')); return []; 
} for (const file of customPermissionFiles) { - const fileData = await fs.readFile(file, "utf-8"); - const fileName = path.basename(file, ".customPermission-meta.xml"); - let label = ""; - - xml2js.parseString(fileData, (error, result) => { - if (error) { - console.error(`Error parsing XML: ${error}`); - return; - } - label = result.CustomPermission.label[0]; - }); - for (const filePath of this.projectFiles) { - const fileContent: string = fs.readFileSync(filePath, "utf-8"); - if (fileContent.includes(fileName) || fileContent.includes(label)) { - const currentCount = foundLabels.get(fileName) || 0; - foundLabels.set(fileName, currentCount + 1); + try { + const fileData = await fs.readFile(file, 'utf-8'); + const fileName = path.basename(file, '.customPermission-meta.xml'); + let label = ''; + + xml2js.parseString(fileData, (error, result) => { + if (error) { + uxLog("warning", this, c.yellow(`Error parsing XML: ${error}`)); + return; + } + label = result.CustomPermission.label[0]; + }); + for (const filePath of this.projectFiles) { + const fileContent: string = fs.readFileSync(filePath, 'utf-8'); + if (fileContent.includes(fileName) || fileContent.includes(label)) { + const currentCount = foundLabels.get(fileName) || 0; + foundLabels.set(fileName, currentCount + 1); + } } + } catch (error) { + uxLog("warning", this, c.yellow(`Error processing custom permission file ${file}: ${error}`)); } } - const severityIconInfo = getSeverityIcon("info"); + const severityIconInfo = getSeverityIcon('info'); const result = [...foundLabels.keys()] .filter((key) => (foundLabels.get(key) || 0) < 2) .map((name) => { return { name: name, - severity: "info", + severity: 'info', severityIcon: severityIconInfo, }; }); @@ -210,16 +269,16 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async setProjectFiles(): Promise { - this.projectFiles = await glob("**/*.{cls,trigger,js,html,xml,cmp,email,page}", { ignore: this.ignorePatterns }); + 
this.projectFiles = await glob('**/*.{cls,trigger,js,html,xml,cmp,email,page}', { ignore: this.ignorePatterns }); } private async buildCsvFile(unusedLabels: string[], unusedCustomPermissions: string[]): Promise { - this.outputFile = await generateReportPath("lint-unusedmetadatas", this.outputFile); + this.outputFile = await generateReportPath('lint-unusedmetadatas', this.outputFile); this.unusedData = [ - ...unusedLabels.map((label) => ({ type: "Label", name: label })), - ...unusedCustomPermissions.map((permission) => ({ type: "Custom Permission", name: permission })), + ...unusedLabels.map((label: any) => ({ type: 'Label', name: label?.name || label })), + ...unusedCustomPermissions.map((permission: any) => ({ type: 'Custom Permission', name: permission.name || permission })), ]; - this.outputFilesRes = await generateCsvFile(this.unusedData, this.outputFile); + this.outputFilesRes = await generateCsvFile(this.unusedData, this.outputFile, { fileTitle: 'Unused Metadatas' }); } } diff --git a/src/commands/hardis/mdapi/deploy.ts b/src/commands/hardis/mdapi/deploy.ts index 43ed29e90..4478e3fb0 100644 --- a/src/commands/hardis/mdapi/deploy.ts +++ b/src/commands/hardis/mdapi/deploy.ts @@ -1,91 +1,126 @@ /* jscpd:ignore-start */ -import { flags, FlagsConfig, SfdxCommand } from "@salesforce/command"; -import { Duration } from "@salesforce/kit"; -import { AnyJson } from "@salesforce/ts-types"; -import { wrapSfdxCoreCommand } from "../../../common/utils/wrapUtils"; +import { Flags, requiredOrgFlagWithDeprecations, SfCommand } from '@salesforce/sf-plugins-core'; +import c from 'chalk'; +import { AnyJson } from '@salesforce/ts-types'; +import { wrapSfdxCoreCommand } from '../../../common/utils/wrapUtils.js'; +import { uxLog } from '../../../common/utils/index.js'; -const xorFlags = ["zipfile", "validateddeployrequestid", "deploydir"]; -export class Deploy extends SfdxCommand { - public static readonly description = `sfdx-hardis wrapper for sfdx force:mdapi:deploy that displays 
tips to solve deployment errors. +const xorFlags = ['zipfile', 'validateddeployrequestid', 'deploydir']; +export class Deploy extends SfCommand { + public static readonly description = ` +## Command Behavior + +**A wrapper command for Salesforce CLI's \`sf project deploy start\` (formerly \`sfdx force:mdapi:deploy\`), designed to assist with deployment error resolution.** + +This command facilitates the deployment of metadata API source (either from a zip file, a deployment directory, or a validated deploy request ID) to a Salesforce org. Its primary enhancement over the standard Salesforce CLI command is its ability to provide tips and guidance for solving common deployment errors. + +Key features: + +- **Flexible Input:** Supports deploying from a \`.zip\` file (\`--zipfile\`), a local directory (\`--deploydir\`), or by referencing a previously validated deployment (\`--validateddeployrequestid\`). +- **Test Level Control:** Allows specifying the test level for deployments (\`NoTestRun\`, \`RunSpecifiedTests\`, \`RunLocalTestsInOrg\`, \`RunAllTestsInOrg\`). +- **Error Handling Assistance:** Displays helpful tips and links to documentation to guide you through resolving deployment failures. + +**Important Note:** The underlying Salesforce CLI command \`sfdx force:mdapi:deploy\` is being deprecated by Salesforce in November 2024. It is recommended to migrate to \`sf project deploy start\` for future compatibility. See [Salesforce CLI Migration Guide](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm) for more information. 
+ +For visual assistance with solving deployment errors, refer to this article: [![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) -[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_mdapi.htm#cli_reference_force_mdapi_deploy) +
+Technical explanations + +This command acts as an intelligent wrapper around the Salesforce CLI's metadata deployment functionality: + +- **Command Wrapping:** It uses the \`wrapSfdxCoreCommand\` utility to execute the \`sfdx force:mdapi:deploy\` (or its equivalent \`sf project deploy start\`) command, passing through all relevant flags and arguments. +- **Error Analysis (Implicit):** While the direct code snippet doesn't show explicit error analysis, the description implies that the \`wrapSfdxCoreCommand\` or a subsequent process intercepts deployment failures and provides contextual help. +- **User Guidance:** It logs messages to the console, including deprecation warnings and pointers to external documentation for troubleshooting. +- **Argument Passthrough:** It directly passes the command-line arguments (\`this.argv\`) to the underlying Salesforce CLI command, ensuring all standard deployment options are supported. +
`; public static readonly examples = []; - public static readonly requiresUsername = true; - public static readonly flagsConfig: FlagsConfig = { - checkonly: flags.boolean({ - char: "c", - description: "checkOnly", - }), - deploydir: flags.directory({ - char: "d", - description: "deployDir", + public static readonly flags: any = { + checkonly: Flags.boolean({ + char: 'c', + description: 'checkOnly', + }), + deploydir: Flags.directory({ + char: 'd', + description: 'deployDir', exactlyOne: xorFlags, }), - wait: flags.minutes({ - char: "w", - description: "wait", - default: Duration.minutes(0), + wait: Flags.integer({ + char: 'w', + description: 'wait', + default: 120, min: -1, }), - testlevel: flags.enum({ - char: "l", - description: "testLevel", - options: ["NoTestRun", "RunSpecifiedTests", "RunLocalTests", "RunAllTestsInOrg"], - default: "NoTestRun", + testlevel: Flags.string({ + char: 'l', + description: 'testLevel', + options: ['NoTestRun', 'RunSpecifiedTests', 'RunLocalTests', 'RunAllTestsInOrg'], + default: 'NoTestRun', }), - runtests: flags.array({ - char: "r", - description: "runTests", + runtests: Flags.string({ + char: 'r', + description: 'runTests', default: [], + multiple: true, }), - ignoreerrors: flags.boolean({ - char: "o", - description: "ignoreErrors", + ignoreerrors: Flags.boolean({ + description: 'ignoreErrors', }), - ignorewarnings: flags.boolean({ - char: "g", - description: "ignoreWarnings", + ignorewarnings: Flags.boolean({ + char: 'g', + description: 'ignoreWarnings', }), - validateddeployrequestid: flags.id({ - char: "q", - description: "validatedDeployRequestId", + validateddeployrequestid: Flags.string({ + char: 'q', + description: 'validatedDeployRequestId', exactlyOne: xorFlags, - exclusive: ["testlevel", "runtests", "ignoreerrors", "ignorewarnings", "checkonly"], + exclusive: ['testlevel', 'runtests', 'ignoreerrors', 'ignorewarnings', 'checkonly'], }), - verbose: flags.builtin({ - description: "verbose", + verbose: Flags.boolean({ + 
description: 'verbose', }), - zipfile: flags.filepath({ - char: "f", - description: "zipFile", + zipfile: Flags.file({ + char: 'f', + description: 'zipFile', exactlyOne: xorFlags, }), - singlepackage: flags.boolean({ - char: "s", - description: "singlePackage", + singlepackage: Flags.boolean({ + char: 's', + description: 'singlePackage', }), - soapdeploy: flags.boolean({ - description: "soapDeploy", + soapdeploy: Flags.boolean({ + description: 'soapDeploy', }), - purgeondelete: flags.boolean({ - description: "purgeOnDelete", + purgeondelete: Flags.boolean({ + description: 'purgeOnDelete', }), - concise: flags.builtin({ - description: "concise", + concise: Flags.boolean({ + description: 'concise', }), - debug: flags.boolean({ + debug: Flags.boolean({ default: false, - description: "debug", + description: 'debug', }), - websocket: flags.string({ - description: "websocket", + websocket: Flags.string({ + description: 'websocket', }), + 'target-org': requiredOrgFlagWithDeprecations, }; /* jscpd:ignore-end */ public async run(): Promise { - return await wrapSfdxCoreCommand("sfdx force:mdapi:deploy", this.argv, this, this.flags.debug); + const { flags } = await this.parse(Deploy); + uxLog("error", this, c.red('This command will be removed by Salesforce in November 2024.')); + uxLog("error", this, c.red('Please migrate to command sf hardis project deploy start')); + uxLog( + "error", + this, + c.red( + 'See https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm' + ) + ); + return await wrapSfdxCoreCommand('sfdx force:mdapi:deploy', this.argv, this, flags.debug); } } diff --git a/src/commands/hardis/misc/custom-label-translations.ts b/src/commands/hardis/misc/custom-label-translations.ts new file mode 100644 index 000000000..5e7ad1d0a --- /dev/null +++ b/src/commands/hardis/misc/custom-label-translations.ts @@ -0,0 +1,415 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import c 
from "chalk"; +import * as path from "path"; +import fs from "fs-extra"; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { isCI, uxLog } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { parseStringPromise, Builder } from 'xml2js'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; +import { glob } from 'glob'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CustomLabelTranslations extends SfCommand { + public static title = 'Custom Label Translations'; + + public static description = ` +## Command Behavior + +**Extracts selected custom labels, or all custom labels used within a given Lightning Web Component (LWC), from all available language translation files in the project.** + +This command streamlines the process of managing and isolating specific custom label translations. It's particularly useful for: + +- **Localization Management:** Focusing on translations for a subset of labels or for labels relevant to a specific UI component. +- **Collaboration:** Sharing only the necessary translation files with translators, reducing complexity. +- **Debugging:** Isolating translation issues for specific labels or components. + +Key functionalities: + +- **Label Selection:** You can specify custom label names directly using the \`--label\` flag (comma-separated). +- **LWC-based Extraction:** Alternatively, you can provide an LWC developer name using the \`--lwc\` flag, and the command will automatically identify and extract all custom labels referenced within that LWC's JavaScript files. 
+- **Interactive Prompts:** If neither \`--label\` nor \`--lwc\` is provided, the command will interactively prompt you to choose between selecting specific labels or extracting from an LWC. +- **Output Generation:** For each language found in your project's \`translations\` folder, it generates a new \`.translation-meta.xml\` file containing only the extracted custom labels and their translations. These files are placed in a timestamped output directory. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses \`glob\` to find all \`*.translation-meta.xml\` files in the \`**/translations/\` directory and, if an LWC is specified, it searches for the LWC's JavaScript files (\`**/lwc/**/*.js\`). +- **LWC Label Extraction:** The \`extractLabelsFromLwc\` function uses regular expressions (\`@salesforce/label/c.([a-zA-Z0-9_]+)\`) to parse LWC JavaScript files and identify referenced custom labels. +- **XML Parsing and Building:** It uses \`xml2js\` (\`parseStringPromise\` and \`Builder\`) to: + - Read and parse existing \`.translation-meta.xml\` files. + - Filter the \`customLabels\` array to include only the requested labels. + - Construct a new XML structure containing only the filtered labels. + - Build a new XML string with proper formatting and write it to a new file. +- **Interactive Prompts:** The \`prompts\` library is used extensively to guide the user through the selection of extraction methods (labels or LWC) and specific labels/components. +- **File System Operations:** It uses \`fs-extra\` for creating output directories (\`extracted-translations/\`) and writing the generated translation files. +- **WebSocket Communication:** It uses \`WebSocketClient.requestOpenFile\` to open the output directory in VS Code for easy access to the generated files. +
+`; + + public static examples = [ + '$ sf hardis:misc:custom-label-translations --label CustomLabelName', + '$ sf hardis:misc:custom-label-translations --label Label1,Label2', + '$ sf hardis:misc:custom-label-translations --lwc MyComponent' + ]; + + private outputDirPrefix = 'extract-'; + + public static flags: any = { + label: Flags.string({ + char: 'l', + description: 'Developer name(s) of the custom label(s), comma-separated', + }), + lwc: Flags.string({ + char: 'c', + description: 'Developer name of the Lightning Web Component', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + /** + * Extract custom label names from LWC JS files + */ + private async extractLabelsFromLwc(lwcName: string, debugMode: boolean): Promise { + uxLog("log", this, c.grey(`Looking for LWC '${lwcName}' JS files...`)); + + const lwcFiles = await glob(`**/lwc/${lwcName}/**/*.js`); + + if (lwcFiles.length === 0) { + throw new Error(`No JS files found for LWC '${lwcName}'`); + } + + uxLog("log", this, c.grey(`Found ${lwcFiles.length} JS files for component '${lwcName}'`)); + + const labelNames = new Set(); + const labelImportRegex = /@salesforce\/label\/c\.([a-zA-Z0-9_]+)/g; + + for (const jsFile of lwcFiles) { + const content = await fs.readFile(jsFile, 'utf8'); + + let match; + while ((match = labelImportRegex.exec(content)) !== null) { + labelNames.add(match[1]); + } + + if (debugMode) { + uxLog("log", this, c.grey(`Processed file: ${jsFile}`)); + } + } + + const extractedLabels = Array.from(labelNames); + + if (extractedLabels.length === 0) { + throw new Error(`No custom 
labels found in LWC '${lwcName}'`); + } + + uxLog("log", this, c.grey(`Found ${extractedLabels.length} custom labels in LWC '${lwcName}': ${extractedLabels.join(', ')}`)); + this.outputDirPrefix = lwcName; + + return extractedLabels; + } + + public async run(): Promise { + const { flags } = await this.parse(CustomLabelTranslations); + const debugMode = flags.debug || false; + + let labelNames: string[] = []; + + if (flags.lwc) { + try { + labelNames = await this.extractLabelsFromLwc(flags.lwc, debugMode); + } catch (error: any) { + uxLog("error", this, c.red(error.message)); + return { success: false, message: error.message }; + } + } else if (flags.label) { + labelNames = flags.label.split(',').map(label => label.trim()); + } else if (!isCI) { + const selection = await CustomLabelTranslations.promptExtractionMethod(); + if (selection.type == 'labels') { + labelNames = selection.values; + } else if (selection.type == 'lwc') { + labelNames = await this.extractLabelsFromLwc(selection.values, debugMode); + } + } + + if (!labelNames || labelNames.length === 0) { + const errorMsg = 'No custom labels specified. 
Use --label or --lwc flag.'; + uxLog("error", this, c.red(errorMsg)); + return { success: false, message: errorMsg }; + } + + uxLog("log", this, c.grey(`Processing custom labels: ${labelNames.join(', ')}`)); + + try { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const outputDir = path.join('extracted-translations', `${this.outputDirPrefix}-${timestamp}`); + await fs.ensureDir(outputDir); + + const translationFiles = await glob('**/translations/*.translation-meta.xml'); + + if (translationFiles.length === 0) { + uxLog("warning", this, c.yellow(`No translation files found in **/translations/`)); + return { success: false, message: 'No translation files found' }; + } + + const results = {}; + + for (const translationFile of translationFiles) { + const languageCode = path.basename(translationFile).replace('.translation-meta.xml', ''); + uxLog("log", this, c.grey(`Processing translation file for ${languageCode}...`)); + + const xmlContent = await fs.readFile(translationFile, 'utf8'); + + const parsedXml = await parseStringPromise(xmlContent, { explicitArray: false }); + + if (!parsedXml.Translations) { + uxLog("warning", this, c.yellow(`Invalid translation file format: ${translationFile}`)); + continue; + } + + if (!parsedXml.Translations.customLabels) { + uxLog("warning", this, c.yellow(`No custom labels found in ${translationFile}`)); + continue; + } + + const customLabels = Array.isArray(parsedXml.Translations.customLabels) + ? 
parsedXml.Translations.customLabels + : [parsedXml.Translations.customLabels]; + + const matchedLabels = customLabels.filter(label => + labelNames.includes(label.name) + ); + + if (matchedLabels.length === 0) { + uxLog("warning", this, c.yellow(`No matching custom labels found in ${languageCode}`)); + continue; + } + + const newXml = { + Translations: { + $: { xmlns: "http://soap.sforce.com/2006/04/metadata" }, + customLabels: matchedLabels + } + }; + + const builder = new Builder({ + xmldec: { version: '1.0', encoding: 'UTF-8' }, + renderOpts: { pretty: true, indent: ' ', newline: '\n' } + }); + const outputXml = builder.buildObject(newXml); + + const outputFile = path.join(outputDir, `${languageCode}.translation-meta.xml`); + + await fs.writeFile(outputFile, outputXml); + + results[languageCode] = { + file: outputFile, + matchedLabels: matchedLabels.length + }; + + if (debugMode) { + uxLog("log", this, c.grey(`Found ${matchedLabels.length} labels in ${languageCode}:`)); + matchedLabels.forEach(label => { + uxLog("log", this, c.grey(` ${label.name} = "${label.label}"`)); + }); + } + } + + const totalFiles = Object.keys(results).length; + + if (totalFiles === 0) { + uxLog("warning", this, c.yellow('No matching labels found in any translation file.')); + return { success: false, message: 'No matching labels found' }; + } + + uxLog("success", this, c.green(`Successfully extracted custom labels to ${outputDir}`)); + uxLog("log", this, c.grey(`Processed ${totalFiles} translation files`)); + + WebSocketClient.requestOpenFile(outputDir); + + // Return an object to be displayed with --json + return { + success: true, + outputDirectory: outputDir, + results: results + }; + + } catch (err: any) { + uxLog("error", this, c.red(`Error processing custom labels: ${err.message}`)); + throw err; + } + } + + public static async promptCustomLabels() { + try { + const customLabelsFiles = await glob('**/labels/CustomLabels.labels-meta.xml', { ignore: GLOB_IGNORE_PATTERNS }); + if 
(customLabelsFiles.length == 0) { + throw new Error('No CustomLabels.labels-meta.xml was found'); + } + + const choices: any = []; + + for (const customLabelsFile of customLabelsFiles) { + const xmlContent = await fs.readFile(customLabelsFile, 'utf8'); + const parsedXml = await parseStringPromise(xmlContent); + + if (!parsedXml.CustomLabels || !parsedXml.CustomLabels.labels) { + throw new Error('No custom labels found in the file'); + } + + const labels = Array.isArray(parsedXml.CustomLabels.labels) + ? parsedXml.CustomLabels.labels + : [parsedXml.CustomLabels.labels]; + + labels.sort((a, b) => { + const nameA = a.fullName ? a.fullName[0] : a.name ? a.name[0] : ''; + const nameB = b.fullName ? b.fullName[0] : b.name ? b.name[0] : ''; + return nameA.localeCompare(nameB, 'en', { sensitivity: 'base' }); + }); + + labels.map(label => { + const name = label.fullName ? label.fullName[0] : label.name ? label.name[0] : ''; + const value = label.value ? label.value[0] : ''; + const shortDesc = value.length > 40 ? value.substring(0, 40) + '...' 
: value; + + choices.push({ + value: name, + title: name, + description: shortDesc + }); + }); + } + + const labelSelectRes = await prompts({ + type: 'multiselect', + message: 'Please select the Custom Labels you want to extract from translations', + description: 'Choose which custom labels to include in the translation extraction', + choices: choices + }); + + return labelSelectRes.value; + + } catch (err: any) { + console.error('Error while processing custom labels:', err.message); + throw err; + } + } + + public static async promptLwcComponent() { + try { + const lwcMetaFiles = await glob('**/lwc/*/*.js-meta.xml'); + + if (lwcMetaFiles.length === 0) { + throw new Error('No Lightning Web Components found in the project'); + } + + const componentsInfo: Array = []; + for (const metaFile of lwcMetaFiles) { + try { + const xmlContent = await fs.readFile(metaFile, 'utf8'); + const parsedXml = await parseStringPromise(xmlContent); + + const pathParts = metaFile.split('/'); + const componentName = pathParts[pathParts.length - 1].replace('.js-meta.xml', ''); + + let masterLabel = componentName; + + if (parsedXml.LightningComponentBundle && + parsedXml.LightningComponentBundle.masterLabel && + parsedXml.LightningComponentBundle.masterLabel.length > 0) { + masterLabel = parsedXml.LightningComponentBundle.masterLabel[0]; + } + + componentsInfo.push({ + name: componentName, + label: masterLabel, + path: metaFile + }); + } catch (err: any) { + console.warn(`Could not parse meta file: ${metaFile}`, err.message); + } + } + + componentsInfo.sort((a, b) => a.name.localeCompare(b.name, 'en', { sensitivity: 'base' })); + + const choices = componentsInfo.map(component => ({ + value: component.name, + title: component.label, + description: `Name: ${component.name}` + })); + + const componentSelectRes = await prompts({ + type: 'select', + name: 'value', + message: 'Select a Lightning Web Component to extract custom labels from', + description: 'Choose which LWC component to analyze 
for custom label usage', + placeholder: 'Select a component', + choices: choices + }); + + return componentSelectRes.value; + } catch (err: any) { + console.error('Error while finding LWC components:', err.message); + throw err; + } + } + + public static async promptExtractionMethod() { + try { + const methodSelectRes = await prompts({ + type: 'select', + name: 'method', + message: 'How would you like to extract custom label translations?', + description: 'Choose your preferred method for extracting custom label translations', + placeholder: 'Select extraction method', + choices: [ + { + value: 'labels', + title: 'Select specific custom labels', + description: 'Choose one or more custom labels from the full list' + }, + { + value: 'lwc', + title: 'Extract from a Lightning Web Component', + description: 'Find all custom labels used in a specific LWC' + } + ] + }); + + let values; + if (methodSelectRes.method === 'labels') { + values = await CustomLabelTranslations.promptCustomLabels(); + } else if (methodSelectRes.method === 'lwc') { + values = await CustomLabelTranslations.promptLwcComponent(); + } + + return { + type: methodSelectRes.method, + values: values + } + } catch (err: any) { + console.error('Error during extraction method selection:', err.message); + throw err; + } + } +} \ No newline at end of file diff --git a/src/commands/hardis/misc/purge-references.ts b/src/commands/hardis/misc/purge-references.ts new file mode 100644 index 000000000..e277f7743 --- /dev/null +++ b/src/commands/hardis/misc/purge-references.ts @@ -0,0 +1,194 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import ora, { Ora } from 'ora'; +import * as path from 'path'; + +import { execCommand, uxLog } from '../../../common/utils/index.js'; +import { 
prompts } from '../../../common/utils/prompts.js'; +import { MetadataUtils } from '../../../common/metadata-utils/index.js'; +import { glob } from 'glob'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; +import { applyAllReplacementsDefinitions } from '../../../common/utils/xmlUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class PurgeRef extends SfCommand { + public static title = 'Purge References'; + + public static description = ` +## Command Behavior + +**Purges references to specified strings within your Salesforce metadata files before deployment.** + +This command is a powerful, yet dangerous, tool designed to modify your local Salesforce metadata by removing or altering references to specific strings. It's primarily intended for advanced use cases, such as refactoring a custom field's API name (e.g., changing a Master-Detail relationship to a Lookup) where direct string replacement across many files is necessary. + +**USE WITH EXTREME CAUTION AND CAREFULLY READ ALL MESSAGES!** Incorrect usage can lead to data loss or metadata corruption. + +Key functionalities: + +- **Reference String Input:** You can provide a comma-separated list of strings (e.g., \`Affaire__c,MyField__c\`) that you want to find and modify within your metadata. +- **Automatic Related Field Inclusion:** If a custom field API name (ending with \`__c\`) is provided, it automatically includes its relationship name (ending with \`__r\`) in the list of references to purge, ensuring comprehensive cleanup. +- **Source Synchronization Check:** Prompts you to confirm if your local sources are up-to-date with the target org, offering to retrieve metadata if needed. +- **Targeted File Scan:** Scans \`.cls\`, \`.trigger\`, and \`.xml\` files within your SFDX project to identify occurrences of the specified reference strings. 
+- **Configurable Replacements:** Applies predefined replacement rules based on file type (e.g., Apex classes, XML files) to modify the content where references are found. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Input:** Uses \`prompts\` to get the list of reference strings from the user if not provided via flags. +- **Metadata Retrieval:** If the user indicates that local sources are not up-to-date, it executes \`sf project retrieve start\` to fetch the latest metadata from the target org. +- **File System Scan:** It uses \`glob\` to efficiently find all relevant source files (\`.cls\`, \`.trigger\`, \`.xml\`) within the project's package directories. +- **Content Matching:** Reads the content of each source file and checks for the presence of any of the specified reference strings. + +The core utility function for replacements is called \`applyAllReplacementsDefinitions\`. It is responsible for iterating through the identified files and applying the defined replacement rules. These rules are structured to target specific patterns (for example, \`,{{REF}},\` or \`{{REF}}[ |=].+\` in Apex code) and replace them with a desired string (often an empty string or a modified version). + +- **Regular Expressions:** The replacement rules heavily rely on regular expressions (\`regex\`) to precisely match and modify the content. +- **User Feedback:** Provides real-time feedback using \`ora\` for spinners and \`uxLog\` for logging messages about the progress and results of the operation. +
+`; + + public static examples = ['$ sf hardis:misc:purge-references']; + + public static flags: any = { + references: Flags.string({ + char: 'r', + description: 'Comma-separated list of references to find in metadatas', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + /* jscpd:ignore-end */ + private ignorePatterns: string[] = GLOB_IGNORE_PATTERNS; + protected referenceStrings: string[] = []; + protected referenceStringsLabel: string; + protected allMatchingSourceFiles: string[] = []; + protected spinnerCustom: Ora; + + public async run(): Promise { + uxLog("warning", this, c.yellow(c.bold(PurgeRef.description))); + const { flags } = await this.parse(PurgeRef); + // Collect input parameters + this.referenceStrings = (flags?.references || '').split(','); + if (this.referenceStrings.length == 1 && this.referenceStrings[0] === '') { + const refPromptResult = await prompts({ + type: 'text', + message: 'Please input a comma-separated list of strings that you want to purge (example: Affaire__c)', + description: 'Enter the reference strings to purge from your metadata files', + placeholder: 'Ex: Affaire__c,MyField__c,CustomObject__c', + }); + this.referenceStrings = refPromptResult.value.split(','); + } + if (this.referenceStrings.length == 1 && this.referenceStrings[0] === '') { + throw new SfError('You must input at least one string to check for references'); + } + for (const refString of this.referenceStrings) { + if (refString.endsWith('__c') && !this.referenceStrings.includes(refString.replace('__c', 
'__r'))) { + this.referenceStrings.push(refString.replace('__c', '__r')); + } + } + this.referenceStringsLabel = this.referenceStrings.join(','); + + // Retrieve metadatas if necessary + const retrieveNeedRes = await prompts({ + type: 'select', + message: `Are your local sources up to date with target org ${flags[ + 'target-org' + ].getUsername()}, or do you need to retrieve some of them ?`, + description: 'Confirm whether your local metadata is synchronized with the target org', + placeholder: 'Select an option', + choices: [ + { value: true, title: 'My local sfdx sources are up to date with the target org' }, + { value: false, title: 'I need to retrieve metadatas :)' }, + ], + }); + if (retrieveNeedRes.value === false) { + const metadatas = await MetadataUtils.promptMetadataTypes(); + const metadataArg = metadatas.map((metadataType: any) => metadataType.xmlName).join(' '); + await execCommand(`sf project retrieve start --ignore-conflicts --metadata ${metadataArg}`, this, { fail: true }); + } + + // Find sources that contain references + this.spinnerCustom = ora({ + text: `Browsing sources to find references to ${this.referenceStringsLabel}...`, + spinner: 'moon', + }).start(); + const packageDirectories = this.project?.getPackageDirectories() || []; + this.allMatchingSourceFiles = []; + for (const packageDirectory of packageDirectories) { + const sourceFiles = await glob('*/**/*.{cls,trigger,xml}', { + ignore: this.ignorePatterns, + cwd: packageDirectory.fullPath, + }); + const matchingSourceFiles = sourceFiles + .filter((sourceFile) => { + sourceFile = path.join(packageDirectory.path, sourceFile); + const fileContent = fs.readFileSync(sourceFile, 'utf8'); + return this.referenceStrings.some((refString) => fileContent.includes(refString)); + }) + .map((sourceFile) => path.join(packageDirectory.path, sourceFile)); + this.allMatchingSourceFiles.push(...matchingSourceFiles); + } + this.spinnerCustom.succeed(`Found ${this.allMatchingSourceFiles.length} sources with 
references`); + this.allMatchingSourceFiles.sort(); + uxLog("other", this, 'Matching files:\n' + c.grey(this.allMatchingSourceFiles.join('\n'))); + + // Handling Apex classes + await applyAllReplacementsDefinitions( + this.allMatchingSourceFiles, + this.referenceStrings, + this.getAllReplacements() + ); + + return { message: 'Command completed' }; + } + + private getAllReplacements() { + return [ + // Apex + { + extensions: ['.cls', '.trigger'], + label: 'Apex', + type: 'code', + replaceMode: ['line'], + refRegexes: [ + // , REF , + { regex: `,{{REF}},`, replace: ',' }, + { regex: `, {{REF}},`, replace: ',' }, + { regex: `,{{REF}} ,`, replace: ',' }, + { regex: `, {{REF}} ,`, replace: ',' }, + // , REF = xxx , + { regex: `,{{REF}}[ |=].+\\,`, replace: ',' }, + { regex: `, {{REF}}[ |=].+\\,`, replace: ',' }, + { regex: `,{{REF}}[ |=].+\\, `, replace: ',' }, + { regex: `, {{REF}}[ |=].+\\ ,`, replace: ',' }, + // , REF = xxx ) + { regex: `,{{REF}}[ |=].+\\)`, replace: ')' }, + { regex: `, {{REF}}[ |=].+\\)`, replace: ')' }, + // REF = xxx , + { regex: `{{REF}}[ |=].+\\)`, replace: ')' }, + ], + }, + ]; + } +} diff --git a/src/commands/hardis/misc/servicenow-report.ts b/src/commands/hardis/misc/servicenow-report.ts new file mode 100644 index 000000000..9cc2eae22 --- /dev/null +++ b/src/commands/hardis/misc/servicenow-report.ts @@ -0,0 +1,337 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { generateCsvFile, generateReportPath } from '../../../common/utils/filesUtils.js'; +import { soqlQuery } from '../../../common/utils/apiUtils.js'; +import axios from 'axios'; +import c from 'chalk'; +import { uxLog } from '../../../common/utils/index.js'; +import { glob } from 'glob'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; +import { prompts } from 
'../../../common/utils/prompts.js'; +import fs from 'fs-extra'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +/* jscpd:ignore-end */ +export default class ServiceNowReport extends SfCommand { + public static title = 'ServiceNow Report'; + public static description = `This command retrieves user stories from Salesforce and enriches them with data from ServiceNow. + +Define the following environment variables (in CICD variables or locally in a **.env** file): + +- SERVICENOW_URL: The base URL of the ServiceNow API (ex: https://your-instance.service-now.com/) +- SERVICENOW_USERNAME: The username for ServiceNow API authentication. +- SERVICENOW_PASSWORD: The password for ServiceNow API authentication. + +You also need to define JSON configuration file(e) in folder **config/user-stories/** + +Example: + +\`\`\`json +{ + "userStoriesConfig": { + "fields": [ + "Id", + "Name", + "Ticket_Number__c", + "copado__User_Story_Title__c", + "CreatedBy.Name", + "copado__Release__r.Name", + "copado__Environment__r.Name" + ], + "table": "copado__User_Story__c", + "where": "copado__Environment__r.Name ='UAT'", + "whereChoices": { + "UAT all": "copado__Environment__r.Name ='UAT'", + "UAT postponed": "copado__Environment__r.Name ='UAT' AND copado__Release__r.Name = 'postponed'", + "UAT in progress": "copado__Environment__r.Name ='UAT' AND copado__Release__r.Name != 'postponed' AND copado__Release__r.Name != 'cancelled'" + }, + "orderBy": "Ticket_Number__c ASC", + "ticketField": "Ticket_Number__c", + "reportFields": [ + { "key": "US Name", "path": "Name" }, + { "key": "US SN Identifier", "path": "Ticket_Number__c" }, + { "key": "US Title", "path": "copado__User_Story_Title__c" }, + { "key": "US Created By", "path": "CreatedBy.Name" }, + { "key": "US Environment", "path": "copado__Environment__r.Name" }, + { "key": "US Release", "path": "copado__Release__r.Name" }, + { "key": "SN Identifier", "path": 
"serviceNowInfo.number", "default": "NOT FOUND" }, + { "key": "SN Title", "path": "serviceNowInfo.short_description", "default": "NOT FOUND" }, + { "key": "SN Status", "path": "serviceNowInfo.state", "default": "NOT FOUND" }, + { "key": "SN Created By", "path": "serviceNowInfo.sys_created_by", "default": "NOT FOUND" }, + { "key": "SN URL", "special": "serviceNowTicketUrl" } + ] + }, + "serviceNowConfig": { + "tables": [ + { "tableName": "demand" }, + { "tableName": "incident" } + ] + } +} +\`\`\` + `; + + public static examples = ['$ sf hardis:misc:servicenow-report']; + /* jscpd:ignore-start */ + public static flags: any = { + config: Flags.string({ + char: 'c', + description: 'Path to JSON config file containing user stories and ServiceNow configuration', + }), + 'where-choice': Flags.string({ + char: 'w', + description: 'Where selection for user stories. If not provided, you will be prompted to select one from the config file.', + }), + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': optionalOrgFlagWithDeprecations, + }; + /* jscpd:ignore-end */ + + protected static supportsDevhubUsername = false; + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + protected configFile: string | undefined; + protected whereChoice: string | undefined; + protected outputFile: string; + protected outputFilesRes: any = {}; + protected userStories: any[] = []; + protected results: any[] = []; + + protected userStoriesConfig: any = { + fields: [ + 'Id', + 'Name', + 'Ticket_Number__c', + 'copado__User_Story_Title__c', + 'CreatedBy.Name', + 'copado__Release__r.Name', + 'copado__Environment__r.Name', + ], + table: 'copado__User_Story__c', + where: "copado__Environment__r.Name ='UAT'", + whereChoices: { + 'UAT all': "copado__Environment__r.Name ='UAT'", + 'UAT postponed': "copado__Environment__r.Name ='UAT' AND copado__Release__r.Name = 'postponed'", + 'UAT in progress': "copado__Environment__r.Name ='UAT' AND copado__Release__r.Name != 'postponed' AND copado__Release__r.Name != 'cancelled'" + }, + orderBy: 'Ticket_Number__c ASC', + ticketField: 'Ticket_Number__c', + reportFields: [ + { key: 'US Name', path: 'Name' }, + { key: 'US SN Identifier', path: 'Ticket_Number__c' }, + { key: 'US Title', path: 'copado__User_Story_Title__c' }, + { key: 'US Created By', path: 'CreatedBy.Name' }, + { key: 'US Environment', path: 'copado__Environment__r.Name' }, + { key: 'US Release', path: 'copado__Release__r.Name' }, + { key: 'SN Identifier', path: 'serviceNowInfo.number', default: 'NOT FOUND' }, + { key: 'SN Title', path: 
'serviceNowInfo.short_description', default: 'NOT FOUND' }, + { key: 'SN Status', path: 'serviceNowInfo.state', default: 'NOT FOUND' }, + { key: 'SN Created By', path: 'serviceNowInfo.sys_created_by', default: 'NOT FOUND' }, + { key: 'SN URL', special: 'serviceNowTicketUrl' } + ] + }; + + protected serviceNowConfig: any = { + tables: [ + { tableName: 'demand' }, + { tableName: 'incident' }, + ] + }; + + + public async run(): Promise { + const { flags } = await this.parse(ServiceNowReport); + this.configFile = flags.config; + this.whereChoice = flags['where-choice']; + this.outputFile = flags.outputfile || null; + + await this.initializeConfiguration(); + + const conn = flags['target-org']?.getConnection(); + // List user stories matching with criteria + const ticketNumbers = await this.fetchUserStories(conn); + + // Get matching demands and incidents from ServiceNow API with axios using ticket numbers + const { serviceNowUrl, serviceNowApiOptions } = this.getServiceNowConfig(); + + // Check each service now table to get the tickets infos + for (const table of this.serviceNowConfig.tables) { + const serviceNowApiResource = `/api/now/table/${table.tableName}`; + const serviceNowApiQuery = `?sysparm_query=numberIN${ticketNumbers.join(',')}&sysparm_display_value=true`; + const serviceNowApiUrlWithQuery = `${serviceNowUrl}${serviceNowApiResource}${serviceNowApiQuery}`; + // Make API call to ServiceNow + uxLog("other", this, `Fetching Service now using query: ${serviceNowApiUrlWithQuery}`); + let serviceNowApiRes; + try { + serviceNowApiRes = await axios.get(serviceNowApiUrlWithQuery, serviceNowApiOptions); + } + catch (error: any) { + uxLog("error", this, c.red(`ServiceNow API call failed: ${error.message}\n${JSON.stringify(error?.response?.data || {})}`)); + continue; + } + // Complete user stories with ServiceNow data + const serviceNowRecords = serviceNowApiRes.data.result; + uxLog("other", this, `ServiceNow API call succeeded: ${serviceNowRecords.length} records 
found`); + for (const userStory of this.userStories) { + const ticketNumber = userStory?.[this.userStoriesConfig.ticketField]; + const serviceNowRecord = serviceNowRecords.find((record: any) => record.number === ticketNumber); + if (serviceNowRecord) { + userStory.serviceNowInfo = serviceNowRecord; + userStory.serviceNowTableName = table.tableName; + } + } + } + + // Build final result + this.results = this.userStories.map((userStory: any) => { + const serviceNowInfo = userStory.serviceNowInfo || {}; + // Build result object dynamically based on config + const result: any = {}; + for (const field of this.userStoriesConfig.reportFields) { + if (field.special === "serviceNowTicketUrl") { + if (!serviceNowInfo.sys_id) { + result[field.key] = 'NOT FOUND'; + } + else { + result[field.key] = `${process.env.SERVICENOW_URL}/nav_to.do?uri=/${userStory.serviceNowTableName}.do?sys_id=${serviceNowInfo.sys_id}`; + } + } else if (field.path) { + // Support nested paths like "CreatedBy.Name" or "serviceNowInfo.number" + const value = field.path.split('.').reduce((obj, prop) => obj && obj[prop], { ...userStory, serviceNowInfo }); + result[field.key] = value !== undefined && value !== null ? value : (field.default ?? 'NOT FOUND'); + } + } + return result; + }); + + uxLog("log", this, c.grey(JSON.stringify(this.results, null, 2))); + + // Generate CSV file + await this.buildCsvFile(); + + return { results: this.results, outputFilesRes: this.outputFilesRes }; + } + + private getServiceNowConfig() { + const serviceNowUrl = process.env.SERVICENOW_URL; + if (!serviceNowUrl) { + throw new SfError('ServiceNow API URL is not set. 
Please set SERVICENOW_URL environment variable.'); + } + const serviceNowApiUser = process.env.SERVICENOW_USERNAME || ''; + const serviceNowApiPassword = process.env.SERVICENOW_PASSWORD || ''; + const serviceNowApiHeaders = { + 'Content-Type': 'application/json', + Accept: 'application/json', + }; + const serviceNowApiOptions = this.buildServiceNowAuthHeaders(serviceNowApiHeaders, serviceNowApiUser, serviceNowApiPassword); + return { serviceNowUrl, serviceNowApiOptions }; + } + + private async fetchUserStories(conn: any) { + if (this.userStoriesConfig.whereChoices) { + // If whereChoices is defined, use the provided whereChoice flag or prompt user to select one + if (this.whereChoice) { + // If whereChoice is provided, use it directly + this.userStoriesConfig.where = this.userStoriesConfig.whereChoices[this.whereChoice]; + } + else { + // If whereChoice is not provided, prompt user to select one + uxLog("warning", this, c.yellow('No WHERE choice provided. Please select one from the available choices.')); + // If whereChoices is defined, prompt user to select one + const whereChoices = Object.keys(this.userStoriesConfig.whereChoices).map((key) => ({ + title: key, + description: this.userStoriesConfig.whereChoices[key], + value: key, + })); + const whereChoiceRes = await prompts({ + type: 'select', + message: 'Select a WHERE condition for user stories:', + description: 'Choose a predefined WHERE condition to filter user stories', + placeholder: 'Select a condition', + choices: whereChoices, + }); + this.whereChoice = whereChoiceRes.value; + this.userStoriesConfig.where = this.userStoriesConfig.whereChoices[this.whereChoice || '']; + } + } + const userStoriesQuery = `SELECT ${this.userStoriesConfig.fields.join(', ')} FROM ${this.userStoriesConfig.table} WHERE ${this.userStoriesConfig.where} ORDER BY ${this.userStoriesConfig.orderBy}`; + const userStoriesRes = await soqlQuery(userStoriesQuery, conn); + this.userStories = userStoriesRes.records; + // Get list of tickets 
from user stories + const ticketNumbers = userStoriesRes.records.map((record: any) => record?.[this.userStoriesConfig.ticketField]); + return ticketNumbers; + } + + private async initializeConfiguration() { + if (!this.configFile) { + // If no config file is provided, prompt users to select a JSON file in all files found in folder config/user-stories/ + const configFiles = await glob('config/user-stories/*.json', { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); + if (configFiles.length === 0) { + uxLog("warning", this, c.yellow('No configuration files found in config/user-stories/ directory. Using default config...')); + } + else if (configFiles.length === 1) { + this.configFile = configFiles[0]; + uxLog("other", this, `Single config file found: ${this.configFile}`); + } + else { + // If multiple files are found, prompt user to select one + const configFileRes = await prompts({ + type: 'select', + message: 'Multiple configuration files found. Please select one:', + description: 'Choose which configuration file to use for the ServiceNow report', + placeholder: 'Select a config file', + choices: configFiles.map((file) => ({ title: file, value: file })), + }); + this.configFile = configFileRes.value; + } + } + if (this.configFile) { + // Load configuration from JSON file + try { + const configData = await fs.readJSON(this.configFile); + this.userStoriesConfig = configData.userStoriesConfig || this.userStoriesConfig; + this.serviceNowConfig = configData.serviceNowConfig || this.serviceNowConfig; + uxLog("other", this, `Configuration loaded from ${this.configFile}`); + } + catch (error: any) { + throw new SfError(`Failed to load configuration file: ${error.message}`); + } + } + } + + private buildServiceNowAuthHeaders(serviceNowApiHeaders: { 'Content-Type': string; Accept: string; }, serviceNowApiUser: string, serviceNowApiPassword: string) { + if (!serviceNowApiUser || !serviceNowApiPassword) { + throw new SfError('ServiceNow API credentials are not set. 
Please set SERVICENOW_USERNAME and SERVICENOW_PASSWORD environment variables.'); + } + return { + headers: serviceNowApiHeaders, + auth: { + username: serviceNowApiUser, + password: serviceNowApiPassword, + }, + }; + } + + private async buildCsvFile(): Promise { + this.outputFile = await generateReportPath('user-story-report' + (this.whereChoice ? `-${this.whereChoice}` : ''), this.outputFile, { withDate: true }); + this.outputFilesRes = await generateCsvFile(this.results, this.outputFile, { fileTitle: 'User Stories Report' }); + } +} \ No newline at end of file diff --git a/src/commands/hardis/misc/toml2csv.ts b/src/commands/hardis/misc/toml2csv.ts index 3caf04842..495861e7c 100644 --- a/src/commands/hardis/misc/toml2csv.ts +++ b/src/commands/hardis/misc/toml2csv.ts @@ -1,92 +1,124 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as moment from "moment"; -import * as ora from "ora"; -import * as path from "path"; -import * as readline from "readline"; - -import { stripAnsi, uxLog } from "../../../common/utils"; -import { countLinesInFile } from "../../../common/utils/filesUtils"; -import { getRecordTypeId } from "../../../common/utils/orgUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class Toml2Csv extends SfdxCommand { - public static title = "TOML to CSV"; - - public static description = "Split TOML file into distinct CSV files"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import moment from 'moment'; +import ora from 'ora'; +import * as path from 'path'; +import * as readline from 'readline'; + +import { stripAnsi, uxLog } from '../../../common/utils/index.js'; +import { countLinesInFile } from '../../../common/utils/filesUtils.js'; +import { getRecordTypeId } from '../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class Toml2Csv extends SfCommand { + public static title = 'TOML to CSV'; + + public static description = ` +## Command Behavior + +**Splits a TOML (Tom's Obvious, Minimal Language) file into multiple CSV files, applying transformations and filters based on a JSON configuration.** + +This command is designed for data processing workflows where data is initially stored in a TOML-like format and needs to be converted into structured CSV files for import into Salesforce or other systems. It offers powerful capabilities for data manipulation and cleansing during the conversion process. + +Key functionalities: + +- **TOML Parsing:** Reads an input TOML file, identifying sections (e.g., \`[COMPTES]\`) and processing data lines within each section. +- **Configurable Transformations:** Applies transformations to individual data fields based on a JSON configuration file (\`transfoConfig.json\`). This can include: + - **Date Formatting:** Reformatting date strings to a desired output format. 
+ - **Enum Transcoding:** Mapping input values to predefined output values using lookup tables (enums). + - **Concatenation:** Combining multiple input fields into a single output field. + - **Record Type ID Resolution:** Dynamically retrieving Salesforce Record Type IDs. +- **Data Filtering:** Filters data lines based on specified criteria (e.g., date ranges, parent ID existence, column values), allowing you to exclude irrelevant data from the output. +- **Duplicate Removal:** Optionally removes duplicate lines from the output CSV files. +- **Error Handling and Reporting:** Catches transformation errors, logs them, and can output problematic lines to separate error CSV files for review. +- **CSV Output:** Generates one or more CSV files, with configurable separators and headers, ready for Salesforce Data Loader or other import tools. + +
+Technical explanations + +The command's technical implementation involves: + +- **File I/O:** Uses \`fs-extra\` for file system operations (reading TOML, writing CSVs, creating directories) and \`readline\` for efficient line-by-line processing of large TOML files. +- **Configuration Loading:** Reads and parses the \`transfoConfig.json\` file, which defines the mapping rules, transformations, and filters. It also loads external enum files if specified in the configuration. +- **Data Processing Pipeline:** Iterates through each line of the TOML file: + - Identifies section headers to determine the current data context. + - Parses data lines based on the input separator. + - Applies filters defined in \`transfoConfig\` to decide whether to process or skip a line. + - Performs data transformations (date formatting, enum lookups, concatenations) as specified in the \`transfoConfig\`. + - Resolves Salesforce Record Type IDs by querying the target org using \`getRecordTypeId\`. + - Formats the output CSV cells, handling special characters and separators. + - Writes the transformed data to the appropriate CSV output stream. +- **Error Management:** Catches exceptions during transformation and logs detailed error messages, including the problematic line and the reason for the error. +- **Progress Indication:** Uses \`ora\` for a command-line spinner to provide visual feedback on the processing progress. +- **Statistics Collection:** Tracks various statistics, such as the number of processed lines, successful lines, error lines, and filtered lines, providing a summary at the end. +- **File Copying:** Optionally copies generated CSV files to other specified locations. +
+`; public static examples = [ - "$ sfdx hardis:misc:toml2csv --tomlfile 'D:/clients/toto/V1_full.txt' ", - "$ sfdx hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' ", - "$ sfdx hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' --outputdir 'C:/tmp/rrrr'", - "$ NODE_OPTIONS=--max_old_space_size=9096 sfdx hardis:misc:toml2csv --skiptransfo --tomlfile './input/V1.txt' --outputdir './output' --filtersections 'COMPTES,SOUS'", + "$ sf hardis:misc:toml2csv --tomlfile 'D:/clients/toto/V1_full.txt' ", + "$ sf hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' ", + "$ sf hardis:misc:toml2csv --skiptransfo --tomlfile 'D:/clients/toto/V1_full.txt' --outputdir 'C:/tmp/rrrr'", + "$ NODE_OPTIONS=--max_old_space_size=9096 sf hardis:misc:toml2csv --skiptransfo --tomlfile './input/V1.txt' --outputdir './output' --filtersections 'COMPTES,SOUS'", ]; - protected static flagsConfig = { - tomlfile: flags.string({ - char: "f", - description: "Input TOML file path", + public static flags: any = { + tomlfile: Flags.string({ + char: 'f', + description: 'Input TOML file path', required: true, }), - transfoconfig: flags.string({ - char: "t", - description: "Path to JSON config file for mapping and transformation", + transfoconfig: Flags.string({ + char: 't', + description: 'Path to JSON config file for mapping and transformation', }), - filtersections: flags.array({ - char: "l", - description: "List of sections to process (if not set, all sections will be processed)", + filtersections: Flags.string({ + char: 'l', + description: 'List of sections to process (if not set, all sections will be processed)', default: [], + multiple: true, }), - skiptransfo: flags.boolean({ - char: "s", + skiptransfo: Flags.boolean({ + char: 's', default: false, - description: "Do not apply transformation to input data", + description: 'Do not apply transformation to input data', }), - outputdir: flags.string({ - char: "o", - description: 
"Output directory", + outputdir: Flags.string({ + char: 'z', + description: 'Output directory', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected transfoConfig: any = {}; protected transfoConfigFile: string; protected rootConfigDirectory: string; protected outputDir: string; protected skipTransfo = false; - protected filterSections = []; + protected filterSections: any[] = []; protected doFilterSections = false; - protected spinner: any; + protected spinnerCustom: any; protected spinnerInterval: any; protected inputFileSeparator: string; protected outputFileSeparator: string; @@ -118,14 +150,15 @@ export default class Toml2Csv extends SfdxCommand { public async run(): Promise { // Collect input parameters - const tomlFile = this.flags.tomlfile; - const tomlFileEncoding = this.flags.tomlfileencoding || "utf8"; - this.transfoConfigFile = this.flags.transfoconfig || path.join(process.cwd(), "transfoConfig.json"); + const { flags } = await this.parse(Toml2Csv); + 
const tomlFile = flags.tomlfile; + const tomlFileEncoding = 'utf8'; + this.transfoConfigFile = flags.transfoconfig || path.join(process.cwd(), 'transfoConfig.json'); this.rootConfigDirectory = path.dirname(this.transfoConfigFile); - this.outputDir = this.flags.outputdir || path.join(process.cwd(), path.parse(tomlFile).name); - const debugMode = this.flags.debug || false; - this.skipTransfo = this.flags.skiptransfo || false; - this.filterSections = this.flags.filtersections || []; + this.outputDir = flags.outputdir || path.join(process.cwd(), path.parse(tomlFile).name); + const debugMode = flags.debug || false; + this.skipTransfo = flags.skiptransfo || false; + this.filterSections = flags.filtersections || []; this.doFilterSections = this.filterSections.length > 0; // Check TOML file is existing @@ -137,31 +170,39 @@ export default class Toml2Csv extends SfdxCommand { if (!fs.existsSync(this.transfoConfigFile)) { this.triggerError(c.red(`Mapping/Transco config ${c.bold(this.transfoConfigFile)} not found`)); } - const transfoConfigInit = JSON.parse(fs.readFileSync(this.transfoConfigFile, "utf-8")); + const transfoConfigInit = JSON.parse(fs.readFileSync(this.transfoConfigFile, 'utf-8')); this.transfoConfig = this.completeTransfoConfig(transfoConfigInit); // Set separators - this.inputFileSeparator = this.transfoConfig?.inputFile?.separator || ","; - this.outputFileSeparator = this.transfoConfig?.outputFile?.separator || ","; + this.inputFileSeparator = this.transfoConfig?.inputFile?.separator || ','; + this.outputFileSeparator = this.transfoConfig?.outputFile?.separator || ','; // Create output directory if not existing yet await fs.ensureDir(this.outputDir); // Empty output dir if (!this.transfoConfig?.skipResetOutputDir === true) { await fs.emptyDir(this.outputDir); - await fs.ensureDir(path.join(this.outputDir, "errors")); + await fs.ensureDir(path.join(this.outputDir, 'errors')); } - uxLog(this, c.cyan(`Generating CSV files from ${c.green(tomlFile)} (encoding 
${tomlFileEncoding}) into folder ${c.green(this.outputDir)}`)); + uxLog( + "action", + this, + c.cyan( + `Generating CSV files from ${c.green(tomlFile)} (encoding ${tomlFileEncoding}) into folder ${c.green( + this.outputDir + )}` + ) + ); // Start spinner - this.spinner = ora({ text: `Processing...`, spinner: "moon" }).start(); + this.spinnerCustom = ora({ text: `Processing...`, spinner: 'moon' }).start(); this.spinnerInterval = setInterval(() => { this.updateSpinner(); }, 10000); // Read TOML file and process lines section by section - const fileStream = fs.createReadStream(tomlFile, { encoding: this.transfoConfig?.inputFile?.encoding || "utf8" }); + const fileStream = fs.createReadStream(tomlFile, { encoding: this.transfoConfig?.inputFile?.encoding || 'utf8' }); const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity, @@ -169,7 +210,7 @@ export default class Toml2Csv extends SfdxCommand { for await (const line of rl) { this.stats.totalLinesNb++; if (debugMode) { - uxLog(this, c.grey(line)); + uxLog("log", this, c.grey(line)); } // Empty line if (line.length === 0) { @@ -177,9 +218,9 @@ export default class Toml2Csv extends SfdxCommand { continue; } // Section line - if (line.startsWith("[")) { + if (line.startsWith('[')) { this.stats.sectionLinesNb++; - this.currentSection = /\[(.*)\]/gm.exec(line)[1]; // ex: get COMPTES from [COMPTES] + this.currentSection = (/\[(.*)\]/gm.exec(line) || '')[1]; // ex: get COMPTES from [COMPTES] if (this.doFilterSections && !this.filterSections.includes(this.currentSection)) { continue; } @@ -197,9 +238,15 @@ export default class Toml2Csv extends SfdxCommand { this.sectionLines[this.currentSection] = this.sectionLines[this.currentSection] || []; // Init section files writeStreams if (this.tomlSectionsFileWriters[this.currentSection] == null) { - this.tomlSectionsFileWriters[this.currentSection] = await this.createSectionWriteStream(this.currentSection, false); + 
this.tomlSectionsFileWriters[this.currentSection] = await this.createSectionWriteStream( + this.currentSection, + false + ); if (!this.skipTransfo) { - this.tomlSectionsErrorsFileWriters[this.currentSection] = await this.createSectionWriteStream(this.currentSection, true); + this.tomlSectionsErrorsFileWriters[this.currentSection] = await this.createSectionWriteStream( + this.currentSection, + true + ); } } } @@ -240,7 +287,7 @@ export default class Toml2Csv extends SfdxCommand { } else { // With transformation try { - await this.convertLineToSfThenWrite(this.currentSection, lineSplit); + await this.convertLineToSfThenWrite(this.currentSection, lineSplit, flags); } catch (e) { // Manage error this.stats.dataErrorLinesNb++; @@ -251,21 +298,21 @@ export default class Toml2Csv extends SfdxCommand { .map((val) => (this.inputFileSeparator !== this.outputFileSeparator ? this.formatCsvCell(val) : val)) // Add quotes if value contains a separator .join(this.outputFileSeparator) + this.outputFileSeparator + - stripAnsi(`"${e.message.replace(/"/g, "'")}"`); + stripAnsi(`"${(e as Error).message.replace(/"/g, "'")}"`); if (this.checkNotDuplicate(this.currentSection, lineError)) { await this.writeLine(lineError, this.tomlSectionsErrorsFileWriters[this.currentSection]); this.addLineInCache(this.currentSection, lineSplit, lineError, false); } - if (this.lineErrorMessages[e.message]) { - this.lineErrorMessages[e.message]++; + if (this.lineErrorMessages[(e as Error).message]) { + this.lineErrorMessages[(e as Error).message]++; } else { - this.lineErrorMessages[e.message] = 1; - uxLog(this, c.red(e.message)); + this.lineErrorMessages[(e as Error).message] = 1; + uxLog("error", this, c.red((e as Error).message)); } } } } else { - uxLog(this, c.yellow(`Line without declared section before: skipped (${line})`)); + uxLog("warning", this, c.yellow(`Line without declared section before: skipped (${line})`)); } } @@ -283,7 +330,9 @@ export default class Toml2Csv extends SfdxCommand { // 
Stop spinner clearInterval(this.spinnerInterval); - this.spinner.succeed(`File processing complete of ${this.stats.dataLinesNb} data lines (${this.stats.dataErrorLinesNb} in error)`); + this.spinnerCustom.succeed( + `File processing complete of ${this.stats.dataLinesNb} data lines (${this.stats.dataErrorLinesNb} in error)` + ); // Manage file copy to data workspace folders for (const sectionKey of Object.keys(this.transfoConfig.entities)) { @@ -294,41 +343,56 @@ export default class Toml2Csv extends SfdxCommand { } if (fs.existsSync(this.tomlSectionsFileWriters[sectionKey].path)) { await fs.copy(this.tomlSectionsFileWriters[sectionKey].path, sectionData.outputFile.copyFilePath); - uxLog(this, c.grey(`- copied ${this.tomlSectionsFileWriters[sectionKey].path} to ${sectionData.outputFile.copyFilePath}`)); + uxLog( + "log", + this, + c.grey( + `- copied ${this.tomlSectionsFileWriters[sectionKey].path} to ${sectionData.outputFile.copyFilePath}` + ) + ); } } } // Display full stats - uxLog(this, c.grey("Stats: \n" + JSON.stringify(this.stats, null, 2))); + uxLog("log", this, c.grey('Stats: \n' + JSON.stringify(this.stats, null, 2))); // Display errors summary if (Object.keys(this.lineErrorMessages).length > 0) { - uxLog(this, c.yellow("There have been parsing errors:")); + uxLog("warning", this, c.yellow('There have been parsing errors:')); for (const errMsg of Object.keys(this.lineErrorMessages)) { - uxLog(this, c.yellow("- " + this.lineErrorMessages[errMsg] + " lines: " + errMsg)); + uxLog("warning", this, c.yellow('- ' + this.lineErrorMessages[errMsg] + ' lines: ' + errMsg)); } - uxLog(this, ""); + uxLog("other", this, ''); } // Display human-readable stats for (const section of Object.keys(this.stats.sections)) { const sectionStats = this.stats.sections[section]; if (sectionStats.dataLinesNb > 0) { - uxLog(this, c.grey(`[${section}] kept ${sectionStats.dataSuccessLinesNb} entries on ${sectionStats.dataLinesNb}`)); + uxLog( + "log", + this, + c.grey(`[${section}] kept 
${sectionStats.dataSuccessLinesNb} entries on ${sectionStats.dataLinesNb}`) + ); } } - uxLog(this, c.grey(`[TOTAL] kept ${this.stats.dataSuccessLinesNb} entries on ${this.stats.dataLinesNb}`)); + uxLog("log", this, c.grey(`[TOTAL] kept ${this.stats.dataSuccessLinesNb} entries on ${this.stats.dataLinesNb}`)); const message = `TOML file ${tomlFile} has been split into ${this.csvFiles.length} CSV files in directory ${this.outputDir}`; uxLog( + "action", this, - c.cyan(`TOML file ${c.green(tomlFile)} has been split into ${c.green(this.csvFiles.length)} CSV files in directory ${c.green(this.outputDir)}`), + c.cyan( + `TOML file ${c.green(tomlFile)} has been split into ${c.green( + this.csvFiles.length + )} CSV files in directory ${c.green(this.outputDir)}` + ) ); return { outputString: message, csvfiles: this.csvFiles, stats: this.stats }; } updateSpinner() { - this.spinner.text = + this.spinnerCustom.text = `Processing section ${this.currentSection} (total lines: ${this.stats.dataLinesNb},` + ` success: ${this.stats.dataSuccessLinesNb},` + ` errors: ${this.stats.dataErrorLinesNb}, filtered: ${this.stats.dataFilteredLinesNb})`; @@ -340,8 +404,8 @@ export default class Toml2Csv extends SfdxCommand { if (this.skipTransfo) { const outputFile = path.join(this.outputDir, `${section}.csv`); // Init writeStream - const fileWriteStream = fs.createWriteStream(path.resolve(outputFile), { encoding: "utf8" }); - uxLog(this, c.cyan(`- Initialized output CSV file ${c.green(c.bold(outputFile))}`)); + const fileWriteStream = fs.createWriteStream(path.resolve(outputFile), { encoding: 'utf8' }); + uxLog("action", this, c.cyan(`- Initialized output CSV file ${c.green(c.bold(outputFile))}`)); this.csvFiles.push(outputFile); return fileWriteStream; } @@ -350,29 +414,30 @@ export default class Toml2Csv extends SfdxCommand { // Create SF Object output file name const outputFile = path.join( this.outputDir, - `${errMode ? 
"errors" + path.sep + "err__" : ""}${this.transfoConfig.entities[section].outputFile.salesforceObjectApiName}___${section}.csv`, + `${errMode ? 'errors' + path.sep + 'err__' : ''}${this.transfoConfig.entities[section].outputFile.salesforceObjectApiName + }___${section}.csv` ); // Init writeStream - const fileWriteStream = fs.createWriteStream(path.resolve(outputFile), { encoding: "utf8" }); + const fileWriteStream = fs.createWriteStream(path.resolve(outputFile), { encoding: 'utf8' }); // Create CSV Header let headerLine = (this.transfoConfig?.entities[section]?.outputFile?.cols || []) .map((colDescription: any) => colDescription.name) .join(this.outputFileSeparator); if (errMode) { - headerLine += this.outputFileSeparator + "Error"; + headerLine += this.outputFileSeparator + 'Error'; } // Initialize with header - fileWriteStream.write(headerLine + "\n"); - uxLog(this, c.cyan(`- Initialized ${errMode ? "errors" : "output"} CSV file ${c.green(c.bold(outputFile))}`)); + fileWriteStream.write(headerLine + '\n'); + uxLog("action", this, c.cyan(`- Initialized ${errMode ? 'errors' : 'output'} CSV file ${c.green(c.bold(outputFile))}`)); this.csvFiles.push(outputFile); return fileWriteStream; } else if (errMode === false) { // Section has not been described in config file !! 
- uxLog(this, c.yellow(`Section ${section} as entity is not described with columns in ${this.transfoConfigFile}`)); - const outputFile = path.join(this.outputDir, "errors", `noconfig__${section}.csv`); + uxLog("warning", this, c.yellow(`Section ${section} as entity is not described with columns in ${this.transfoConfigFile}`)); + const outputFile = path.join(this.outputDir, 'errors', `noconfig__${section}.csv`); // Init writeStream - const fileWriteStream = fs.createWriteStream(path.resolve(outputFile), { encoding: "utf8" }); - uxLog(this, c.cyan(`- Initialized default output CSV file ${c.green(c.bold(outputFile))}`)); + const fileWriteStream = fs.createWriteStream(path.resolve(outputFile), { encoding: 'utf8' }); + uxLog("action", this, c.cyan(`- Initialized default output CSV file ${c.green(c.bold(outputFile))}`)); this.csvFiles.push(outputFile); return fileWriteStream; } @@ -385,15 +450,15 @@ export default class Toml2Csv extends SfdxCommand { const ableToWrite = streamWriter.write(`${lineSf}\n`); if (!ableToWrite) { await new Promise((resolve) => { - streamWriter.once("drain", resolve); + streamWriter.once('drain', resolve); }); } } } // Convert input CSV line into SF Bulk API expected CSV line - async convertLineToSfThenWrite(section: string, lineSplit: string[]) { - const linesSfArray = []; + async convertLineToSfThenWrite(section: string, lineSplit: string[], flags) { + const linesSfArray: any[] = []; // convert into input format const inputCols: any = {}; @@ -401,34 +466,38 @@ export default class Toml2Csv extends SfdxCommand { // Case when cols are defined line [ {"Name": 0, "FirstName: 1" ...}] for (let i = 0; i < this.transfoConfig.entities[section].inputFile.cols.length; i++) { const inputColKey = this.transfoConfig.entities[section].inputFile.cols[i]; - inputCols[inputColKey] = lineSplit[i] || ""; + inputCols[inputColKey] = lineSplit[i] || ''; } } else { // Case when cols are not defined: just use positions for (let i = 0; i < lineSplit.length; i++) { 
const humanInputColPos = i + 1; - inputCols[humanInputColPos] = lineSplit[i] || ""; + inputCols[humanInputColPos] = lineSplit[i] || ''; } } // convert into output format for (const colDefinition of this.transfoConfig.entities[section]?.outputFile?.cols || []) { // Col definition is the position or the name of a column in input file if (colDefinition.inputColKey || colDefinition.inputColKey === 0) { - if (inputCols[colDefinition.inputColKey] || inputCols[colDefinition.inputColKey] === "" || inputCols[colDefinition.inputColKey] === 0) { + if ( + inputCols[colDefinition.inputColKey] || + inputCols[colDefinition.inputColKey] === '' || + inputCols[colDefinition.inputColKey] === 0 + ) { let colVal: string = inputCols[colDefinition.inputColKey]; // Transform if necessary if (colDefinition.transfo) { colVal = this.manageTransformation(colDefinition.transfo, colVal, colDefinition); } // Manage missing required value - if (colDefinition?.required === true && colVal === "") { + if (colDefinition?.required === true && colVal === '') { this.triggerError( c.red( - `${c.bold(this.transfoConfig.entities[this.currentSection].outputFile.salesforceObjectApiName)}.${c.bold( - colDefinition.name, - )}: Missing required value`, + `${c.bold( + this.transfoConfig.entities[this.currentSection || ''].outputFile.salesforceObjectApiName + )}.${c.bold(colDefinition.name)}: Missing required value` ), - false, + false ); } // Manage truncate value @@ -438,7 +507,10 @@ export default class Toml2Csv extends SfdxCommand { // Add cell in line linesSfArray.push(colVal); // Add quotes if value contains output file separator } else { - this.triggerError(c.red(`You must have a correspondance in input cols for output col ${JSON.stringify(colDefinition)}`), false); + this.triggerError( + c.red(`You must have a correspondance in input cols for output col ${JSON.stringify(colDefinition)}`), + false + ); } } // Col definition is a hardcoded value @@ -453,14 +525,14 @@ export default class Toml2Csv extends 
SfdxCommand { } // Col definition is a composite concatenated value (Virtual unique key for SFDMU) else if (colDefinition.concatComposite) { - const concatFields = colDefinition.name.split("$").filter((fieldName) => fieldName !== ""); - colDefinition.separator = colDefinition.separator || ";"; + const concatFields = colDefinition.name.split('$').filter((fieldName) => fieldName !== ''); + colDefinition.separator = colDefinition.separator || ';'; const concatenatedValue = this.processConcat(concatFields, section, linesSfArray, colDefinition); linesSfArray.push(concatenatedValue); } // Get record type Id else if (colDefinition.recordType) { - const recordTypeId = await getRecordTypeId(colDefinition.recordType, this.org.getConnection()); + const recordTypeId = await getRecordTypeId(colDefinition.recordType, flags['target-org'].getConnection()); if (recordTypeId === null) { this.triggerError(`No RecordTypeId found for ${JSON.stringify(colDefinition.recordType)}`, true); } @@ -486,34 +558,35 @@ export default class Toml2Csv extends SfdxCommand { if (concatColName.hardcoded) { return concatColName.hardcoded; } - const colNamePosition = this.transfoConfig?.entities[section]?.outputFile?.colOutputPositions?.indexOf(concatColName); + const colNamePosition = + this.transfoConfig?.entities[section]?.outputFile?.colOutputPositions?.indexOf(concatColName); if (colNamePosition === null || colNamePosition < 0) { this.triggerError( `Concat error: Unable to find output field "${concatColName}" in ${JSON.stringify( - this.transfoConfig.entities[section].outputFile.colOutputPositions, + this.transfoConfig.entities[section].outputFile.colOutputPositions )}`, - false, + false ); } const colNameValue = linesSfArray[colNamePosition]; return colNameValue; }) - .join(colDefinition.separator || " "); + .join(colDefinition.separator || ' '); return concatenatedValues; } // Apply transformations defined in transfoconfig file manageTransformation(transfo: any, colVal: any, colDefinition: any) { 
// Date transfo - if (transfo.type === "date") { - if (colVal === "") { - return ""; + if (transfo.type === 'date') { + if (colVal === '') { + return ''; } if (transfo.addZero && colVal.length === 7) { - colVal = "0" + colVal; + colVal = '0' + colVal; } const formattedDate = moment(colVal, transfo.from, true).format(transfo.to); - if (formattedDate === "Invalid date") { + if (formattedDate === 'Invalid date') { this.triggerError(`Unable to reformat date ${colVal} for column ${JSON.stringify(colDefinition)}`, false); } return formattedDate; @@ -528,15 +601,17 @@ export default class Toml2Csv extends SfdxCommand { // Manage transco value getTranscoValue(transfo: any, colVal: string, colDefinition: any) { const enumValues = this.getTranscoValues(transfo); - const transcodedValue = enumValues[colVal] !== null ? enumValues[colVal] : transfo.default || ""; - if (transcodedValue === "" && colVal !== "") { + const transcodedValue = enumValues[colVal] !== null ? enumValues[colVal] : transfo.default || ''; + if (transcodedValue === '' && colVal !== '') { this.triggerError( c.red( - `${c.bold(this.transfoConfig.entities[this.currentSection].outputFile.salesforceObjectApiName)}.${c.bold( - colDefinition.name, - )}: Missing matching value for ${c.bold(colVal)} in ${c.grey(JSON.stringify(Object.keys(enumValues)))}`, + `${c.bold( + this.transfoConfig.entities[this.currentSection || ''].outputFile.salesforceObjectApiName + )}.${c.bold(colDefinition.name)}: Missing matching value for ${c.bold(colVal)} in ${c.grey( + JSON.stringify(Object.keys(enumValues)) + )}` ), - false, + false ); } return transcodedValue; @@ -551,11 +626,11 @@ export default class Toml2Csv extends SfdxCommand { return this.loadedTranscos[transfo.enum]; } // Load enum in memory - const transcoFile = path.join(this.rootConfigDirectory, "enums", `${transfo.enum}.json`); + const transcoFile = path.join(this.rootConfigDirectory, 'enums', `${transfo.enum}.json`); if (!fs.existsSync(transcoFile)) { 
this.triggerError(`Missing transco file ${c.bold(transcoFile)} for enum ${c.bold(transfo.enum)}`, false); } - this.loadedTranscos[transfo.enum] = JSON.parse(fs.readFileSync(transcoFile, "utf-8")); + this.loadedTranscos[transfo.enum] = JSON.parse(fs.readFileSync(transcoFile, 'utf-8')); return this.loadedTranscos[transfo.enum]; } this.triggerError(`Missing transco definition in ${c.bold(JSON.stringify(transfo))}`, false); @@ -565,15 +640,15 @@ export default class Toml2Csv extends SfdxCommand { let checkRes: boolean | null = false; try { checkRes = - filter.type === "date" + filter.type === 'date' ? this.checkFilterDate(filter, lineSplit) - : filter.type === "parentId" + : filter.type === 'parentId' ? this.checkFilterParentId(filter, lineSplit) - : filter.type === "colValue" + : filter.type === 'colValue' ? this.checkFilterColValue(filter, lineSplit) : null; if (checkRes === null) { - throw Error("Unknown filter type " + JSON.stringify(filter)); + throw Error('Unknown filter type ' + JSON.stringify(filter)); } // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { @@ -587,11 +662,11 @@ export default class Toml2Csv extends SfdxCommand { const dateStart = moment(filter.date, filter.dateFormat, true); const colValue = moment(lineSplit[filter.colNumber - 1], filter.colDateFormat, true); const res = - filter.typeDtl === "higherThan" - ? colValue.isAfter(dateStart, "day") - : filter.typeDtl === "lowerThan" - ? colValue.isBefore(dateStart, "day") - : colValue.isSame(dateStart, "day"); + filter.typeDtl === 'higherThan' + ? colValue.isAfter(dateStart, 'day') + : filter.typeDtl === 'lowerThan' + ? 
colValue.isBefore(dateStart, 'day') + : colValue.isSame(dateStart, 'day'); return res; } @@ -643,9 +718,9 @@ export default class Toml2Csv extends SfdxCommand { triggerError(errorMsg: string, fatal = true) { if (fatal && this.spinner) { clearInterval(this.spinnerInterval); - this.spinner.fail(errorMsg); + this.spinnerCustom.fail(errorMsg); } - throw new SfdxError(errorMsg); + throw new SfError(errorMsg); } formatCsvCell(cellVal: string) { diff --git a/src/commands/hardis/org/community/update.ts b/src/commands/hardis/org/community/update.ts new file mode 100644 index 000000000..caf47901f --- /dev/null +++ b/src/commands/hardis/org/community/update.ts @@ -0,0 +1,140 @@ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class HardisOrgCommunityUpdate extends SfCommand { + public static readonly summary = messages.getMessage('orgCommunityUpdate'); + public static readonly description = ` +## Command Behavior + +**Updates the status of one or more Salesforce Experience Cloud (Community) networks.** + +This command provides a way to programmatically change the status of your Salesforce Communities, allowing you to manage their availability. This is particularly useful for: + +- **Maintenance:** Taking communities offline for planned maintenance (\`DownForMaintenance\`). +- **Activation/Deactivation:** Bringing communities online or offline (\`Live\`, \`DownForMaintenance\`). 
+- **Automation:** Integrating community status changes into CI/CD pipelines or scheduled jobs. + +Key functionalities: + +- **Network Selection:** You can specify one or more community network names (separated by commas) using the \`--name\` flag. +- **Status Update:** You can set the new status for the selected communities using the \`--status\` flag. Supported values are \`Live\` and \`DownForMaintenance\`. +- **Confirmation Prompt:** In non-CI environments, it provides a confirmation prompt before executing the update, ensuring intentional changes. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Query:** It first queries the Salesforce \`Network\` object using SOQL to retrieve the \`Id\`, \`Name\`, and \`Status\` of the specified communities. This ensures that only existing communities are targeted. +- **SObject Update:** It then constructs an array of \`Network\` sObjects with their \`Id\` and the new \`Status\` and performs a DML update operation using \`conn.sobject("Network").update()\`. The \`allOrNone: false\` option is used to allow partial success in case some updates fail. +- **Error Handling and Reporting:** It iterates through the update results, logging success or failure for each community. It also provides a summary of successful and erroneous updates. +- **User Interaction:** Uses \`prompts\` to confirm the update action with the user when not running in a CI environment. +- **Salesforce Connection:** Establishes a connection to the target Salesforce org using the \`target-org\` flag. +
+`; + + public static examples = [ + `$ sf hardis:org:community:update --name 'MyNetworkName' --status DownForMaintenance`, + `$ sf hardis:org:community:update --name 'MyNetworkName,MySecondNetworkName' --status Live` + ]; + + public static readonly flags = { + name: Flags.string({ + description: 'List of Networks Names that you want to update, separated by comma', + char: 'n', + required: true, + }), + status: Flags.string({ + description: 'New status for the community, available values are: Live, DownForMaintenance', + char: 's', + required: true, + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + public async run(): Promise { + const { flags } = await this.parse(HardisOrgCommunityUpdate); + const networkNames = flags.name ? flags.name.split(',') : []; + const status = flags.status ? flags.status : ''; + const debugMode = flags.debug || false; + + const conn = flags['target-org'].getConnection(); + + if (networkNames.length === 0) { + throw new SfError(`Error: No network name(s) provided.`); + } + + const networksConstraintIn = networkNames.map((networkName) => `'${networkName}'`).join(','); + const networksQuery = `SELECT Id, Name, Status FROM Network WHERE Name IN (${networksConstraintIn})`; + const networksQueryRes = await soqlQuery(networksQuery, conn); + if (debugMode) { + uxLog("log", this, c.grey(`Query result:\n${JSON.stringify(networksQueryRes, null, 2)}`)); + } + // Check empty result + if (networksQueryRes.length === 0) { + const outputString = `No matching network records found with given names`; + uxLog("warning", this, c.yellow(outputString)); + return { outputString }; + } + const idToNameMap = new Map(networksQueryRes.records.map(network => [network.Id, network.Name])); + + // Request configuration from user + if (!isCI) { + const confirmUpdate = await prompts({ + type: 'confirm', + name: 'value', + initial: true, + message: 
c.cyanBright( + `Are you sure you want to update these ${c.bold(idToNameMap.size)} networks's status to '${status}' in org ${c.green( + flags['target-org'].getUsername() + )} (y/n)?` + ), + description: 'Confirm that you want to change the status of the selected community networks', + }); + if (confirmUpdate.value !== true) { + const outputString = 'Script cancelled by user'; + uxLog("warning", this, c.yellow(outputString)); + return { outputString }; + } + } + + // Process Network update + const networkUpdates = networksQueryRes.records.map((network) => { + return { Id: network.Id, Status: status }; + }); + const updateResults = await conn.sobject("Network").update(networkUpdates, { allOrNone: false }); + let updateSuccessNb = 0; + let updateErrorsNb = 0; + + for (const ret of updateResults) { + if (ret.success) { + updateSuccessNb++; + uxLog("success", this, c.green(`'${c.bold(idToNameMap.get(ret.id))}' Network was updated.`)); + } else { + updateErrorsNb++; + uxLog("error", this, c.red(`Error ${updateErrorsNb}: Network '${idToNameMap.get(ret.id)}' failed to update: [${ret.errors[0].message}]`)); + } + } + // Return an object to be displayed with --json + return { + orgId: flags['target-org'].getOrgId(), + communityUpdatesSuccess: updateSuccessNb, + communityUpdatesErrors: updateErrorsNb, + outputString: `${updateSuccessNb} network(s) were updated`, + }; + + } +} diff --git a/src/commands/hardis/org/configure/data.ts b/src/commands/hardis/org/configure/data.ts index d7c5f4a1b..8596c6f8e 100644 --- a/src/commands/hardis/org/configure/data.ts +++ b/src/commands/hardis/org/configure/data.ts @@ -1,62 +1,80 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as pascalcase from "pascalcase"; -import * as path from "path"; -import { uxLog } from 
"../../../../common/utils"; -import { dataFolderRoot } from "../../../../common/utils/dataUtils"; -import { prompts } from "../../../../common/utils/prompts"; -import { WebSocketClient } from "../../../../common/websocketClient"; -import { getConfig, setConfig } from "../../../../config"; -import { PACKAGE_ROOT_DIR } from "../../../../settings"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class ConfigureData extends SfdxCommand { - public static title = "Configure Data project"; - - public static description = `Configure Data Export/Import with a [SFDX Data Loader](https://help.sfdmu.com/) Project - -See article: +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import pascalcase from 'pascalcase'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { DATA_FOLDERS_ROOT } from '../../../../common/utils/dataUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; +import { getConfig, setConfig } from '../../../../config/index.js'; +import { PACKAGE_ROOT_DIR } from '../../../../settings.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class ConfigureData extends SfCommand { + public static title = 'Configure Data project'; + + public static description = ` +## Command Behavior + +**Configures a Salesforce Data Migration Utility (SFDMU) 
project for data export and import operations.** + +This command assists in setting up SFDMU workspaces, which are essential for managing data within your Salesforce environments. It streamlines the creation of \`export.json\` files and related configurations, enabling efficient data seeding, migration, and synchronization. + +Key functionalities: + +- **Template-Based Configuration:** Allows you to choose from predefined SFDMU templates or start with a blank configuration. Templates can pre-populate \`export.json\` with common data migration scenarios. +- **Interactive Setup:** Guides you through the process of defining the SFDMU project folder name, label, and description. +- **\`export.json\` Generation:** Creates the \`export.json\` file, which is the core configuration file for SFDMU, defining objects to export/import, queries, and operations. +- **Additional File Generation:** Can generate additional configuration files, such as a \`badwords.json\` file for data filtering scenarios. +- **Scratch Org Integration:** Offers to automatically configure the SFDMU project to be used for data import when initializing a new scratch org, ensuring consistent test data across development environments. + +See this article for a practical example: [![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) + +
+Technical explanations + +The command's technical implementation involves: + +- **SFDMU Integration:** It acts as a setup wizard for SFDMU, generating the necessary configuration files that the \`sfdmu\` plugin consumes. +- **Interactive Prompts:** Uses the \`prompts\` library to gather user input for various configuration parameters, such as the data path, label, and description. +- **File System Operations:** Employs \`fs-extra\` to create directories (e.g., \`data/your-project-name/\`) and write the \`export.json\` and any additional configuration files. +- **JSON Manipulation:** Constructs the \`export.json\` content dynamically based on user input and selected templates, including defining objects, queries, and operations. +- **PascalCase Conversion:** Uses \`pascalcase\` to format the SFDMU folder name consistently. +- **Configuration Persistence:** Updates the project's \`sfdx-hardis.yml\` file (via \`setConfig\`) to include the newly configured data package if it's intended for scratch org initialization. +- **WebSocket Communication:** Uses \`WebSocketClient.requestOpenFile\` to open the generated \`export.json\` file in VS Code, facilitating immediate configuration. +- **Required Plugin Check:** Explicitly lists \`sfdmu\` as a required plugin, ensuring the necessary dependency is present. +
`; - public static examples = ["$ sfdx hardis:org:configure:data"]; + public static examples = ['$ sf hardis:org:configure:data']; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdmu"]; + protected static requiresSfdxPlugins = ['sfdmu']; additionalFiles: any = []; dataPath: string; sfdmuConfig: any; @@ -67,7 +85,7 @@ See article: public async run(): Promise { const template = await this.selectTemplate(); - if (template === "blank") { + if (template === 'blank') { // Request info to build sfdmu workspace await this.buildExportJsonInfo(); } else { @@ -80,35 +98,50 @@ See article: await this.promptImportInScratchOrgs(sfdmuProjectFolder); // Set bac initial cwd - const message = c.cyan(`Successfully initialized sfdmu project ${c.green(sfdmuProjectFolder)}, with ${c.green("export.json")} file. 
-You can now configure it using SFDMU documentation: https://help.sfdmu.com/plugin-basics/basic-usage/minimal-configuration -If you don't have unique field to identify an object, use composite external ids: https://help.sfdmu.com/full-documentation/advanced-features/composite-external-id-keys -`); - uxLog(this, message); + const sfdmuBaseDoc = "https://help.sfdmu.com/configuration"; + const sfdmuExternalIdsDoc = "https://help.sfdmu.com/full-documentation/advanced-features/composite-external-id-keys"; + const message = c.cyan(`Successfully initialized sfdmu project ${c.green(sfdmuProjectFolder)}, with ${c.green( + 'export.json' + )} file.`); + uxLog("other", this, message); + uxLog("log", this, c.grey(`You can now configure it using SFDMU documentation: ${c.yellow(sfdmuBaseDoc)}`)); + uxLog("log", this, c.grey(`If you don't have unique field to identify an object, use composite external ids: ${c.yellow(sfdmuExternalIdsDoc)}`)); // Trigger command to open SFDMU config file in VsCode extension - WebSocketClient.requestOpenFile(exportJsonFile); + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(exportJsonFile, 'Edit your SFDMU export.json file', 'report'); + WebSocketClient.sendReportFileMessage(sfdmuBaseDoc, 'SFDMU documentation (Basic)', 'docUrl'); + WebSocketClient.sendReportFileMessage(sfdmuExternalIdsDoc, 'SFDMU documentation (External Ids)', 'docUrl'); + } + else { + WebSocketClient.requestOpenFile(exportJsonFile); + } return { outputString: message }; } private async generateConfigurationFiles() { - const sfdmuProjectFolder = path.join(dataFolderRoot, this.dataPath); + const sfdmuProjectFolder = path.join(DATA_FOLDERS_ROOT, this.dataPath); if (fs.existsSync(sfdmuProjectFolder)) { - throw new SfdxError(`[sfdx-hardis]${c.red(`Folder ${c.bold(sfdmuProjectFolder)} already exists`)}`); + throw new SfError(`[sfdx-hardis]${c.red(`Folder ${c.bold(sfdmuProjectFolder)} already exists`)}`); } // Create folder & export.json await 
fs.ensureDir(sfdmuProjectFolder); - const exportJsonFile = path.join(sfdmuProjectFolder, "export.json"); + const exportJsonFile = path.join(sfdmuProjectFolder, 'export.json'); await fs.writeFile(exportJsonFile, JSON.stringify(this.sfdmuConfig, null, 2)); - uxLog(this, "Generated SFDMU config file " + exportJsonFile); + uxLog("action", this, c.cyan('Generated SFDMU config file ' + exportJsonFile)); for (const additionalFile of this.additionalFiles) { const additionalFileFull = path.join(sfdmuProjectFolder, additionalFile.path); await fs.writeFile(additionalFileFull, additionalFile.text); - uxLog(this, c.cyan(additionalFile.message + ": ") + c.yellow(additionalFileFull)); - WebSocketClient.requestOpenFile(additionalFileFull); + uxLog("action", this, c.cyan(additionalFile.message + ': ') + c.yellow(additionalFileFull)); + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(additionalFileFull, additionalFile.message, 'report'); + } + else { + WebSocketClient.requestOpenFile(additionalFileFull); + } } return { exportJsonFile, sfdmuProjectFolder }; } @@ -128,28 +161,28 @@ If you don't have unique field to identify an object, use composite external ids objects: [ { query: "SELECT all FROM Account WHERE Name='sfdx-hardis'", - operation: "Upsert", - externalId: "Name", + operation: 'Upsert', + externalId: 'Name', }, ], }; // Manage badwords filter option - if (additionalConfig.includes("badwordsFilter")) { - const badwordsFileName = "badwords.json"; + if (additionalConfig.includes('badwordsFilter')) { + const badwordsFileName = 'badwords.json'; this.sfdmuConfig.objects[0] = [ { - query: "SELECT all FROM Lead", - operation: "Readonly", - targetRecordsFilter: "core:DetectBadwords", + query: 'SELECT all FROM Lead', + operation: 'Readonly', + targetRecordsFilter: 'core:DetectBadwords', filterRecordsAddons: [ { - module: "core:RecordsFilter", + module: 'core:RecordsFilter', args: { - filterType: "BadWords", + filterType: 'BadWords', settings: { 
badwordsFile: badwordsFileName, - detectFields: ["Description"], + detectFields: ['Description'], highlightWords: true, outputMatches: false, }, @@ -158,14 +191,14 @@ If you don't have unique field to identify an object, use composite external ids ], }, ]; - if (!fs.existsSync("badwords.json")) { + if (!fs.existsSync('badwords.json')) { const badwordsSample = { - badwords: ["write", "your", "bad", "words", "and expressions", "here"], + badwords: ['write', 'your', 'bad', 'words', 'and expressions', 'here'], }; this.additionalFiles.push({ path: badwordsFileName, text: JSON.stringify(badwordsSample, null, 2), - message: "Sample badwords file has been generated and needs to be updated", + message: 'Sample badwords file has been generated and needs to be updated', }); } } @@ -174,31 +207,40 @@ If you don't have unique field to identify an object, use composite external ids private async promptExportInfo() { return await prompts([ { - type: "text", - name: "dataPath", - message: c.cyanBright('Please input the SFDMU folder name (PascalCase format). Ex: "ProductsActive"'), + type: 'text', + name: 'dataPath', + message: c.cyanBright('Please input the SFDMU folder name (PascalCase format)'), + description: 'The folder name that will contain the SFDMU data configuration files', + placeholder: 'Ex: ProductsActive', }, { - type: "text", - name: "sfdxHardisLabel", - message: c.cyanBright('Please input the SFDMU config label. Ex: "Active Products"'), + type: 'text', + name: 'sfdxHardisLabel', + message: c.cyanBright('Please input the SFDMU config label'), + description: 'A human-readable label for this data configuration', + placeholder: 'Ex: Active Products', }, { - type: "text", - name: "sfdxHardisDescription", + type: 'text', + name: 'sfdxHardisDescription', message: c.cyanBright( - 'Please input the SFDMU config description. 
Ex: "Active products are used for scratch org initialization and in deployments"', + 'Please input the SFDMU config description' ), + description: 'A detailed description explaining what this data configuration does', + placeholder: 'Ex: Active products are used for scratch org initialization and in deployments', }, { - type: "multiselect", - name: "additional", - message: c.cyanBright("Please select additional options if you need them. If not, just select nothing and continue"), + type: 'multiselect', + name: 'additional', + message: c.cyanBright( + 'Please select additional options if you need them' + ), + description: 'Choose optional features to include in the data configuration (select nothing to skip)', choices: [ { - title: "Bad words detector", - description: "Can detect a list of bad words in records", - value: "badwordsFilter", + title: 'Bad words detector', + description: 'Can detect a list of bad words in records', + value: 'badwordsFilter', }, ], }, @@ -206,11 +248,11 @@ If you don't have unique field to identify an object, use composite external ids } private async selectTemplate() { - const templateChoices = []; - const templatesFolder = path.join(PACKAGE_ROOT_DIR, "defaults/templates/sfdmu"); + const templateChoices: any[] = []; + const templatesFolder = path.join(PACKAGE_ROOT_DIR, 'defaults/templates/sfdmu'); const templateFiles = fs.readdirSync(templatesFolder); for (const templateFile of templateFiles) { - const templateName = path.basename(templateFile).replace(".json", ""); + const templateName = path.basename(templateFile).replace('.json', ''); templateChoices.push({ title: `📝 ${templateName}`, value: path.join(templatesFolder, templateFile), @@ -218,38 +260,47 @@ If you don't have unique field to identify an object, use composite external ids }); } - const defaultTemplateChoice = { title: "📄 Blank template", value: "blank", description: "Configure your data import/export from scratch :)" }; + const defaultTemplateChoice = { + title: '📄 Blank 
template', + value: 'blank', + description: 'Configure your data import/export from scratch :)', + }; const templateResp = await prompts({ - type: "select", - name: "template", - message: c.cyanBright("Please select a SFDMU template, or the blank one"), + type: 'select', + name: 'template', + message: c.cyanBright('Please select a SFDMU template, or the blank one'), + description: 'Choose a pre-configured SFDMU template for data operations or start with a blank configuration', + placeholder: 'Select a template', choices: [...[defaultTemplateChoice], ...templateChoices], }); return templateResp.template; } private async buildExportJsonInfoFromTemplate(templateFile) { - const templateName = path.basename(templateFile).replace(".json", ""); + const templateName = path.basename(templateFile).replace('.json', ''); this.dataPath = pascalcase(templateName); - this.sfdmuConfig = JSON.parse(fs.readFileSync(templateFile, "utf-8")); + this.sfdmuConfig = JSON.parse(fs.readFileSync(templateFile, 'utf-8')); } private async promptImportInScratchOrgs(sfdmuProjectFolder) { const importResp = await prompts({ - type: "confirm", - name: "importInScratchOrgs", - message: c.cyanBright("Do you want this SFDMU config to be used to import data when initializing a new scratch org ?"), + type: 'confirm', + name: 'importInScratchOrgs', + message: c.cyanBright( + 'Do you want this SFDMU config to be used to import data when initializing a new scratch org ?' 
+ ), + description: 'Automatically import this data set when creating new scratch orgs for development and testing', default: false, }); this.importInScratchOrgs = importResp.importInScratchOrgs === true; // Manage dataPackages if importInScratchOrgs is true if (this.importInScratchOrgs === true) { - const config = await getConfig("project"); + const config = await getConfig('project'); const dataPackages = config.dataPackages || []; - dataPackages.push({ dataPath: sfdmuProjectFolder.replace(/\\/g, "/"), importInScratchOrgs: true }); - await setConfig("project", { dataPackages: dataPackages }); + dataPackages.push({ dataPath: sfdmuProjectFolder.replace(/\\/g, '/'), importInScratchOrgs: true }); + await setConfig('project', { dataPackages: dataPackages }); } } } diff --git a/src/commands/hardis/org/configure/files.ts b/src/commands/hardis/org/configure/files.ts index 7cbd1f6ef..9b25db751 100644 --- a/src/commands/hardis/org/configure/files.ts +++ b/src/commands/hardis/org/configure/files.ts @@ -1,59 +1,78 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as pascalcase from "pascalcase"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; -import { filesFolderRoot } from "../../../../common/utils/filesUtils"; -import { promptFilesExportConfiguration } from "../../../../common/utils/filesUtils"; -import { WebSocketClient } from "../../../../common/websocketClient"; -import { PACKAGE_ROOT_DIR } from "../../../../settings"; -import { prompts } from "../../../../common/utils/prompts"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class ConfigureData extends SfdxCommand { - public static title = "Configure File export project"; - - public static description = `Configure export of file attachments from a Salesforce org - -See article below +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import pascalcase from 'pascalcase'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { filesFolderRoot } from '../../../../common/utils/filesUtils.js'; +import { promptFilesExportConfiguration } from '../../../../common/utils/filesUtils.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; +import { PACKAGE_ROOT_DIR } from '../../../../settings.js'; +import { prompts } from '../../../../common/utils/prompts.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class ConfigureData extends SfCommand { + public static title = 'Configure File export project'; + + public static description = ` +## Command Behavior + +**Configures a project for exporting file attachments from a Salesforce org.** + +This command streamlines the setup of configurations for mass downloading files (such as Notes, Attachments, or Salesforce Files) associated with Salesforce records. It's particularly useful for data backups, migrations, or integrating Salesforce files with external systems. + +Key functionalities: + +- **Template-Based Configuration:** Allows you to choose from predefined templates for common file export scenarios or start with a blank configuration. 
Templates can pre-populate the export settings. +- **Interactive Setup:** Guides you through defining the export project folder name and other export parameters. +- **\`export.json\` Generation:** Creates an \`export.json\` file within the designated project folder. This file contains the configuration for the file export operation, including: + - **SOQL Query:** A SOQL query to select the parent records from which files will be exported. + - **File Types:** Specifies which types of files (e.g., \`ContentVersion\`, \`Attachment\`) to include. + - **File Size Filtering:** Minimum file size in KB to filter files during export (files smaller than this will be skipped). + - **Output Folder/File Naming:** Defines how the exported files and their containing folders will be named based on record fields. + - **Overwrite Options:** Controls whether existing files or parent records should be overwritten during the export. + +See this article for a practical example: [![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) + +
+Technical explanations + +The command's technical implementation involves: + +- **Template Selection:** It uses \`selectTemplate\` to present predefined file export templates or a blank option to the user. +- **Interactive Prompts:** The \`promptFilesExportConfiguration\` utility is used to gather detailed export settings from the user, such as the SOQL query, file types, and naming conventions. +- **File System Operations:** Employs \`fs-extra\` to create the project directory (\`files/your-project-name/\`) and write the \`export.json\` configuration file. +- **PascalCase Conversion:** Uses \`pascalcase\` to format the files export path consistently. +- **JSON Serialization:** Serializes the collected export configuration into a JSON string and writes it to \`export.json\`. +- **WebSocket Communication:** Uses \`WebSocketClient.requestOpenFile\` to open the generated \`export.json\` file in VS Code, facilitating immediate configuration. +
`; - public static examples = ["$ sfdx hardis:org:configure:files"]; + public static examples = ['$ sf hardis:org:configure:files']; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; exportConfig: any; filesExportPath: any; @@ -62,7 +81,7 @@ See article below public async run(): Promise { const template = await this.selectTemplate(); - if (template === "blank") { + if (template === 'blank') { // Request info to build sfdmu workspace await this.buildExportJsonInfo(); } else { @@ -73,35 +92,42 @@ See article below const { exportJsonFile, filesProjectFolder } = await this.createConfigFiles(); // Trigger command to open SFDMU config file in VsCode extension - WebSocketClient.requestOpenFile(exportJsonFile); + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(exportJsonFile, 'Edit your Files export configuration', 'report'); + } + else { + WebSocketClient.requestOpenFile(exportJsonFile); + } // Set bac initial cwd - const message = 
c.cyan(`Successfully initialized files export project ${c.green(filesProjectFolder)}, with ${c.green("export.json")} file. -You can now call it using ${c.white("sfdx hardis:org:files:export")} + const message = c.cyan(`Successfully initialized files export project ${c.green( + filesProjectFolder + )}, with ${c.green('export.json')} file. +You can now call it using ${c.white('sf hardis:org:files:export')} `); - uxLog(this, message); + uxLog("other", this, message); return { outputString: message }; } private async createConfigFiles() { const filesProjectFolder = path.join(filesFolderRoot, this.filesExportPath); if (fs.existsSync(filesProjectFolder)) { - throw new SfdxError(`[sfdx-hardis]${c.red(`Folder ${c.bold(filesProjectFolder)} already exists`)}`); + throw new SfError(`[sfdx-hardis]${c.red(`Folder ${c.bold(filesProjectFolder)} already exists`)}`); } // Create folder & export.json await fs.ensureDir(filesProjectFolder); - const exportJsonFile = path.join(filesProjectFolder, "export.json"); + const exportJsonFile = path.join(filesProjectFolder, 'export.json'); await fs.writeFile(exportJsonFile, JSON.stringify(this.exportConfig, null, 2)); return { exportJsonFile, filesProjectFolder }; } private async selectTemplate() { - const templateFileChoices = []; - const templatesFilesFolder = path.join(PACKAGE_ROOT_DIR, "defaults/templates/files"); + const templateFileChoices: any[] = []; + const templatesFilesFolder = path.join(PACKAGE_ROOT_DIR, 'defaults/templates/files'); const templateFiles = fs.readdirSync(templatesFilesFolder); for (const templateFile of templateFiles) { - const templateName = path.basename(templateFile).replace(".json", ""); + const templateName = path.basename(templateFile).replace('.json', ''); templateFileChoices.push({ title: `📝 ${templateName}`, value: path.join(templatesFilesFolder, templateFile), @@ -109,12 +135,18 @@ You can now call it using ${c.white("sfdx hardis:org:files:export")} }); } - const defaultTemplateChoice = { title: "📄 Blank 
template", value: "blank", description: "Configure your files import/export from scratch :)" }; + const defaultTemplateChoice = { + title: '📄 Blank template', + value: 'blank', + description: 'Configure your files import/export from scratch :)', + }; const templateResp = await prompts({ - type: "select", - name: "template", - message: c.cyanBright("Please select a Files import/export template, or the blank one"), + type: 'select', + name: 'template', + message: c.cyanBright('Please select a Files import/export template, or the blank one'), + description: 'Choose a pre-configured template for file operations or start with a blank configuration', + placeholder: 'Select a template', choices: [...[defaultTemplateChoice], ...templateFileChoices], }); return templateResp.template; @@ -122,14 +154,15 @@ You can now call it using ${c.white("sfdx hardis:org:files:export")} private async buildExportJsonInfo() { const defaultConfig = { - sfdxHardisLabel: "", - sfdxHardisDescription: "", - soqlQuery: "SELECT Id,Name FROM Opportunity", - fileTypes: "all", - outputFolderNameField: "Name", - outputFileNameFormat: "title", + sfdxHardisLabel: '', + sfdxHardisDescription: '', + soqlQuery: 'SELECT Id,Name FROM Opportunity', + fileTypes: 'all', + outputFolderNameField: 'Name', + outputFileNameFormat: 'title', overwriteParentRecords: true, overwriteFiles: false, + fileSizeMin: 0, }; this.exportConfig = await promptFilesExportConfiguration(defaultConfig, false); @@ -139,8 +172,8 @@ You can now call it using ${c.white("sfdx hardis:org:files:export")} } private async buildExportJsonInfoFromTemplate(templateFile) { - const templateName = path.basename(templateFile).replace(".json", ""); + const templateName = path.basename(templateFile).replace('.json', ''); this.filesExportPath = pascalcase(templateName); - this.exportConfig = JSON.parse(fs.readFileSync(templateFile, "utf-8")); + this.exportConfig = JSON.parse(fs.readFileSync(templateFile, 'utf-8')); } } diff --git 
a/src/commands/hardis/org/configure/monitoring.ts b/src/commands/hardis/org/configure/monitoring.ts index 18c2b9670..a3df75880 100644 --- a/src/commands/hardis/org/configure/monitoring.ts +++ b/src/commands/hardis/org/configure/monitoring.ts @@ -1,11 +1,11 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import * as open from "open"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import open from 'open'; import { ensureGitBranch, ensureGitRepository, @@ -15,193 +15,243 @@ import { getGitRepoName, gitAddCommitPush, uxLog, -} from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; -import { setInConfigFile } from "../../../../config"; -import { PACKAGE_ROOT_DIR } from "../../../../settings"; -import { promptOrg } from "../../../../common/utils/orgUtils"; -import { WebSocketClient } from "../../../../common/websocketClient"; +} from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { CONSTANTS, setInConfigFile } from '../../../../config/index.js'; +import { PACKAGE_ROOT_DIR } from '../../../../settings.js'; +import { promptOrg } from '../../../../common/utils/orgUtils.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for 
this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class OrgConfigureMonitoring extends SfCommand { + public static title = 'Configure org monitoring'; -export default class OrgConfigureMonitoring extends SfdxCommand { - public static title = "Configure org monitoring"; + public static description = ` +## Command Behavior - public static description = "Configure monitoring of an org"; +**Configures the monitoring of a Salesforce org within a dedicated Git repository.** - public static examples = ["$ sfdx hardis:org:configure:monitoring"]; +This command streamlines the setup of continuous monitoring for a Salesforce organization, ensuring that changes and health metrics are tracked and reported. It is designed to be run within a Git repository specifically dedicated to monitoring configurations. - protected static flagsConfig = { - orginstanceurl: flags.string({ - description: "Org instance url (technical param, do not use manually)", +Key functionalities include: + +- **Git Repository Validation:** Ensures the current Git repository's name contains "monitoring" to enforce best practices for separating monitoring configurations from deployment sources. +- **Prerequisite Check:** Guides the user to confirm that necessary monitoring prerequisites (CI/CD variables, permissions) are configured on their Git server. +- **Org Selection:** Prompts the user to select or connect to the Salesforce org they wish to monitor. +- **Monitoring Branch Creation:** Creates or checks out a dedicated Git branch (e.g., \`monitoring_yourinstanceurl\`) for the monitoring configuration. +- **SFDX Project Setup:** Initializes an SFDX project structure within the repository if it doesn't already exist, and copies default monitoring files. 
+- **Configuration File Update:** Updates the local \`.sfdx-hardis.yml\` file with the target org's username and instance URL. +- **SSL Certificate Generation:** Generates an SSL certificate for secure authentication to the monitored org. +- **Automated Commit and Push:** Offers to automatically commit and push the generated configuration files to the remote Git repository. +- **Scheduling Guidance:** Provides instructions and links for scheduling the monitoring job on the Git server. + +
+Technical explanations + +The command's technical implementation involves a series of Git operations, file system manipulations, and Salesforce CLI interactions: + +- **Git Operations:** Utilizes \`ensureGitRepository\`, \`getGitRepoName\`, \`execCommand\` (for \`git add\`, \`git stash\`), \`ensureGitBranch\`, and \`gitAddCommitPush\` to manage the Git repository, branches, and commits. +- **Interactive Prompts:** Employs the \`prompts\` library to interact with the user for confirmations and selections. +- **File System Management:** Uses \`fs-extra\` for copying default monitoring files (\`defaults/monitoring\`) and managing the SFDX project structure. +- **Salesforce CLI Integration:** Calls \`sf project generate\` to create a new SFDX project and uses \`promptOrg\` for Salesforce org authentication and selection. +- **Configuration Management:** Updates the \`.sfdx-hardis.yml\` file using \`setInConfigFile\` to store org-specific monitoring configurations. +- **SSL Certificate Generation:** Leverages \`generateSSLCertificate\` to create the necessary SSL certificates for JWT-based authentication to the Salesforce org. +- **External Tool Integration:** Requires \`openssl\` to be installed on the system for SSL certificate generation. +- **WebSocket Communication:** Uses \`WebSocketClient.sendRunSfdxHardisCommandMessage\` to restart the command in VS Code if the default org changes, and \`WebSocketClient.sendRefreshStatusMessage\` to update the status. +
+`; + + public static examples = ['$ sf hardis:org:configure:monitoring']; + + public static flags: any = { + orginstanceurl: Flags.string({ + description: 'Org instance url (technical param, do not use manually)', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; // Comment this out if your command does not require an org username - protected static supportsUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; /* jscpd:ignore-end */ - protected static requiresDependencies = ["openssl"]; + protected static requiresDependencies = ['openssl']; public async run(): Promise { + const { flags } = await this.parse(OrgConfigureMonitoring); // Make sure that we are located in a git repository await ensureGitRepository(); // Check git repo name is valid (contains monitoring) - const repoName = await getGitRepoName(); - if (!repoName.includes("monitoring")) { + const repoName = (await getGitRepoName()) || ''; + if (!repoName.includes('monitoring')) { const confirmMix = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', choices: [ - { title: "Yes, I'm sure because I know what I'm doing, like Roman 
:)", value: "yes" }, - { title: 'Mmmmm no, let me create another repo with the word "monitoring" in its name !', value: "no" }, + { title: "Yes, I'm sure because I know what I'm doing, like Roman :)", value: 'yes' }, + { title: 'Mmmmm no, let me create another repo with the word "monitoring" in its name !', value: 'no' }, ], - message: c.cyanBright("It's safer to have monitoring in a separate repo. Are you sure you want to mix monitoring and deployment sources ?"), + message: c.cyanBright( + "Are you sure you want to mix monitoring and deployment sources ?" + ), + description: 'It is recommended to separate monitoring configuration from deployment sources in different repositories', + placeholder: 'Select an option', }); - if (confirmMix.value === "no") { - throw new SfdxError('Your git repository name must contain the expression "monitoring"'); + if (confirmMix.value === 'no') { + throw new SfError('Your git repository name must contain the expression "monitoring"'); } } - const preRequisitesUrl = "https://sfdx-hardis.cloudity.com/salesforce-monitoring-config-home/#instructions"; - uxLog(this, c.yellow("Monitoring pre-requisites documentation: " + c.bold(preRequisitesUrl))); + const preRequisitesUrl = `${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-config-home/#instructions`; + uxLog("warning", this, c.yellow('Monitoring pre-requisites documentation: ' + c.bold(preRequisitesUrl))); const confirmPreRequisites = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', choices: [ - { title: "Yes", value: "yes" }, - { title: "No, help me !", value: "no" }, + { title: 'Yes', value: 'yes' }, + { title: 'No, help me !', value: 'no' }, ], - message: c.cyanBright("Did you configure the sfdx-hardis monitoring pre-requisites on your Git server ?"), + message: c.cyanBright('Did you configure the sfdx-hardis monitoring pre-requisites on your Git server ?'), + description: 'Confirm that you have set up the required CI/CD variables and permissions 
for monitoring', + placeholder: 'Select an option', }); - if (confirmPreRequisites.value === "no") { - const msg = "Please follow the instructions to configure the sfdx-hardis monitoring pre-requisites on your Git server\n" + preRequisitesUrl; - uxLog(this, c.yellow(msg)); + if (confirmPreRequisites.value === 'no') { + const msg = + 'Please follow the instructions to configure the sfdx-hardis monitoring pre-requisites on your Git server\n' + + preRequisitesUrl; + uxLog("warning", this, c.yellow(msg)); await open(preRequisitesUrl, { wait: true }); return { outputString: msg }; } // Get current default org - const currentOrgId = this.org?.getOrgId() || ""; - if (this.flags.orginstanceurl && this.org?.getConnection()?.instanceUrl === this.flags.orginstanceurl) { - uxLog(this, c.cyan(`Default org ${this.org.getConnection()?.instanceUrl} is selected, let's configure its monitoring !`)); + const currentOrgId = flags['target-org']?.getOrgId() || ''; + if (flags.orginstanceurl && flags['target-org']?.getConnection()?.instanceUrl === flags.orginstanceurl) { + uxLog( + "action", + this, + c.cyan( + `Default org ${flags['target-org'].getConnection()?.instanceUrl + } is selected, let's configure its monitoring !` + ) + ); } else { // Select the org that must be monitored const org = await promptOrg(this, { devHub: false, setDefault: true, scratch: false, - promptMessage: "Please select or connect to the org that you want to monitor", + promptMessage: 'Please select or connect to the org that you want to monitor', + defaultOrgUsername: flags['target-org']?.getUsername(), }); // Restart command so the org is selected as default org (will help to select profiles) if (currentOrgId !== org.orgId) { - const infoMsg = "Default org changed. 
Please restart the same command if VsCode does not do that automatically for you :)"; - uxLog(this, c.yellow(infoMsg)); - const currentCommand = "sfdx " + this.id + " " + this.argv.join(" ") + " --orginstanceurl " + org.instanceUrl; - WebSocketClient.sendMessage({ - event: "runSfdxHardisCommand", - sfdxHardisCommand: currentCommand, - }); + const infoMsg = + 'Default org changed. Please restart the same command if VsCode does not do that automatically for you :)'; + uxLog("warning", this, c.yellow(infoMsg)); + const currentCommand = 'sf ' + this.id + ' ' + this.argv.join(' ') + ' --orginstanceurl ' + org.instanceUrl; + WebSocketClient.sendRunSfdxHardisCommandMessage(currentCommand); return { outputString: infoMsg }; } } // Build monitoring branch name const branchName = - "monitoring_" + - this.org + 'monitoring_' + + flags['target-org'] ?.getConnection() - .instanceUrl.replace("https://", "") - .replace(".my.salesforce.com", "") - .replace(/\./gm, "_") - .replace(/--/gm, "__") - .replace(/-/gm, "_"); + .instanceUrl.replace('https://', '') + .replace('.my.salesforce.com', '') + .replace(/\./gm, '_') + .replace(/--/gm, '__') + .replace(/-/gm, '_'); // Checkout branch, or create it if not existing (stash before if necessary) - await execCommand("git add --all", this, { output: true, fail: false }); - await execCommand("git stash", this, { output: true, fail: false }); - await ensureGitBranch(branchName, { parent: "main" }); + await execCommand('git add --all', this, { output: true, fail: false }); + await execCommand('git stash', this, { output: true, fail: false }); + await ensureGitBranch(branchName, { parent: 'main' }); // Create sfdx project if not existing yet - if (!fs.existsSync("sfdx-project.json")) { - const createCommand = "sfdx force:project:create" + ` --projectname "sfdx-hardis-monitoring"`; - uxLog(this, c.cyan("Creating sfdx-project...")); + if (!fs.existsSync('sfdx-project.json')) { + const createCommand = 'sf project generate' + ` --name 
"sfdx-hardis-monitoring"`; + uxLog("action", this, c.cyan('Creating sfdx-project...')); await execCommand(createCommand, this, { output: true, fail: true, }); - uxLog(this, c.cyan("Moving sfdx-project to root...")); - await fs.copy("sfdx-hardis-monitoring", process.cwd(), { overwrite: true }); - await fs.remove("sfdx-hardis-monitoring"); + uxLog("action", this, c.cyan('Moving sfdx-project to root...')); + await fs.copy('sfdx-hardis-monitoring', process.cwd(), { overwrite: true }); + await fs.remove('sfdx-hardis-monitoring'); // Copying monitoring folder structure - uxLog(this, "Copying default monitoring files..."); - if (fs.existsSync("README.md") && fs.readFileSync("README.md", "utf8").toString().split("\n").length < 5) { + uxLog("other", this, 'Copying default monitoring files...'); + if (fs.existsSync('README.md') && fs.readFileSync('README.md', 'utf8').toString().split('\n').length < 5) { // Remove default README if necessary - await fs.remove("README.md"); + await fs.remove('README.md'); } - await fs.copy(path.join(PACKAGE_ROOT_DIR, "defaults/monitoring", "."), process.cwd(), { overwrite: true }); + await fs.copy(path.join(PACKAGE_ROOT_DIR, 'defaults/monitoring', '.'), process.cwd(), { overwrite: true }); } // Update config file await setInConfigFile( [], { - targetUsername: this.org.getUsername(), - instanceUrl: this.org.getConnection().instanceUrl, + targetUsername: flags['target-org'].getUsername(), + instanceUrl: flags['target-org'].getConnection().instanceUrl, }, - "./.sfdx-hardis.yml", + './.sfdx-hardis.yml' ); // Generate SSL certificate (requires openssl to be installed on computer) - await generateSSLCertificate(branchName, "./.ssh", this, this.org.getConnection(), {}); + await generateSSLCertificate(branchName, './.ssh', this, flags['target-org'].getConnection(), {}); // Confirm & push on server const confirmPush = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', initial: true, - message: 
c.cyanBright("(RECOMMENDED) Do you want sfdx-hardis to save your configuration on server ? (git stage, commit & push)"), + message: c.cyanBright( + 'Do you want sfdx-hardis to save your configuration on server ?' + ), + description: 'Automatically commit and push the monitoring configuration files to your git repository (recommended)', }); if (confirmPush.value === true) { await gitAddCommitPush({ - message: "[sfdx-hardis] Update monitoring configuration", + message: '[sfdx-hardis] Update monitoring configuration', }); - uxLog(this, c.green("Your configuration for org monitoring is now ready :)")); + uxLog("success", this, c.green('Your configuration for org monitoring is now ready :)')); } else { - uxLog(this, c.yellow("Please manually git add, commit and push to the remote repository :)")); + uxLog("warning", this, c.yellow('Please manually git add, commit and push to the remote repository :)')); } const branch = await getCurrentGitBranch(); - uxLog(this, c.greenBright(`Now you must schedule monitoring to run the job automatically every night on branch ${c.bold(branch)}:)`)); - const scheduleMonitoringUrl = "https://sfdx-hardis.cloudity.com/salesforce-monitoring-config-home/#instructions"; - const msg = "Please follow the instructions to schedule sfdx-hardis monitoring on your Git server: " + c.bold(scheduleMonitoringUrl); - uxLog(this, c.yellow(msg)); + uxLog( + "success", + this, + c.green( + `Now you must schedule monitoring to run the job automatically every night on branch ${c.bold(branch)}:)` + ) + ); + const scheduleMonitoringUrl = `${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-config-home/#instructions`; + const msg = + 'Please follow the instructions to schedule sfdx-hardis monitoring on your Git server: ' + + c.bold(scheduleMonitoringUrl); + uxLog("warning", this, c.yellow(msg)); await open(scheduleMonitoringUrl, { wait: true }); // Return an object to be displayed with --json - return { outputString: "Configured branch for authentication" }; + return 
{ outputString: 'Configured branch for authentication' }; } } diff --git a/src/commands/hardis/org/connect.ts b/src/commands/hardis/org/connect.ts index 0f9267791..3df8a9f79 100644 --- a/src/commands/hardis/org/connect.ts +++ b/src/commands/hardis/org/connect.ts @@ -1,57 +1,70 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { execCommand, isCI } from "../../../common/utils"; -import { promptOrg } from "../../../common/utils/orgUtils"; -import { prompts } from "../../../common/utils/prompts"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { execCommand, isCI } from '../../../common/utils/index.js'; +import { promptOrg } from '../../../common/utils/orgUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class OrgConnect extends SfCommand { + public static title = 'Connect to an org'; -export default class OrgSelect extends SfdxCommand { - public static title = "Connect to an org"; + public static description = ` +## Command Behavior - public static description = `Connect to an org without setting it as default username, then proposes to open the org in web browser - `; +**Connects to a Salesforce org without setting it as the default username, and optionally opens the org in a web browser.** - public static examples = ["$ sfdx hardis:org:connect"]; +This command provides a quick way to establish a connection to a Salesforce organization for one-off tasks or when you don't want to change your default org. It's useful for accessing different environments without disrupting your primary development setup. + +Key functionalities: + +- **Org Selection:** Prompts the user to select an existing Salesforce org or connect to a new one. +- **Non-Default Connection:** Ensures that the selected org is connected but does not set it as the default username for subsequent Salesforce CLI commands. +- **Browser Launch (Optional):** Offers to open the connected org directly in your default web browser, providing immediate access to the Salesforce UI. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Org Prompt:** Uses the \`promptOrg\` utility to display a list of available Salesforce orgs and allows the user to select one or initiate a new authentication flow. +- **Salesforce CLI Integration:** Internally, it leverages Salesforce CLI commands to establish the connection to the chosen org. It does not use \`sf config set target-org\` to avoid changing the default org. +- **Browser Launch:** If the user opts to open the org in a browser, it executes the \`sf org open\` command, passing the selected org's username as the target. +- **Environment Awareness:** Checks the \`isCI\` flag to determine whether to offer the browser launch option, as it's typically not applicable in continuous integration environments. +
+`; + + public static examples = ['$ sf hardis:org:connect']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(OrgConnect); + this.debugMode = flags.debug || false; // Prompt org to connect to const org = await promptOrg(this, { devHub: false, setDefault: false }); @@ -59,9 +72,10 @@ export default class OrgSelect extends SfdxCommand { // Prompt user if he/she wants to open org in Web Browser if (!isCI) { const openRes = await prompts({ - type: "confirm", - name: "value", - message: "Do you want to open this org in Web Browser ?", + type: 'confirm', + name: 'value', + message: 'Do you want to open this org in Web Browser ?', + description: 'Launch the Salesforce org in your default web browser for immediate access', }); if (openRes.value === true) { const 
openCommand = `sf org open --target-org ${org.username}`; diff --git a/src/commands/hardis/org/create.ts b/src/commands/hardis/org/create.ts index fccda1763..d97b0052a 100644 --- a/src/commands/hardis/org/create.ts +++ b/src/commands/hardis/org/create.ts @@ -1,61 +1,87 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { assert } from "console"; -import * as fs from "fs-extra"; -import * as moment from "moment"; -import * as os from "os"; -import * as path from "path"; -import { clearCache } from "../../../common/cache"; -import { elapseEnd, elapseStart, execSfdxJson, getCurrentGitBranch, uxLog } from "../../../common/utils"; -import { initApexScripts, initOrgData, initPermissionSetAssignments, promptUserEmail } from "../../../common/utils/orgUtils"; -import { WebSocketClient } from "../../../common/websocketClient"; -import { getConfig } from "../../../config"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class SandboxCreate extends SfdxCommand { - public static title = "Create sandbox org"; - - public static description = "Create and initialize sandbox org"; - - public static examples = ["$ sfdx hardis:org:create"]; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { assert } from 'console'; +import fs from 'fs-extra'; +import moment from 'moment'; +import * as os from 'os'; +import * as path from 'path'; +import { clearCache } from '../../../common/cache/index.js'; +import { elapseEnd, elapseStart, execSfdxJson, getCurrentGitBranch, uxLog } from '../../../common/utils/index.js'; +import { + initApexScripts, + initOrgData, + initPermissionSetAssignments, + promptUserEmail, +} from '../../../common/utils/orgUtils.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { getConfig } from '../../../config/index.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class SandboxCreate extends SfCommand { + public static title = 'Create sandbox org'; + + public static description = ` +## Command Behavior + +**Creates and initializes a Salesforce sandbox org.** + +This command automates the process of provisioning a new sandbox environment, making it ready for development or testing. It handles various aspects of sandbox creation and initial setup, reducing manual effort and ensuring consistency. + +Key functionalities: + +- **Sandbox Definition:** Uses a \`project-sandbox-def.json\` file (if present in \`config/\`) to define sandbox properties like name, description, license type, and source sandbox. If not provided, it uses default values. 
+- **Dynamic Naming:** Generates a unique sandbox alias based on the current username, Git branch, and a timestamp. +- **Sandbox Creation:** Executes the Salesforce CLI command to create the sandbox, including setting it as the default org and waiting for its completion. +- **User Update:** Updates the main sandbox user's details (e.g., Last Name, First Name) and can fix country values or marketing user permissions if needed. +- **Initialization Scripts:** Runs predefined Apex scripts, assigns permission sets, and imports initial data into the newly created sandbox, based on configurations in your project. +- **Error Handling:** Provides detailed error messages for common sandbox creation issues, including Salesforce-specific errors. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It loads project and user configurations using \`getConfig\` to retrieve settings like \`projectName\`, \`devHubAlias\`, and \`userEmail\`. +- **Git Integration:** Retrieves the current Git branch name using \`getCurrentGitBranch\` to inform sandbox naming. +- **File System Operations:** Uses \`fs-extra\` to manage sandbox definition files (reading \`project-sandbox-def.json\`, writing a user-specific definition file) and temporary directories. +- **Salesforce CLI Execution:** Executes Salesforce CLI commands (\`sf org create sandbox\`, \`sf data get record\`, \`sf data update record\`, \`sf org open\`) using \`execSfdxJson\` for sandbox creation, user updates, and opening the org in a browser. +- **Cache Management:** Clears the Salesforce CLI org list cache (\`clearCache('sf org list')\`) to ensure the newly created sandbox is immediately recognized. +- **Initialization Utilities:** Calls a suite of utility functions (\`initPermissionSetAssignments\`, \`initApexScripts\`, \`initOrgData\`) to perform post-creation setup tasks. +- **Error Assertions:** Uses \`assert\` to check the success of Salesforce CLI commands and provides custom error messages for better debugging. +- **WebSocket Communication:** Uses \`WebSocketClient.sendRefreshStatusMessage\` to notify connected VS Code clients about the new sandbox. +- **Required Plugin Check:** Explicitly lists \`sfdmu\` as a required plugin, indicating its role in data initialization. +
+`; + + public static examples = ['$ sf hardis:org:create']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; protected static supportsDevhubUsername = true; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdmu"]; + protected static requiresSfdxPlugins = ['sfdmu']; /* jscpd:ignore-end */ @@ -75,7 +101,8 @@ export default class SandboxCreate extends SfdxCommand { protected sandboxOrgFromPool: any; public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(SandboxCreate); + this.debugMode = flags.debug || false; elapseStart(`Create and initialize sandbox org`); await this.initConfig(); await this.createSandboxOrg(); @@ -83,10 +110,10 @@ export default class SandboxCreate extends SfdxCommand { await this.updateSandboxOrgUser(); await 
initPermissionSetAssignments(this.configInfo.initPermissionSets || [], this.sandboxOrgUsername); await initApexScripts(this.configInfo.sandboxOrgInitApexScripts || [], this.sandboxOrgUsername); - await initOrgData(path.join(".", "scripts", "data", "SandboxInit"), this.sandboxOrgUsername); + await initOrgData(path.join('.', 'scripts', 'data', 'SandboxInit'), this.sandboxOrgUsername); } catch (e) { elapseEnd(`Create and initialize sandbox org`); - uxLog(this, c.grey("Error: " + e.message + "\n" + e.stack)); + uxLog("log", this, c.grey('Error: ' + (e as Error).message + '\n' + (e as Error).stack)); throw e; } elapseEnd(`Create and initialize sandbox org`); @@ -98,15 +125,20 @@ export default class SandboxCreate extends SfdxCommand { sandboxOrgUsername: this.sandboxOrgUsername, sandboxOrgSfdxAuthUrl: this.sandboxOrgSfdxAuthUrl, authFileJson: this.authFileJson, - outputString: "Created and initialized sandbox org", + outputString: 'Created and initialized sandbox org', }; } // Initialize configuration from .sfdx-hardis.yml + .gitbranch.sfdx-hardis.yml + .username.sfdx-hardis.yml public async initConfig() { - this.configInfo = await getConfig("user"); - this.gitBranch = await getCurrentGitBranch({ formatted: true }); - const newSandboxName = os.userInfo().username + "-" + this.gitBranch.split("/").pop().slice(0, 15) + "_" + moment().format("YYYYMMDD_hhmm"); + this.configInfo = await getConfig('user'); + this.gitBranch = (await getCurrentGitBranch({ formatted: true })) || ''; + const newSandboxName = + os.userInfo().username + + '-' + + (this.gitBranch.split('/').pop() || '').slice(0, 15) + + '_' + + moment().format('YYYYMMDD_hhmm'); this.sandboxOrgAlias = process.env.SANDBOX_ORG_ALIAS || newSandboxName; this.projectName = process.env.PROJECT_NAME || this.configInfo.projectName; @@ -123,16 +155,16 @@ export default class SandboxCreate extends SfdxCommand { // Create a new sandbox org or reuse existing one public async createSandboxOrg() { // Build 
project-sandbox-def-branch-user.json - uxLog(this, c.cyan("Building custom project-sandbox-def.json...")); - if (fs.existsSync("./config/project-sandbox-def.json")) { - this.projectSandboxDef = JSON.parse(fs.readFileSync("./config/project-sandbox-def.json", "utf-8")); + uxLog("action", this, c.cyan('Building custom project-sandbox-def.json...')); + if (fs.existsSync('./config/project-sandbox-def.json')) { + this.projectSandboxDef = JSON.parse(fs.readFileSync('./config/project-sandbox-def.json', 'utf-8')); } else { - uxLog(this, c.yellow(`Default values used: you may define a file ${c.bold("config/project-sandbox-def.json")}`)); + uxLog("warning", this, c.yellow(`Default values used: you may define a file ${c.bold('config/project-sandbox-def.json')}`)); this.projectSandboxDef = { - sandboxName: "", - description: "SFDX Hardis developer sandbox", - licenseType: "Developer", - sourceSandbox: "", + sandboxName: '', + description: 'SFDX Hardis developer sandbox', + licenseType: 'Developer', + sourceSandbox: '', }; } this.projectSandboxDef.sandboxName = os.userInfo().username.substring(0, 10); @@ -141,79 +173,96 @@ export default class SandboxCreate extends SfdxCommand { await fs.writeFile(projectSandboxDefLocal, JSON.stringify(this.projectSandboxDef, null, 2)); // Fix @salesforce/cli bug: remove shape.zip if found - const tmpShapeFolder = path.join(os.tmpdir(), "shape"); + const tmpShapeFolder = path.join(os.tmpdir(), 'shape'); if (fs.existsSync(tmpShapeFolder)) { await fs.remove(tmpShapeFolder); - uxLog(this, c.grey("Deleted " + tmpShapeFolder)); + uxLog("log", this, c.grey('Deleted ' + tmpShapeFolder)); } // Create new sandbox org - uxLog(this, c.cyan("Creating new sandbox org...")); - const waitTime = process.env.SANDBOX_ORG_WAIT || "60"; + uxLog("action", this, c.cyan('Creating new sandbox org...')); + const waitTime = process.env.SANDBOX_ORG_WAIT || '60'; const createCommand = - "sfdx force:org:create --setdefaultusername " + - "--type sandbox " + - 
`--definitionfile ${projectSandboxDefLocal} ` + - `--setalias ${this.sandboxOrgAlias} ` + + 'sf org create sandbox --set-default ' + + `--definition-file ${projectSandboxDefLocal} ` + + `--alias ${this.sandboxOrgAlias} ` + `--wait ${waitTime} ` + - `--targetusername ${this.devHubAlias} `; + `--target-org ${this.devHubAlias} `; const createResult = await execSfdxJson(createCommand, this, { fail: false, output: false, debug: this.debugMode, }); - await clearCache("force:org:list"); + await clearCache('sf org list'); assert(createResult.status === 0 && createResult.result, this.buildSandboxCreateErrorMessage(createResult)); this.sandboxOrgInfo = createResult.result; this.sandboxOrgUsername = this.sandboxOrgInfo.username; // Trigger a status refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshStatus" }); + WebSocketClient.sendRefreshStatusMessage(); // Open sandbox org for user if not in CI - await execSfdxJson("sf org open", this, { + await execSfdxJson('sf org open', this, { fail: true, output: false, debug: this.debugMode, }); - uxLog(this, c.cyan(`Created sandbox org ${c.green(this.sandboxOrgAlias)} with user ${c.green(this.sandboxOrgUsername)}`)); + uxLog( + "action", + this, + c.cyan(`Created sandbox org ${c.green(this.sandboxOrgAlias)} with user ${c.green(this.sandboxOrgUsername)}`) + ); } public buildSandboxCreateErrorMessage(createResult) { if (createResult.status === 0 && createResult.result) { - return c.green("Sandbox create OK"); - } else if (createResult.status === 1 && createResult.errorMessage.includes("Socket timeout occurred while listening for results")) { + return c.green('Sandbox create OK'); + } else if ( + createResult.status === 1 && + createResult.errorMessage.includes('Socket timeout occurred while listening for results') + ) { return c.red( `[sfdx-hardis] Error creating sandbox org. 
${c.bold( - "This is probably a Salesforce error, try again manually or launch again CI job", - )}\n${JSON.stringify(createResult, null, 2)}`, + 'This is probably a Salesforce error, try again manually or launch again CI job' + )}\n${JSON.stringify(createResult, null, 2)}` ); } return c.red( - `[sfdx-hardis] Error creating sandbox org. Maybe try ${c.yellow(c.bold("sfdx hardis:sandbox:create --forcenew"))} ?\n${JSON.stringify( - createResult, - null, - 2, - )}`, + `[sfdx-hardis] Error creating sandbox org. Maybe try ${c.yellow( + c.bold('sf hardis:sandbox:create --forcenew') + )} ?\n${JSON.stringify(createResult, null, 2)}` ); } // Update sandbox org user public async updateSandboxOrgUser() { - const config = await getConfig("user"); + const config = await getConfig('user'); // Update sandbox org main user - uxLog(this, c.cyan("Update / fix sandbox org user " + this.sandboxOrgUsername)); - const userQueryCommand = `sfdx force:data:record:get -s User -w "Username=${this.sandboxOrgUsername}" -u ${this.sandboxOrgAlias}`; - const userQueryRes = await execSfdxJson(userQueryCommand, this, { fail: true, output: false, debug: this.debugMode }); + uxLog("action", this, c.cyan('Update / fix sandbox org user ' + this.sandboxOrgUsername)); + const userQueryCommand = `sf data get record --sobject User --where "Username=${this.sandboxOrgUsername}" --target-org ${this.sandboxOrgAlias}`; + const userQueryRes = await execSfdxJson(userQueryCommand, this, { + fail: true, + output: false, + debug: this.debugMode, + }); let updatedUserValues = `LastName='SFDX-HARDIS' FirstName='Sandbox Org'`; // Fix country value is State & Country picklist activated - if ((this.projectSandboxDef.features || []).includes("StateAndCountryPicklist") && userQueryRes.result.CountryCode == null) { - updatedUserValues += ` CountryCode='${config.defaultCountryCode || "FR"}' Country='${config.defaultCountry || "France"}'`; + /* jscpd:ignore-start */ + if ( + (this.projectSandboxDef.features || 
[]).includes('StateAndCountryPicklist') && + userQueryRes.result.CountryCode == null + ) { + updatedUserValues += ` CountryCode='${config.defaultCountryCode || 'FR'}' Country='${config.defaultCountry || 'France' + }'`; } - if ((this.projectSandboxDef.features || []).includes("MarketingUser") && userQueryRes.result.UserPermissionsMarketingUser === false) { + if ( + (this.projectSandboxDef.features || []).includes('MarketingUser') && + userQueryRes.result.UserPermissionsMarketingUser === false + ) { // Make sure MarketingUser is checked on sandbox org user if it is supposed to be - updatedUserValues += " UserPermissionsMarketingUser=true"; + updatedUserValues += ' UserPermissionsMarketingUser=true'; } - const userUpdateCommand = `sfdx force:data:record:update -s User -i ${userQueryRes.result.Id} -v "${updatedUserValues}" -u ${this.sandboxOrgAlias}`; + const userUpdateCommand = `sf data update record --sobject User --record-id ${userQueryRes.result.Id} --values "${updatedUserValues}" --target-org ${this.sandboxOrgAlias}`; await execSfdxJson(userUpdateCommand, this, { fail: false, output: true, debug: this.debugMode }); + /* jscpd:ignore-end */ } } diff --git a/src/commands/hardis/org/data/delete.ts b/src/commands/hardis/org/data/delete.ts index 2ade0dd72..68407c0c4 100644 --- a/src/commands/hardis/org/data/delete.ts +++ b/src/commands/hardis/org/data/delete.ts @@ -1,80 +1,104 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { deleteData, selectDataWorkspace } from "../../../../common/utils/dataUtils"; -import { promptOrgUsernameDefault } from "../../../../common/utils/orgUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from 
'@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, uxLog } from '../../../../common/utils/index.js'; +import { deleteData, selectDataWorkspace } from '../../../../common/utils/dataUtils.js'; +import { promptOrgUsernameDefault } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DataExport extends SfCommand { + public static title = 'Delete data'; -export default class DataExport extends SfdxCommand { - public static title = "Delete data"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("orgDataDelete"); +**Deletes records in multiple Salesforce objects using an SFDMU (Salesforce Data Migration Utility) workspace.** - public static examples = ["$ sfdx hardis:org:data:delete"]; +This command provides a powerful and controlled way to remove data from your Salesforce orgs based on configurations defined in an SFDMU workspace. It's particularly useful for: - protected static flagsConfig = { - path: flags.string({ - char: "p", - description: "Path to the sfdmu workspace folder", +- **Data Cleanup:** Removing test data, obsolete records, or sensitive information. +- **Environment Reset:** Preparing sandboxes for new development cycles by clearing specific data sets. +- **Compliance:** Deleting data to meet regulatory requirements. 
+ +**Important Considerations for Production Environments:** + +If you intend to run this command in a production environment, you must: + +- Set \`runnableInProduction\` to \`true\` in your \`export.json\` file within the SFDMU workspace. +- Define \`sfdmuCanModify: YOUR_INSTANCE_URL\` in your branch-specific configuration file (e.g., \`config/branches/.sfdx-hardis.YOUR_BRANCH.yml\`) to explicitly authorize data modification for that instance. + +
+Technical explanations + +The command's technical implementation relies heavily on the SFDMU plugin: + +- **SFDMU Integration:** It leverages the \`sfdmu\` plugin to perform the actual data deletion operations. The command acts as a wrapper, providing an assisted interface for SFDMU execution. +- **Workspace Selection:** If the SFDMU workspace path is not provided via the \`--path\` flag, it interactively prompts the user to select a data workspace using \`selectDataWorkspace\`. +- **Org Selection:** It ensures that a target Salesforce org is selected (either via the \`--target-org\` flag or through an interactive prompt using \`promptOrgUsernameDefault\`) to specify where the data deletion will occur. +- **\`deleteData\` Utility:** The core logic for executing the SFDMU deletion process is encapsulated within the \`deleteData\` utility function, which takes the SFDMU workspace path and the target username as arguments. +- **Environment Awareness:** It checks the \`isCI\` flag to determine whether to run in an interactive mode (prompting for user input) or a non-interactive mode (relying solely on command-line flags). +- **Required Plugin:** It explicitly lists \`sfdmu\` as a required plugin, ensuring that the necessary dependency is in place before execution. +
+`; + + public static examples = ['$ sf hardis:org:data:delete']; + + public static flags: any = { + path: Flags.string({ + char: 'p', + description: 'Path to the sfdmu workspace folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdmu"]; + protected static requiresSfdxPlugins = ['sfdmu']; /* jscpd:ignore-end */ public async run(): Promise { - let sfdmuPath = this.flags.path || null; + const { flags } = await this.parse(DataExport); + let sfdmuPath = flags.path || null; // Identify sfdmu workspace if not defined if (sfdmuPath == null) { - sfdmuPath = await selectDataWorkspace({ selectDataLabel: "Please select a data workspace to use for DELETION" }); + sfdmuPath = await selectDataWorkspace({ selectDataLabel: 'Please select a data workspace to use for DELETION' }); } // Select org that where records will be imported - let orgUsername = 
this.org.getUsername(); + let orgUsername = flags['target-org'].getUsername(); if (!isCI) { - orgUsername = await promptOrgUsernameDefault(this, orgUsername, { devHub: false, setDefault: false }); + orgUsername = await promptOrgUsernameDefault(this, orgUsername || '', { devHub: false, setDefault: false, defaultOrgUsername: flags['target-org']?.getUsername() }); } // Export data from org - await deleteData(sfdmuPath, this, { + await deleteData(sfdmuPath || '', this, { targetUsername: orgUsername, }); // Output message - const message = `Successfully deleted data from org ${c.green(orgUsername)} using SFDMU project ${c.green(sfdmuPath)}`; - uxLog(this, c.cyan(message)); + const message = `Successfully deleted data from org ${c.green(orgUsername)} using SFDMU project ${c.green( + sfdmuPath + )}`; + uxLog("action", this, c.cyan(message)); return { outputString: message }; } } diff --git a/src/commands/hardis/org/data/export.ts b/src/commands/hardis/org/data/export.ts index b68d9de6d..4154f18af 100644 --- a/src/commands/hardis/org/data/export.ts +++ b/src/commands/hardis/org/data/export.ts @@ -1,86 +1,133 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { exportData, selectDataWorkspace } from "../../../../common/utils/dataUtils"; -import { promptOrgUsernameDefault } from "../../../../common/utils/orgUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, uxLog } from '../../../../common/utils/index.js'; +import { exportData, findDataWorkspaceByName, selectDataWorkspace } from '../../../../common/utils/dataUtils.js'; +import { promptOrgUsernameDefault } from 
'../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DataExport extends SfCommand { + public static title = 'Export data'; -export default class DataExport extends SfdxCommand { - public static title = "Export data"; + public static description = ` +## Command Behavior - public static description = `Export data from an org using a [SFDX Data Loader](https://help.sfdmu.com/) Project +**Exports data from a Salesforce org using an SFDMU (Salesforce Data Migration Utility) project.** -See article: +This command facilitates the extraction of data from your Salesforce environments based on configurations defined in an SFDMU workspace. It's a powerful tool for various data-related tasks, including: + +- **Data Backup:** Creating snapshots of your Salesforce data. +- **Data Migration:** Extracting data for transfer to another Salesforce org or external system. +- **Reporting and Analysis:** Exporting specific datasets for detailed analysis outside of Salesforce. +- **Data Seeding:** Preparing data for import into other environments. + +Key functionalities: + +- **SFDMU Workspace Integration:** Leverages an existing SFDMU workspace (defined by an \`export.json\` file) to determine which objects and records to export, along with any filtering or transformation rules. +- **Interactive Workspace Selection:** If the SFDMU workspace path is not provided via the \`--path\` flag, it interactively prompts the user to select one. 
+- **Org Selection:** Ensures that a target Salesforce org is selected (either via the \`--target-org\` flag or through an interactive prompt) to specify the source of the data export. + +See this article for a practical example: [![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) + +
+Technical explanations + +The command's technical implementation relies heavily on the SFDMU plugin: + +- **SFDMU Integration:** It acts as a wrapper around the \`sfdmu\` plugin, which performs the actual data export operations. The command provides an assisted interface for SFDMU execution. +- **\`exportData\` Utility:** The core logic for executing the SFDMU export process is encapsulated within the \`exportData\` utility function, which takes the SFDMU workspace path and the source username as arguments. +- **Interactive Prompts:** Uses \`selectDataWorkspace\` to allow the user to choose an SFDMU project and \`promptOrgUsernameDefault\` for selecting the source Salesforce org when not running in a CI environment. +- **Environment Awareness:** Checks the \`isCI\` flag to determine whether to run in an interactive mode (prompting for user input) or a non-interactive mode (relying solely on command-line flags). +- **Required Plugin:** It explicitly lists \`sfdmu\` as a required plugin, ensuring that the necessary dependency is in place before execution. +
`; - public static examples = ["$ sfdx hardis:org:data:export"]; + public static examples = [ + '$ sf hardis:org:data:export', + '$ sf hardis:org:data:export --project-name MyDataProject --target-org my-org@example.com', + '$ sf hardis:org:data:export --path ./scripts/data/MyDataProject --no-prompt --target-org my-org@example.com', + ]; - protected static flagsConfig = { - path: flags.string({ - char: "p", - description: "Path to the sfdmu workspace folder", + public static flags: any = { + "project-name": Flags.string({ + char: 'n', + description: 'Name of the sfdmu project to use (if not defined, you will be prompted to select one)', + }), + path: Flags.string({ + char: 'p', + description: 'Path to the sfdmu workspace folder', }), - debug: flags.boolean({ - char: "d", + "no-prompt": Flags.boolean({ + char: 'r', + description: 'Do not prompt for Org, use default org', default: false, - description: messages.getMessage("debugMode"), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; // List required plugins, their presence will be tested before running 
the command - protected static requiresSfdxPlugins = ["sfdmu"]; + protected static requiresSfdxPlugins = ['sfdmu']; /* jscpd:ignore-end */ public async run(): Promise { - let sfdmuPath = this.flags.path || null; - //const debugMode = this.flags.debug || false; + const { flags } = await this.parse(DataExport); + let sfdmuPath = flags.path || null; + const projectName = flags["project-name"] || null; + const noPrompts = flags["no-prompt"] || false; + //const debugMode = flags.debug || false; - // Identify sfdmu workspace if not defined - if (sfdmuPath == null) { - sfdmuPath = await selectDataWorkspace({ selectDataLabel: "Please select a data workspace to EXPORT" }); - } + + uxLog("action", this, c.cyan('This command will launch data EXPORT (download from org) using SFDX Data Loader (sfdmu)')); // Select org that will be used to export records - let orgUsername = this.org.getUsername(); - if (!isCI) { - orgUsername = await promptOrgUsernameDefault(this, orgUsername, { devHub: false, setDefault: false }); + let orgUsername = flags['target-org'].getUsername(); + if (!isCI && noPrompts === false) { + orgUsername = await promptOrgUsernameDefault(this, orgUsername || '', { devHub: false, setDefault: false }); + } + + // Find by project name if provided + if (projectName != null && sfdmuPath == null) { + sfdmuPath = await findDataWorkspaceByName(projectName); + } + + // Identify sfdmu workspace if not defined + if (sfdmuPath == null) { + sfdmuPath = await selectDataWorkspace({ + selectDataLabel: `Please select a data workspace to EXPORT from ${c.green(orgUsername)}`, + }); } // Export data from org - await exportData(sfdmuPath, this, { + await exportData(sfdmuPath || '', this, { sourceUsername: orgUsername, }); // Output message - const message = `Successfully exported data from sfdmu project ${c.green(sfdmuPath)} from org ${c.green(orgUsername)}`; - uxLog(this, c.cyan(message)); + const message = `Successfully exported data from sfdmu project ${c.green(sfdmuPath)} from org 
${c.green( + orgUsername + )}`; + uxLog("action", this, c.cyan(message)); return { outputString: message }; } } diff --git a/src/commands/hardis/org/data/import.ts b/src/commands/hardis/org/data/import.ts index a3404321b..ff3f65cb8 100644 --- a/src/commands/hardis/org/data/import.ts +++ b/src/commands/hardis/org/data/import.ts @@ -1,85 +1,111 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { importData, selectDataWorkspace } from "../../../../common/utils/dataUtils"; -import { promptOrgUsernameDefault } from "../../../../common/utils/orgUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, uxLog } from '../../../../common/utils/index.js'; +import { findDataWorkspaceByName, importData, selectDataWorkspace } from '../../../../common/utils/dataUtils.js'; +import { promptOrgUsernameDefault } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class DataExport extends SfdxCommand { - public static title = "Import data"; +export default class DataImport extends SfCommand { + public static title = 'Import data'; public static description = `Import/Load data in an org using a [SFDX Data Loader](https://help.sfdmu.com/) Project +If you need to run this command in a production org, you need to either: + +- Define **sfdmuCanModify** in your .sfdx-hardis.yml config file. (Example: \`sfdmuCanModify: prod-instance.my.salesforce.com\`) +- Define an environment variable SFDMU_CAN_MODIFY. (Example: \`SFDMU_CAN_MODIFY=prod-instance.my.salesforce.com\`) + See article: [![How to detect bad words in Salesforce records using SFDX Data Loader and sfdx-hardis](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-badwords.jpg)](https://nicolas.vuillamy.fr/how-to-detect-bad-words-in-salesforce-records-using-sfdx-data-loader-and-sfdx-hardis-171db40a9bac) `; - public static examples = ["$ sfdx hardis:org:data:import"]; - - protected static flagsConfig = { - path: flags.string({ - char: "p", - description: "Path to the sfdmu workspace folder", + public static examples = [ + '$ sf hardis:org:data:import', + '$ sf hardis:org:data:import --project-name MyDataProject --target-org my-org@example.com', + '$ sf hardis:org:data:import --path ./scripts/data/MyDataProject --no-prompt --target-org my-org@example.com', + '$ SFDMU_CAN_MODIFY=prod-instance.my.salesforce.com sf hardis:org:data:import --project-name MyDataProject --target-org prod@example.com', + ]; + + public static flags: any = { + "project-name": Flags.string({ + char: 'n', + description: 'Name of the sfdmu project to use (if not defined, you will be prompted to select one)', + }), + path: Flags.string({ + char: 'p', + description: 'Path to the sfdmu workspace folder', }), - debug: flags.boolean({ - char: "d", + "no-prompt": Flags.boolean({ + char: 'r', + 
description: 'Do not prompt for Org, use default org', default: false, - description: messages.getMessage("debugMode"), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdmu"]; + protected static requiresSfdxPlugins = ['sfdmu']; /* jscpd:ignore-end */ public async run(): Promise { - let sfdmuPath = this.flags.path || null; + const { flags } = await this.parse(DataImport); + let sfdmuPath = flags.path || null; + const projectName = flags["project-name"] || null; + const noPrompts = flags["no-prompt"] || false; + + uxLog("action", this, c.cyan('This command will launch data IMPORT (upload to org) using SFDX Data Loader (sfdmu)')); + + // Select org that where records will be imported + let orgUsername = flags['target-org'].getUsername(); + if (!isCI && noPrompts === false) { + orgUsername = await promptOrgUsernameDefault(this, orgUsername || '', { devHub: false, setDefault: false }); + } + + // Find by project name if provided + 
if (projectName != null && sfdmuPath == null) { + sfdmuPath = await findDataWorkspaceByName(projectName); + } // Identify sfdmu workspace if not defined if (sfdmuPath == null) { - sfdmuPath = await selectDataWorkspace({ selectDataLabel: "Please select a data workspace to IMPORT" }); + sfdmuPath = await selectDataWorkspace({ + selectDataLabel: `Please select a data workspace to IMPORT in ${c.green(orgUsername)}`, + }); } - // Select org that where records will be imported - let orgUsername = this.org.getUsername(); - if (!isCI) { - orgUsername = await promptOrgUsernameDefault(this, orgUsername, { devHub: false, setDefault: false }); - } + // Export data from org - await importData(sfdmuPath, this, { + await importData(sfdmuPath || '', this, { targetUsername: orgUsername, }); // Output message - const message = `Successfully import data from sfdmu project ${c.green(sfdmuPath)} into org ${c.green(orgUsername)}`; - uxLog(this, c.cyan(message)); + const message = `Successfully import data from sfdmu project ${c.green(sfdmuPath)} into org ${c.green( + orgUsername + )}`; + uxLog("action", this, c.cyan(message)); return { outputString: message }; } } diff --git a/src/commands/hardis/org/diagnose/audittrail.ts b/src/commands/hardis/org/diagnose/audittrail.ts index 7e1b308a5..f18a2c725 100644 --- a/src/commands/hardis/org/diagnose/audittrail.ts +++ b/src/commands/hardis/org/diagnose/audittrail.ts @@ -1,28 +1,28 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { bulkQuery } from "../../../../common/utils/apiUtils"; -import { getConfig } from "../../../../config"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { prompts } from "../../../../common/utils/prompts"; -import { generateCsvFile, generateReportPath } 
from "../../../../common/utils/filesUtils"; -import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from "../../../../common/utils/notifUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class DiagnoseAuditTrail extends SfdxCommand { - public static title = "Diagnose content of Setup Audit Trail"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, sortCrossPlatform, uxLog } from '../../../../common/utils/index.js'; +import { bulkQuery } from '../../../../common/utils/apiUtils.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { CONSTANTS, getConfig } from '../../../../config/index.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DiagnoseAuditTrail extends SfCommand { + public static title = 'Diagnose content of Setup Audit Trail'; public static description = `Export Audit trail into a CSV file with selected criteria, and highlight suspect actions +Also detects updates of Custom 
Settings values (disable by defining \`SKIP_AUDIT_TRAIL_CUSTOM_SETTINGS=true\`) + Regular setup actions performed in major orgs are filtered. - "" @@ -35,6 +35,10 @@ Regular setup actions performed in major orgs are filtered. - Custom App Licenses - addeduserpackagelicense - granteduserpackagelicense + - revokeduserpackagelicense +- Customer Portal + - createdcustomersuccessuser + - CSPUserDisabled - Currency - updateddatedexchrate - Data Management @@ -42,12 +46,15 @@ Regular setup actions performed in major orgs are filtered. - Email Administration - dkimRotationPreparationSuccessful - dkimRotationSuccessful +- External Objects + - xdsEncryptedFieldChange - Groups - groupMembership - Holidays - holiday_insert - Inbox mobile and legacy desktop apps - enableSIQUserNonEAC + - siqUserAcceptedTOS - Manage Users - activateduser - createduser @@ -59,16 +66,24 @@ Regular setup actions performed in major orgs are filtered. - changedinteractionuseronoff - changedmarketinguseroffon - changedmarketinguseronoff + - changedofflineuseroffon + - changedprofileforuserstdtostd - changedprofileforuser - changedprofileforusercusttostd - changedprofileforuserstdtocust - changedroleforusertonone - changedroleforuser - changedroleforuserfromnone + - changedUserAdminVerifiedStatusVerified - changedUserEmailVerifiedStatusUnverified - changedUserEmailVerifiedStatusVerified + - changedknowledgeuseroffon + - changedsfcontentuseroffon + - changedsupportuseroffon + - changedusername - changedUserPhoneNumber - changedUserPhoneVerifiedStatusUnverified + - changedUserPhoneVerifiedStatusVerified - deactivateduser - deleteAuthenticatorPairing - deleteTwoFactorInfo2 @@ -86,6 +101,8 @@ Regular setup actions performed in major orgs are filtered. 
- PermSetLicenseUnassign - registeredUserPhoneNumber - resetpassword + - suNetworkAdminLogin + - suNetworkAdminLogout - suOrgAdminLogin - suOrgAdminLogout - unfrozeuser @@ -117,291 +134,181 @@ monitoringAllowedSectionsActions: "Some other section": ["actionType1","actionType2","actionType3"] // Will ignore only those 3 actions from section "Some other section". Other actions in the same section will be considered as suspect. \`\`\` -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-suspect-audit-trail/) and can output Grafana, Slack and MsTeams Notifications. +## Excel output example + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-monitoring-audittrail-excel.jpg) + +## Local output example + +![](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/screenshot-monitoring-audittrail-local.jpg) + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-suspect-audit-trail/) and can output Grafana, Slack and MsTeams Notifications. 
`; public static examples = [ - "$ sfdx hardis:org:diagnose:audittrail", - "$ sfdx hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com", - "$ sfdx hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com,bertrand@titi.com", - "$ sfdx hardis:org:diagnose:audittrail --lastndays 5", + '$ sf hardis:org:diagnose:audittrail', + '$ sf hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com', + '$ sf hardis:org:diagnose:audittrail --excludeusers baptiste@titi.com,bertrand@titi.com', + '$ sf hardis:org:diagnose:audittrail --lastndays 5', ]; - protected static flagsConfig = { - excludeusers: flags.string({ - char: "e", - description: "Comma-separated list of usernames to exclude", + public static flags: any = { + excludeusers: Flags.string({ + char: 'e', + description: 'Comma-separated list of usernames to exclude', }), - lastndays: flags.number({ - char: "t", - description: "Number of days to extract from today (included)", + lastndays: Flags.integer({ + char: 't', + description: 'Number of days to extract from today (included)', }), - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; + public static requiresProject = false; - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; - - protected excludeUsers = []; - protected lastNdays: number; + protected excludeUsers: any[] = []; + protected lastNdays: number | undefined; protected allowedSectionsActions = {}; protected debugMode = false; - protected auditTrailRecords = []; + protected suspectRecords: any[] = []; + protected suspectUsers: any[] = []; + protected suspectUsersAndActions: any = {}; + protected suspectActions: any[] = []; + protected severityIconLog = getSeverityIcon('log'); + protected severityIconWarning = getSeverityIcon('warning'); + + protected auditTrailRecords: any[] = []; protected outputFile; protected outputFilesRes: any = {}; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; - this.excludeUsers = this.flags.excludeusers ? 
this.flags.excludeusers.split(",") : []; - this.lastNdays = this.flags.lastndays; - this.outputFile = this.flags.outputfile || null; - const config = await getConfig("branch"); + const { flags } = await this.parse(DiagnoseAuditTrail); + this.debugMode = flags.debug || false; + this.excludeUsers = flags.excludeusers ? flags.excludeusers.split(',') : []; + this.lastNdays = flags.lastndays; + this.outputFile = flags.outputfile || null; + const config = await getConfig('branch'); // If manual mode and lastndays not sent as parameter, prompt user - if (!isCI && !this.lastNdays) { - const lastNdaysResponse = await prompts({ - type: "select", - name: "lastndays", - message: "Please select the number of days in the past from today you want to detect suspiscious setup activities", - choices: [ - { title: `1`, value: 1 }, - { title: `2`, value: 2 }, - { title: `3`, value: 3 }, - { title: `4`, value: 4 }, - { title: `5`, value: 5 }, - { title: `6`, value: 6 }, - { title: `7`, value: 7 }, - { title: `14`, value: 14 }, - { title: `30`, value: 30 }, - { title: `60`, value: 60 }, - { title: `90`, value: 90 }, - { title: `180`, value: 180 }, - ], - }); - this.lastNdays = lastNdaysResponse.lastndays; - } else { - this.lastNdays = this.lastNdays || 1; - } + await this.manageAuditTimeframe(); - this.allowedSectionsActions = { - "": ["createScratchOrg", "changedsenderemail", "deleteScratchOrg", "loginasgrantedtopartnerbt"], - "Certificate and Key Management": ["insertCertificate"], - "Custom App Licenses": ["addeduserpackagelicense", "granteduserpackagelicense"], - Currency: ["updateddatedexchrate"], - "Data Management": ["queueMembership"], - "Email Administration": ["dkimRotationSuccessful", "dkimRotationPreparationSuccessful"], - Holidays: ["holiday_insert"], - "Inbox mobile and legacy desktop apps": ["enableSIQUserNonEAC"], - Groups: ["groupMembership"], - "Manage Territories": ["tm2_userAddedToTerritory", "tm2_userRemovedFromTerritory"], - "Manage Users": [ - "activateduser", - 
"createduser", - "changedcommunitynickname", - "changedemail", - "changedfederationid", - "changedinteractionuseroffon", - "changedinteractionuseronoff", - "changedmarketinguseroffon", - "changedmarketinguseronoff", - "changedManager", - "changedprofileforuser", - "changedprofileforusercusttostd", - "changedprofileforuserstdtocust", - "changedroleforusertonone", - "changedroleforuser", - "changedroleforuserfromnone", - "changedpassword", - "changedUserEmailVerifiedStatusUnverified", - "changedUserEmailVerifiedStatusVerified", - "changedUserPhoneNumber", - "changedUserPhoneVerifiedStatusUnverified", - "deactivateduser", - "deleteAuthenticatorPairing", - "deleteTwoFactorInfo2", - "deleteTwoFactorTempCode", - "frozeuser", - "insertAuthenticatorPairing", - "insertTwoFactorInfo2", - "insertTwoFactorTempCode", - "lightningloginenroll", - "PermSetAssign", - "PermSetGroupAssign", - "PermSetGroupUnassign", - "PermSetLicenseAssign", - "PermSetUnassign", - "PermSetLicenseUnassign", - "registeredUserPhoneNumber", - "resetpassword", - "suOrgAdminLogin", - "suOrgAdminLogout", - "unfrozeuser", - "useremailchangesent", - ], - "Mobile Administration": ["assigneduserstomobileconfig"], - "Reporting Snapshots": ["createdReportJob", "deletedReportJob"], - Sandboxes: ["DeleteSandbox"], - }; + // Initialize exceptions that will not be considered as suspect + this.initializeAllowedSectionsActions(); // Append custom sections & actions considered as not suspect if (config.monitoringAllowedSectionsActions) { this.allowedSectionsActions = Object.assign(this.allowedSectionsActions, config.monitoringAllowedSectionsActions); } - const conn = this.org.getConnection(); - uxLog(this, c.cyan(`Extracting Setup Audit Trail and detect suspect actions in ${conn.instanceUrl} ...`)); + const conn = flags['target-org'].getConnection(); + uxLog("action", this, c.cyan(`Extracting Setup Audit Trail and detect suspect actions in ${conn.instanceUrl} ...`)); // Manage exclude users list - if 
(this.excludeUsers.length === 0) { - if (config.targetUsername) { - this.excludeUsers.push(config.targetUsername); - } - if (config.monitoringExcludeUsernames) { - this.excludeUsers.push(...config.monitoringExcludeUsernames); - } - } - let whereConstraint = `WHERE CreatedDate = LAST_N_DAYS:${this.lastNdays}` + ` AND CreatedBy.Username != NULL `; - if (this.excludeUsers.length > 0) { - whereConstraint += `AND CreatedBy.Username NOT IN ('${this.excludeUsers.join("','")}') `; - } - - uxLog(this, c.cyan(`Excluded users are ${this.excludeUsers.join(",") || "None"}`)); - uxLog(this, c.cyan(`Use argument --excludeusers or .sfdx-hardis.yml property monitoringExcludeUsernames to exclude more users`)); + const whereConstraint = this.manageExcludedUsers(config); // Fetch SetupAuditTrail records - const auditTrailQuery = - `SELECT CreatedDate,CreatedBy.Username,CreatedBy.Name,Action,Section,Display,ResponsibleNamespacePrefix,DelegateUser ` + - `FROM SetupAuditTrail ` + - whereConstraint + - `ORDER BY CreatedDate DESC`; - uxLog(this, c.grey("Query: " + c.italic(auditTrailQuery))); - const queryRes = await bulkQuery(auditTrailQuery, conn); - const suspectRecords = []; - let suspectUsers = []; - const suspectActions = []; - const severityIconLog = getSeverityIcon("log"); - const severityIconWarning = getSeverityIcon("warning"); - this.auditTrailRecords = queryRes.records.map((record) => { - const section = record?.Section || ""; - record.Suspect = false; - record.severity = "log"; - record.severityIcon = severityIconLog; - // Unallowed actions - if ( - (this.allowedSectionsActions[section] && !this.allowedSectionsActions[section].includes(record.Action)) || - !this.allowedSectionsActions[section] - ) { - record.Suspect = true; - record.SuspectReason = `Manual config in unallowed section ${section} with action ${record.Action}`; - record.severity = "warning"; - record.severityIcon = severityIconWarning; - suspectRecords.push(record); - 
suspectUsers.push(record["CreatedBy.Username"] + " - " + record["CreatedBy.Name"]); - suspectActions.push(`${section} - ${record.Action}`); - return record; - } - return record; - }); + await this.queryAuditTrail(whereConstraint, conn); + + await this.handleCustomSettingsAudit(conn); + // Summarize + uxLog("action", this, c.cyan(`Results summary:`)); let statusCode = 0; - let msg = "No suspect Setup Audit Trail records has been found"; - const suspectActionsWithCount = []; - if (suspectRecords.length > 0) { + let msg = 'No suspect Setup Audit Trail records has been found'; + const suspectActionsWithCount: any[] = []; + if (this.suspectRecords.length > 0) { statusCode = 1; - uxLog(this, c.yellow("Suspect records list")); - uxLog(this, JSON.stringify(suspectRecords, null, 2)); - msg = `${suspectRecords.length} suspect Setup Audit Trail records has been found`; - uxLog(this, c.yellow(msg)); - suspectUsers = [...new Set(suspectUsers)]; - suspectUsers.sort(); - const suspectActionsSummary = {}; - for (const suspectAction of suspectActions) { + msg = `${this.suspectRecords.length} suspect Setup Audit Trail records has been found`; + this.suspectUsers = [...new Set(this.suspectUsers)]; + sortCrossPlatform(this.suspectUsers); + const suspectActionsSummary: Record = {}; + for (const suspectAction of this.suspectActions) { suspectActionsSummary[suspectAction] = (suspectActionsSummary[suspectAction] || 0) + 1; } for (const suspectAction of Object.keys(suspectActionsSummary)) { suspectActionsWithCount.push(`${suspectAction} (${suspectActionsSummary[suspectAction]})`); } - suspectActionsWithCount.sort(); - uxLog(this, ""); - uxLog(this, c.yellow("Related users:")); - for (const user of suspectUsers) { - uxLog(this, c.yellow(`- ${user}`)); + sortCrossPlatform(suspectActionsWithCount); + + uxLog("other", this, 'Suspect records list'); + uxLog("other", this, JSON.stringify(this.suspectRecords, null, 2)); + + let logMsg = ''; + logMsg += c.yellow(msg) + '\n\n'; + logMsg += 
c.yellow('Related users:') + '\n'; + for (const user of this.suspectUsers) { + logMsg += c.yellow(`- ${user}` + ' (' + this.suspectUsersAndActions[user].actions.join(', ') + ")") + '\n'; } - uxLog(this, ""); - uxLog(this, c.yellow("Related actions:")); + logMsg += '\n' + c.yellow('Related actions:') + '\n'; for (const action of suspectActionsWithCount) { - uxLog(this, c.yellow(`- ${action}`)); + logMsg += c.yellow(`- ${action}`) + '\n'; } - uxLog(this, ""); + logMsg += '\n'; + uxLog("other", this, logMsg); } else { - uxLog(this, c.green(msg)); + uxLog("success", this, c.green(msg)); } // Generate output CSV file - this.outputFile = await generateReportPath("audit-trail", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.auditTrailRecords, this.outputFile); + this.outputFile = await generateReportPath('audit-trail', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.auditTrailRecords, this.outputFile, { fileTitle: 'Suspect Actions' }); // Manage notifications - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No suspect Setup Audit Trail records has been found in ${orgMarkdown}`; - let notifAttachments = []; - if (suspectRecords.length > 0) { - notifSeverity = "warning"; - notifText = `${suspectRecords.length} suspect Setup Audit Trail records have been found in ${orgMarkdown}`; + let notifAttachments: any[] = []; + if (this.suspectRecords.length > 0) { + notifSeverity = 'warning'; + notifText = `${this.suspectRecords.length} suspect Setup Audit Trail records have been found in ${orgMarkdown}`; let notifDetailText = ``; - notifDetailText += "*Related users*:\n"; - for (const user of suspectUsers) { - notifDetailText += `• ${user}\n`; + 
notifDetailText += '*Related users*:\n'; + for (const user of this.suspectUsers) { + notifDetailText += `• ${user + " (" + this.suspectUsersAndActions[user].actions.join(', ') + ")"}\n`; } - notifDetailText += "\n"; - notifDetailText += "*Related actions*:\n"; + notifDetailText += '\n'; + notifDetailText += '*Related actions*:\n'; for (const action of suspectActionsWithCount) { notifDetailText += `• ${action}\n`; } notifAttachments = [{ text: notifDetailText }]; } - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "AUDIT_TRAIL", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'AUDIT_TRAIL', text: notifText, attachments: notifAttachments, buttons: notifButtons, severity: notifSeverity, attachedFiles: this.outputFilesRes.xlsxFile ? [this.outputFilesRes.xlsxFile] : [], logElements: this.auditTrailRecords, - data: { metric: suspectRecords.length }, + data: { metric: this.suspectRecords.length }, metrics: { - SuspectMetadataUpdates: suspectRecords.length, + SuspectMetadataUpdates: this.suspectRecords.length, }, }); - if ((this.argv || []).includes("audittrail")) { + if ((this.argv || []).includes('audittrail')) { process.exitCode = statusCode; } @@ -409,9 +316,269 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co return { status: statusCode, message: msg, - suspectRecords: suspectRecords, - suspectUsers: suspectUsers, + suspectRecords: this.suspectRecords, + suspectUsers: this.suspectUsers, csvLogFile: this.outputFile, }; } + + private async queryAuditTrail(whereConstraint: string, conn: any) { + const auditTrailQuery = `SELECT CreatedDate,CreatedBy.Username,CreatedBy.Name,Action,Section,Display,ResponsibleNamespacePrefix,DelegateUser ` + + `FROM SetupAuditTrail ` + + whereConstraint + + `ORDER BY CreatedDate 
DESC`; + const queryRes = await bulkQuery(auditTrailQuery, conn); + this.auditTrailRecords = queryRes.records.map((record) => { + const section = record?.Section || ''; + record.Suspect = false; + record.severity = 'log'; + record.severityIcon = this.severityIconLog; + // Unallowed actions + if (( + this.allowedSectionsActions[section] && + this.allowedSectionsActions[section].length > 0 && + !this.allowedSectionsActions[section].includes(record.Action) + ) || + !this.allowedSectionsActions[section]) { + record.Suspect = true; + record.SuspectReason = `Manual config in unallowed section ${section} with action ${record.Action}`; + record.severity = 'warning'; + record.severityIcon = this.severityIconWarning; + this.suspectRecords.push(record); + const suspectUserDisplayName = `${record['CreatedBy.Name']}`; + this.suspectUsers.push(suspectUserDisplayName); + const actionFullName = `${section} - ${record.Action}`; + this.suspectActions.push(actionFullName); + if (!this.suspectUsersAndActions[suspectUserDisplayName]) { + this.suspectUsersAndActions[suspectUserDisplayName] = { + name: record['CreatedBy.Name'], + actions: [], + }; + } + const suspectUserActions = this.suspectUsersAndActions[suspectUserDisplayName].actions; + if (!suspectUserActions.includes(record.Action)) { + suspectUserActions.push(record.Action); + } + this.suspectUsersAndActions[suspectUserDisplayName].actions = suspectUserActions; + return record; + } + return record; + }); + } + + private async handleCustomSettingsAudit(conn: any) { + if (process.env?.SKIP_AUDIT_TRAIL_CUSTOM_SETTINGS === "true") { + uxLog("action", this, c.cyan(`Skipping Custom Settings modifications as SKIP_AUDIT_TRAIL_CUSTOM_SETTINGS=true has been found`)); + return; + } + // Add custom settings tracking + uxLog("action", this, c.cyan(`List available custom settings...`)); + uxLog("log", this, c.grey(`(Define SKIP_AUDIT_TRAIL_CUSTOM_SETTINGS=true if you don't want them)`)); + const customSettingsQuery = `SELECT QualifiedApiName, 
Label FROM EntityDefinition + WHERE IsCustomSetting = true`; + const customSettingsResult = await soqlQuery(customSettingsQuery, conn); + uxLog("action", this, c.cyan(`Analyze updates in ${customSettingsResult.records.length} Custom Settings...`)); + + let whereConstraintCustomSetting = `WHERE LastModifiedDate = LAST_N_DAYS:${this.lastNdays}` + ` AND LastModifiedBy.Username != NULL `; + if (this.excludeUsers.length > 0) { + whereConstraintCustomSetting += `AND LastModifiedBy.Username NOT IN ('${this.excludeUsers.join("','")}') `; + } + // Get custom settings modifications + const customSettingModifications: any[] = []; + for (const cs of customSettingsResult.records) { + try { + const result = await soqlQuery( + `SELECT Id, LastModifiedDate, LastModifiedBy.Name, LastModifiedBy.Username + FROM ${cs.QualifiedApiName} ` + + whereConstraintCustomSetting, + conn + ); + + if (result.records.length > 0) { + for (const record of result.records) { + customSettingModifications.push({ + CreatedDate: record.LastModifiedDate, + 'CreatedBy.Name': record['LastModifiedBy']?.['Name'], + 'CreatedBy.Username': record['LastModifiedBy']?.['Username'], + 'LastModifiedBy.Name': record['LastModifiedBy']?.['Name'], + 'LastModifiedBy.Username': record['LastModifiedBy']?.['Username'], + Action: `customSetting${cs.QualifiedApiName}`, + Section: 'Custom Settings', + Display: `Updated custom setting ${cs.Label} (${cs.QualifiedApiName})`, + ResponsibleNamespacePrefix: null, + DelegateUser: null, + Suspect: true, + severity: 'warning', + severityIcon: getSeverityIcon('warning'), + SuspectReason: `CustomSettingUpdate` + }); + } + } + } catch (error) { + uxLog("error", this, c.red(`Error querying Custom Setting ${cs.Label}: ${error}`)); + continue; + } + } + // Add custom setting updates to audit trail records + if (customSettingModifications.length > 0) { + uxLog("warning", this, c.yellow(`Found ${customSettingModifications.length} Custom Setting updates`)); + 
this.auditTrailRecords.push(...customSettingModifications); + + // Add to suspect records + for (const csUpdate of customSettingModifications) { + this.suspectRecords.push(csUpdate); + const suspectUserDisplayName = csUpdate['LastModifiedBy.Name']; + this.suspectUsers.push(suspectUserDisplayName); + const actionFullName = `${csUpdate.Section} - ${csUpdate.Display}`; + this.suspectActions.push(actionFullName); + + if (!this.suspectUsersAndActions[suspectUserDisplayName]) { + this.suspectUsersAndActions[suspectUserDisplayName] = { + name: csUpdate['LastModifiedBy.Name'], + actions: [] + }; + } + if (!this.suspectUsersAndActions[suspectUserDisplayName].actions.includes(csUpdate.Action)) { + this.suspectUsersAndActions[suspectUserDisplayName].actions.push(csUpdate.Action); + } + } + } + } + + private initializeAllowedSectionsActions() { + this.allowedSectionsActions = { + '': ['createScratchOrg', 'changedsenderemail', 'deleteScratchOrg', 'loginasgrantedtopartnerbt'], + 'Certificate and Key Management': ['insertCertificate'], + 'Custom App Licenses': [ + 'addeduserpackagelicense', + 'granteduserpackagelicense', + 'revokeduserpackagelicense' + ], + 'Customer Portal': [ + 'createdcustomersuccessuser', + 'CSPUserDisabled' + ], + Currency: ['updateddatedexchrate'], + 'Data Management': ['queueMembership'], + 'Email Administration': ['dkimRotationSuccessful', 'dkimRotationPreparationSuccessful'], + 'External Objects': ['xdsEncryptedFieldChange'], + Holidays: ['holiday_insert'], + 'Inbox mobile and legacy desktop apps': [ + 'enableSIQUserNonEAC', + 'siqUserAcceptedTOS' + ], + Groups: ['groupMembership'], + 'Manage Territories': ['tm2_userAddedToTerritory', 'tm2_userRemovedFromTerritory'], + 'Manage Users': [ + 'activateduser', + 'createduser', + 'changedcommunitynickname', + 'changedemail', + 'changedfederationid', + 'changedinteractionuseroffon', + 'changedinteractionuseronoff', + 'changedmarketinguseroffon', + 'changedmarketinguseronoff', + 'changedManager', + 
"changedofflineuseroffon", + 'changedprofileforuser', + 'changedprofileforusercusttostd', + 'changedprofileforuserstdtocust', + 'changedroleforusertonone', + 'changedroleforuser', + 'changedroleforuserfromnone', + 'changedpassword', + "changedprofileforuserstdtostd", + 'changedsfcontentuseroffon', + 'changedUserAdminVerifiedStatusVerified', + 'changedUserEmailVerifiedStatusUnverified', + 'changedUserEmailVerifiedStatusVerified', + 'changedknowledgeuseroffon', + 'changedsupportuseroffon', + 'changedusername', + 'changedUserPhoneNumber', + 'changedUserPhoneVerifiedStatusUnverified', + 'changedUserPhoneVerifiedStatusVerified', + 'deactivateduser', + 'deleteAuthenticatorPairing', + 'deleteTwoFactorInfo2', + 'deleteTwoFactorTempCode', + 'frozeuser', + 'insertAuthenticatorPairing', + 'insertTwoFactorInfo2', + 'insertTwoFactorTempCode', + 'lightningloginenroll', + 'PermSetAssign', + 'PermSetGroupAssign', + 'PermSetGroupUnassign', + 'PermSetLicenseAssign', + 'PermSetUnassign', + 'PermSetLicenseUnassign', + 'registeredUserPhoneNumber', + 'resetpassword', + 'suNetworkAdminLogin', + 'suNetworkAdminLogout', + 'suOrgAdminLogin', + 'suOrgAdminLogout', + 'unfrozeuser', + 'useremailchangesent', + ], + 'Mobile Administration': ['assigneduserstomobileconfig'], + 'Reporting Snapshots': ['createdReportJob', 'deletedReportJob'], + Sandboxes: ['DeleteSandbox'], + }; + } + + private async manageAuditTimeframe() { + if (!isCI && !this.lastNdays) { + const lastNdaysResponse = await prompts({ + type: 'select', + name: 'lastndays', + message: 'Please select the number of days in the past from today you want to detect suspiscious setup activities', + description: 'Choose the timeframe for analyzing audit trail records to detect suspicious administrative activities', + placeholder: 'Select number of days', + choices: [ + { title: `1`, value: 1 }, + { title: `2`, value: 2 }, + { title: `3`, value: 3 }, + { title: `4`, value: 4 }, + { title: `5`, value: 5 }, + { title: `6`, value: 6 }, + { 
title: `7`, value: 7 }, + { title: `14`, value: 14 }, + { title: `30`, value: 30 }, + { title: `60`, value: 60 }, + { title: `90`, value: 90 }, + { title: `180`, value: 180 }, + ], + }); + this.lastNdays = lastNdaysResponse.lastndays; + } else { + this.lastNdays = this.lastNdays || 1; + } + } + + private manageExcludedUsers(config: any) { + if (this.excludeUsers.length === 0) { + if (config.targetUsername) { + this.excludeUsers.push(config.targetUsername); + } + if (config.monitoringExcludeUsernames) { + this.excludeUsers.push(...config.monitoringExcludeUsernames); + } + } + let whereConstraint = `WHERE CreatedDate = LAST_N_DAYS:${this.lastNdays}` + ` AND CreatedBy.Username != NULL `; + if (this.excludeUsers.length > 0) { + whereConstraint += `AND CreatedBy.Username NOT IN ('${this.excludeUsers.join("','")}') `; + } + uxLog("log", this, c.grey(`Excluded users are ${this.excludeUsers.join(',') || 'None'}`)); + uxLog( + "log", + this, + c.grey( + `Use argument --excludeusers or .sfdx-hardis.yml property monitoringExcludeUsernames to exclude more users` + ) + ); + return whereConstraint; + } } diff --git a/src/commands/hardis/org/diagnose/instanceupgrade.ts b/src/commands/hardis/org/diagnose/instanceupgrade.ts index 9097e75af..b57041d9c 100644 --- a/src/commands/hardis/org/diagnose/instanceupgrade.ts +++ b/src/commands/hardis/org/diagnose/instanceupgrade.ts @@ -1,63 +1,81 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import axios from "axios"; -import * as moment from "moment"; -import * as c from "chalk"; -import { uxLog } from "../../../../common/utils"; -import { soqlQuery } from "../../../../common/utils/apiUtils"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { getNotificationButtons, getOrgMarkdown } from "../../../../common/utils/notifUtils"; - -// Initialize Messages with the 
current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class DiagnoseInstanceUpgrade extends SfdxCommand { - public static title = "Get Instance Upgrade date"; - - public static description = `Get the date when the org instance will be upgraded (to Spring, Summer or Winter) - `; - - public static examples = ["$ sfdx hardis:org:diagnose:instanceupgrade"]; - - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import axios from 'axios'; +import moment from 'moment'; +import c from 'chalk'; +import { uxLog } from '../../../../common/utils/index.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { getNotificationButtons, getOrgMarkdown } from '../../../../common/utils/notifUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DiagnoseInstanceUpgrade extends SfCommand { + public static title = 'Get Instance Upgrade date'; + + public static description = ` +## Command Behavior + +**Retrieves and displays the scheduled upgrade date for a Salesforce org's instance.** + +This command provides crucial information about when your Salesforce instance will be upgraded to the next major release (Spring, Summer, or Winter). 
This is vital for release planning, testing, and ensuring compatibility with upcoming Salesforce features. + +Key functionalities: + +- **Instance Identification:** Determines the Salesforce instance name of your target org. +- **Upgrade Date Retrieval:** Fetches the planned start time of the next major core service upgrade for that instance from the Salesforce Status API. +- **Days Until Upgrade:** Calculates and displays the number of days remaining until the next major upgrade. +- **Severity-Based Logging:** Adjusts the log severity (info, warning) based on the proximity of the upgrade date, providing a visual cue for urgency. +- **Notifications:** Sends notifications to configured channels (e.g., Slack, MS Teams, Grafana) with the upgrade information, making it suitable for automated monitoring. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Query:** It first queries the \`Organization\` object in Salesforce to get the \`InstanceName\` of the target org. +- **Salesforce Status API Integration:** It makes an HTTP GET request to the Salesforce Status API (\`https://api.status.salesforce.com/v1/instances/{instanceName}/status\`) to retrieve detailed information about the instance, including scheduled maintenances. +- **Data Parsing:** It parses the JSON response from the Status API to extract the relevant major release upgrade information. +- **Date Calculation:** Uses the \`moment\` library to calculate the difference in days between the current date and the planned upgrade date. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including the instance name, upgrade date, and days remaining, along with relevant metrics for monitoring dashboards. +- **User Feedback:** Provides clear messages to the user about the upgrade status and proximity. +
+`; + + public static examples = ['$ sf hardis:org:diagnose:instanceupgrade']; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(DiagnoseInstanceUpgrade); + this.debugMode = flags.debug || false; // Get instance name - const orgQuery = "SELECT FIELDS(all) FROM Organization LIMIT 1"; - const orgQueryRes = await soqlQuery(orgQuery, this.org.getConnection()); + const orgQuery = 'SELECT FIELDS(all) FROM Organization LIMIT 1'; + const orgQueryRes = await soqlQuery(orgQuery, flags['target-org'].getConnection()); const orgInfo = orgQueryRes.records[0]; const instanceName = orgInfo.InstanceName; @@ -68,7 +86,11 @@ export default class DiagnoseInstanceUpgrade extends SfdxCommand { const maintenances = instanceInfo.Maintenances || []; orgInfo.maintenanceNextUpgrade = {}; for (const maintenance of maintenances) { - if 
(maintenance.isCore && maintenance.releaseType === "Major" && maintenance.serviceKeys.includes("coreService")) { + if ( + maintenance.isCore && + maintenance.releaseType === 'Major' && + maintenance.serviceKeys.includes('coreService') + ) { orgInfo.maintenanceNextUpgrade = maintenance; break; } @@ -76,30 +98,30 @@ export default class DiagnoseInstanceUpgrade extends SfdxCommand { // Get number of days before next major upgrade const nextUpgradeDate = moment(orgInfo?.maintenanceNextUpgrade?.plannedStartTime); - const nextMajorUpgradeDateStr = nextUpgradeDate.format(); + const nextMajorUpgradeDateStr = nextUpgradeDate.format("ll"); const today = moment(); - const daysBeforeUpgrade = nextUpgradeDate.diff(today, "days"); + const daysBeforeUpgrade = today.diff(nextUpgradeDate, 'days'); // Manage notifications - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; - const notifText = `Salesforce instance ${instanceName} of ${orgMarkdown} will be upgraded on ${nextMajorUpgradeDateStr} (${daysBeforeUpgrade} days) to ${orgInfo?.maintenanceNextUpgrade?.name}`; + let notifSeverity: NotifSeverity = 'log'; + const notifText = `Salesforce instance *${instanceName}* of ${orgMarkdown} will be upgraded on ${nextMajorUpgradeDateStr} (*${daysBeforeUpgrade} days*) to ${orgInfo?.maintenanceNextUpgrade?.name}`; // Change severity according to number of days if (daysBeforeUpgrade <= 15) { - notifSeverity = "warning"; - uxLog(this, c.yellow(notifText)); + notifSeverity = 'warning'; + uxLog("warning", this, c.yellow(notifText)); } else if (daysBeforeUpgrade <= 30) { - notifSeverity = "info"; - uxLog(this, c.green(notifText)); + notifSeverity = 'info'; + uxLog("success", this, c.green(notifText)); } else { - uxLog(this, c.green(notifText)); + uxLog("success", this, 
c.green(notifText)); } - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "ORG_INFO", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'ORG_INFO', text: notifText, attachments: [], buttons: notifButtons, diff --git a/src/commands/hardis/org/diagnose/legacyapi.ts b/src/commands/hardis/org/diagnose/legacyapi.ts index e820d6fc5..9b0bced3a 100644 --- a/src/commands/hardis/org/diagnose/legacyapi.ts +++ b/src/commands/hardis/org/diagnose/legacyapi.ts @@ -1,27 +1,29 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as sortArray from "sort-array"; -import { uxLog } from "../../../../common/utils"; -import * as dns from "dns"; -import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from "../../../../common/utils/notifUtils"; -import { soqlQuery } from "../../../../common/utils/apiUtils"; -import { WebSocketClient } from "../../../../common/websocketClient"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { generateCsvFile, generateReportPath } from "../../../../common/utils/filesUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import sortArray from 'sort-array'; +import { createTempDir, uxLog } from '../../../../common/utils/index.js'; +import * as dns from 'dns'; +import Papa from 'papaparse'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from 
'../../../../common/utils/notifUtils.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { CONSTANTS } from '../../../../config/index.js'; +import { FileDownloader } from '../../../../common/utils/fileDownloader.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; const dnsPromises = dns.promises; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class LegacyApi extends SfdxCommand { - public static title = "Check for legacy API use"; +export default class LegacyApi extends SfCommand { + public static title = 'Check for legacy API use'; public static description = `Checks if an org uses retired or someday retired API version\n @@ -29,166 +31,178 @@ See article below [![Handle Salesforce API versions Deprecation like a pro](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deprecated-api.jpg)](https://nicolas.vuillamy.fr/handle-salesforce-api-versions-deprecation-like-a-pro-335065f52238) -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-deprecated-api-calls/) and can output Grafana, Slack and MsTeams Notifications. 
+This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-deprecated-api-calls/) and can output Grafana, Slack and MsTeams Notifications. `; public static examples = [ - "$ sfdx hardis:org:diagnose:legacyapi", - "$ sfdx hardis:org:diagnose:legacyapi -u hardis@myclient.com", - "$ sfdx hardis:org:diagnose:legacyapi --outputfile 'c:/path/to/folder/legacyapi.csv'", - "$ sfdx hardis:org:diagnose:legacyapi -u hardis@myclient.com --outputfile ./tmp/legacyapi.csv", + '$ sf hardis:org:diagnose:legacyapi', + '$ sf hardis:org:diagnose:legacyapi -u hardis@myclient.com', + "$ sf hardis:org:diagnose:legacyapi --outputfile 'c:/path/to/folder/legacyapi.csv'", + '$ sf hardis:org:diagnose:legacyapi -u hardis@myclient.com --outputfile ./tmp/legacyapi.csv', ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - eventtype: flags.string({ - char: "e", - default: "ApiTotalUsage", - description: "Type of EventLogFile event to analyze", + public static flags: any = { + eventtype: Flags.string({ + char: 'e', + default: 'ApiTotalUsage', + description: 'Type of EventLogFile event to analyze', }), - limit: flags.number({ - char: "l", + limit: Flags.integer({ + char: 'l', default: 999, - description: "Number of latest EventLogFile events to analyze", + description: 'Number of latest EventLogFile events to analyze', }), - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; - protected apexSCannerCodeUrl = "https://raw.githubusercontent.com/pozil/legacy-api-scanner/main/legacy-api-scanner.apex"; + protected apexSCannerCodeUrl = + 'https://raw.githubusercontent.com/pozil/legacy-api-scanner/main/legacy-api-scanner.apex'; protected legacyApiDescriptors = [ { - apiFamily: ["SOAP", "REST", "BULK_API"], + apiFamily: ['SOAP', 'REST', 'BULK_API'], minApiVersion: 1.0, maxApiVersion: 6.0, - severity: "ERROR", - deprecationRelease: "Summer 21 - retirement of 1 to 6", - errors: [], + severity: 'ERROR', + deprecationRelease: 'Summer 21 - retirement of 1 to 6', + errors: [] as any[], }, { - apiFamily: ["SOAP", "REST", "BULK_API"], + apiFamily: ['SOAP', 'REST', 'BULK_API'], minApiVersion: 7.0, maxApiVersion: 20.0, - severity: "ERROR", - deprecationRelease: "Summer 22 - retirement of 7 to 20", - errors: [], + severity: 'ERROR', + deprecationRelease: 'Summer 
22 - retirement of 7 to 20', + errors: [] as any[], }, { - apiFamily: ["SOAP", "REST", "BULK_API"], + apiFamily: ['SOAP', 'REST', 'BULK_API'], minApiVersion: 21.0, maxApiVersion: 30.0, - severity: "WARNING", - deprecationRelease: "Summer 25 - retirement of 21 to 30", - errors: [], + severity: 'WARNING', + deprecationRelease: 'Summer 25 - retirement of 21 to 30', + errors: [] as any[], }, ]; - protected allErrors = []; - protected ipResultsSorted = []; + protected allErrors: any[] = []; + protected ipResultsSorted: any[] = []; protected outputFile; protected outputFilesRes: any = {}; - /* jscpd:ignore-end */ + private tempDir: string; public async run(): Promise { - this.debugMode = this.flags.debug || false; - return await this.runJsForce(); + const { flags } = await this.parse(LegacyApi); + this.debugMode = flags.debug || false; + return await this.runJsForce(flags); } // Refactoring of Philippe Ozil's apex script with JsForce queries - private async runJsForce() { - const eventType = this.flags.eventtype || "ApiTotalUsage"; - const limit = this.flags.limit || 999; - this.outputFile = this.flags.outputfile || null; + private async runJsForce(flags) { + const eventType = flags.eventtype || 'ApiTotalUsage'; + const limit = flags.limit || 999; + this.outputFile = flags.outputfile || null; - const limitConstraint = limit ? ` LIMIT ${limit}` : ""; - const conn = this.org.getConnection(); + const limitConstraint = limit ? 
` LIMIT ${limit}` : ''; + const conn = flags['target-org'].getConnection(); + this.tempDir = await createTempDir(); // Get EventLogFile records with EventType = 'ApiTotalUsage' const logCountQuery = `SELECT COUNT() FROM EventLogFile WHERE EventType = '${eventType}'`; const logCountRes = await soqlQuery(logCountQuery, conn); if (logCountRes.totalSize === 0) { - uxLog(this, c.green(`Found no EventLogFile entry of type ${eventType}.`)); - uxLog(this, c.green("This indicates that no legacy APIs were called during the log retention window.")); + uxLog("success", this, c.green(`Found no EventLogFile entry of type ${eventType}.`)); + uxLog("success", this, c.green('This indicates that no legacy APIs were called during the log retention window.')); } else { - uxLog(this, c.grey("Found " + c.bold(logCountRes.totalSize) + ` ${eventType} EventLogFile entries.`)); + uxLog("log", this, c.grey('Found ' + c.bold(logCountRes.totalSize) + ` ${eventType} EventLogFile entries.`)); } if (logCountRes.totalSize > limit) { - uxLog(this, c.yellow(`There are more than ${limit} results, you may consider to increase limit using --limit argument`)); + uxLog( + "warning", + this, + c.yellow(`There are more than ${limit} results, you may consider to increase limit using --limit argument`) + ); } // Fetch EventLogFiles with ApiTotalUsage entries - const logCollectQuery = `SELECT LogFile FROM EventLogFile WHERE EventType = '${eventType}' ORDER BY CreatedDate DESC` + limitConstraint; - uxLog(this, c.grey("Query: " + c.italic(logCollectQuery))); + const logCollectQuery = + `SELECT LogFile FROM EventLogFile WHERE EventType = '${eventType}' ORDER BY LogDate DESC` + limitConstraint; const eventLogRes: any = await soqlQuery(logCollectQuery, conn); // Collect legacy api calls from logs - uxLog(this, c.grey("Calling org API to get CSV content of each EventLogFile record, then parse and analyze it...")); + uxLog("action", this, c.cyan('Calling org API to get CSV content of each EventLogFile record, then 
parse and analyze it...')); for (const eventLogFile of eventLogRes.records) { await this.collectDeprecatedApiCalls(eventLogFile.LogFile, conn); } - this.allErrors = [...this.legacyApiDescriptors[0].errors, ...this.legacyApiDescriptors[1].errors, ...this.legacyApiDescriptors[2].errors]; + this.allErrors = [ + ...this.legacyApiDescriptors[0].errors, + ...this.legacyApiDescriptors[1].errors, + ...this.legacyApiDescriptors[2].errors, + ]; // Display summary - uxLog(this, ""); - uxLog(this, c.cyan("Results:")); + uxLog("other", this, ''); + uxLog("action", this, c.cyan('Results:')); for (const descriptor of this.legacyApiDescriptors) { const colorMethod = - descriptor.severity === "ERROR" && descriptor.errors.length > 0 + descriptor.severity === 'ERROR' && descriptor.errors.length > 0 ? c.red - : descriptor.severity === "WARNING" && descriptor.errors.length > 0 + : descriptor.severity === 'WARNING' && descriptor.errors.length > 0 ? c.yellow : c.green; - uxLog(this, colorMethod(`- ${descriptor.deprecationRelease} : ${c.bold(descriptor.errors.length)}`)); + uxLog("other", this, colorMethod(`- ${descriptor.deprecationRelease} : ${c.bold(descriptor.errors.length)}`)); } - uxLog(this, ""); + uxLog("other", this, ''); // Build command result - let msg = "No deprecated API call has been found in ApiTotalUsage logs"; + let msg = 'No deprecated API call has been found in ApiTotalUsage logs'; let statusCode = 0; - if (this.legacyApiDescriptors.filter((descriptor) => descriptor.severity === "ERROR" && descriptor.errors.length > 0).length > 0) { - msg = "Found legacy API versions calls in logs"; + if ( + this.legacyApiDescriptors.filter((descriptor) => descriptor.severity === 'ERROR' && descriptor.errors.length > 0) + .length > 0 + ) { + msg = 'Found legacy API versions calls in logs'; statusCode = 1; - uxLog(this, c.red(c.bold(msg))); - } else if (this.legacyApiDescriptors.filter((descriptor) => descriptor.severity === "WARNING" && descriptor.errors.length > 0).length > 0) { - msg 
= "Found deprecated API versions calls in logs that will not be supported anymore in the future"; + uxLog("error", this, c.red(c.bold(msg))); + } else if ( + this.legacyApiDescriptors.filter( + (descriptor) => descriptor.severity === 'WARNING' && descriptor.errors.length > 0 + ).length > 0 + ) { + msg = 'Found deprecated API versions calls in logs that will not be supported anymore in the future'; statusCode = 0; - uxLog(this, c.yellow(c.bold(msg))); + uxLog("warning", this, c.yellow(c.bold(msg))); } else { - uxLog(this, c.green(msg)); + uxLog("success", this, c.green(msg)); } // Generate main CSV file - this.outputFile = await generateReportPath("legacy-api-calls", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.allErrors, this.outputFile); + this.outputFile = await generateReportPath('legacy-api-calls', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.allErrors, this.outputFile, { fileTitle: 'Legacy API Calls' }); // Generate one summary file by severity - const outputFileIps = []; + const outputFileIps: any[] = []; for (const descriptor of this.legacyApiDescriptors) { const errors = descriptor.errors; if (errors.length > 0) { @@ -202,11 +216,11 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co // Debug or manage CSV file generation error if (this.debugMode || this.outputFile == null) { for (const descriptor of this.legacyApiDescriptors) { - uxLog(this, c.grey(`- ${descriptor.deprecationRelease} : ${JSON.stringify(descriptor.errors.length)}`)); + uxLog("log", this, c.grey(`- ${descriptor.deprecationRelease} : ${JSON.stringify(descriptor.errors.length)}`)); } } - let notifDetailText = ""; + let notifDetailText = ''; for (const descriptor of this.legacyApiDescriptors) { if (descriptor.errors.length > 0) { notifDetailText += `• ${descriptor.severity}: API version calls found in logs: ${descriptor.errors.length} (${descriptor.deprecationRelease})\n`; @@ -219,18 +233,18 @@ See article to solve 
issue before it's too late: • FR: https://leblog.hardis-group.com/portfolio/versions-dapi-salesforce-decommissionnees-que-faire/`; // Build notifications - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No deprecated Salesforce API versions are used in ${orgMarkdown}`; if (this.allErrors.length > 0) { - notifSeverity = "error"; + notifSeverity = 'error'; notifText = `${this.allErrors.length} deprecated Salesforce API versions are used in ${orgMarkdown}`; } // Post notifications - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "LEGACY_API", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'LEGACY_API', text: notifText, attachments: [{ text: notifDetailText }], buttons: notifButtons, @@ -246,7 +260,7 @@ See article to solve issue before it's too late: }, }); - if ((this.argv || []).includes("legacyapi")) { + if ((this.argv || []).includes('legacyapi')) { process.exitCode = statusCode; } @@ -262,40 +276,64 @@ See article to solve issue before it's too late: // GET csv log file and check for legacy API calls within private async collectDeprecatedApiCalls(logFileUrl: string, conn: any) { - uxLog(this, c.grey(`- Request info for ${logFileUrl} ...`)); - const logEntries = await conn.request(logFileUrl); - uxLog(this, c.grey(`-- Processing ${logEntries.length} returned entries...`)); - const severityIconError = getSeverityIcon("error"); - const severityIconWarning = getSeverityIcon("warning"); - const severityIconInfo = getSeverityIcon("info"); - for (const 
logEntry of logEntries) { - const apiVersion = logEntry.API_VERSION ? parseFloat(logEntry.API_VERSION) : parseFloat("999.0"); - // const apiType = logEntry.API_TYPE || null ; - const apiFamily = logEntry.API_FAMILY || null; - - for (const legacyApiDescriptor of this.legacyApiDescriptors) { - if ( - legacyApiDescriptor.apiFamily.includes(apiFamily) && - legacyApiDescriptor.minApiVersion <= apiVersion && - legacyApiDescriptor.maxApiVersion >= apiVersion - ) { - logEntry.SFDX_HARDIS_DEPRECATION_RELEASE = legacyApiDescriptor.deprecationRelease; - logEntry.SFDX_HARDIS_SEVERITY = legacyApiDescriptor.severity; - if (legacyApiDescriptor.severity === "ERROR") { - logEntry.severity = "error"; - logEntry.severityIcon = severityIconError; - } else if (legacyApiDescriptor.severity === "WARNING") { - logEntry.severity = "warning"; - logEntry.severityIcon = severityIconWarning; - } else { - // severity === 'INFO' - logEntry.severity = "info"; - logEntry.severityIcon = severityIconInfo; - } - legacyApiDescriptor.errors.push(logEntry); - break; - } - } + // Load icons + const severityIconError = getSeverityIcon('error'); + const severityIconWarning = getSeverityIcon('warning'); + const severityIconInfo = getSeverityIcon('info'); + + // Download file as stream, and process chuck by chuck + uxLog("log", this, c.grey(`- processing ${logFileUrl}...`)); + const fetchUrl = `${conn.instanceUrl}${logFileUrl}`; + const outputFile = path.join(this.tempDir, Math.random().toString(36).substring(7) + ".csv"); + const downloadResult = await new FileDownloader(fetchUrl, { conn: conn, outputFile: outputFile }).download(); + if (downloadResult.success) { + uxLog("log", this, c.grey(`-- parsing downloaded CSV from ${outputFile} and check for deprecated calls...`)); + const outputFileStream = fs.createReadStream(outputFile, { encoding: 'utf8' }); + await new Promise((resolve, reject) => { + Papa.parse(outputFileStream, { + header: true, + worker: true, + chunk: (results) => { + // Look in check the 
entries that match a deprecation description + for (const logEntry of results.data as any[]) { + const apiVersion = logEntry.API_VERSION ? parseFloat(logEntry.API_VERSION) : parseFloat('999.0'); + const apiFamily = logEntry.API_FAMILY || null; + for (const legacyApiDescriptor of this.legacyApiDescriptors) { + if ( + legacyApiDescriptor.apiFamily.includes(apiFamily) && + legacyApiDescriptor.minApiVersion <= apiVersion && + legacyApiDescriptor.maxApiVersion >= apiVersion + ) { + logEntry.SFDX_HARDIS_DEPRECATION_RELEASE = legacyApiDescriptor.deprecationRelease; + logEntry.SFDX_HARDIS_SEVERITY = legacyApiDescriptor.severity; + if (legacyApiDescriptor.severity === 'ERROR') { + logEntry.severity = 'error'; + logEntry.severityIcon = severityIconError; + } else if (legacyApiDescriptor.severity === 'WARNING') { + logEntry.severity = 'warning'; + logEntry.severityIcon = severityIconWarning; + } else { + // severity === 'INFO' + logEntry.severity = 'info'; + logEntry.severityIcon = severityIconInfo; + } + legacyApiDescriptor.errors.push(logEntry); + break; + } + } + } + }, + complete: function () { + resolve(true); + }, + error: function (error) { + reject(error); + }, + }); + }); + } + else { + uxLog("warning", this, c.yellow(`Warning: Unable to process logs of ${logFileUrl}`)); } } @@ -310,7 +348,7 @@ See article to solve issue before it's too late: } } // Try to get hostname for ips - const ipResults = []; + const ipResults: any[] = []; for (const ip of Object.keys(ipList)) { const ipInfo = ipList[ip]; let hostname; @@ -318,24 +356,24 @@ See article to solve issue before it's too late: hostname = await dnsPromises.reverse(ip); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { - hostname = "unknown"; + hostname = 'unknown'; } const ipResult = { CLIENT_IP: ip, CLIENT_HOSTNAME: hostname, SFDX_HARDIS_COUNT: ipInfo.count }; ipResults.push(ipResult); } this.ipResultsSorted = sortArray(ipResults, { - by: ["SFDX_HARDIS_COUNT"], - order: ["desc"], + by: 
['SFDX_HARDIS_COUNT'], + order: ['desc'], }); // Write output CSV with client api info - const outputFileIps = this.outputFile.endsWith(".csv") - ? this.outputFile.replace(".csv", ".api-clients-" + severity + ".csv") - : this.outputFile + "api-clients-" + severity + ".csv"; - const outputFileIpsRes = await generateCsvFile(this.ipResultsSorted, outputFileIps); + const outputFileIps = this.outputFile.endsWith('.csv') + ? this.outputFile.replace('.csv', '.api-clients-' + severity + '.csv') + : this.outputFile + 'api-clients-' + severity + '.csv'; + const outputFileIpsRes = await generateCsvFile(this.ipResultsSorted, outputFileIps, { fileTitle: `Legacy API Clients - ${severity}` }); if (outputFileIpsRes.xlsxFile) { this.outputFilesRes.xlsxFile2 = outputFileIpsRes.xlsxFile; } - uxLog(this, c.italic(c.cyan(`Please see info about ${severity} API callers in ${c.bold(outputFileIps)}`))); + uxLog("other", this, c.italic(c.cyan(`Please see info about ${severity} API callers in ${c.bold(outputFileIps)}`))); return outputFileIps; } } diff --git a/src/commands/hardis/org/diagnose/licenses.ts b/src/commands/hardis/org/diagnose/licenses.ts index b7c181df7..1a8a39308 100644 --- a/src/commands/hardis/org/diagnose/licenses.ts +++ b/src/commands/hardis/org/diagnose/licenses.ts @@ -1,57 +1,81 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { uxLog } from "../../../../common/utils"; -import { soqlQuery } from "../../../../common/utils/apiUtils"; -import { generateCsvFile, generateReportPath } from "../../../../common/utils/filesUtils"; -import { NotifProvider } from "../../../../common/notifProvider"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; 
+import { sortCrossPlatform, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { NotifProvider } from '../../../../common/notifProvider/index.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DiagnoseUnusedUsers extends SfCommand { + public static title = 'List licenses subscribed and used in a Salesforce org'; -export default class DiagnoseUnusedUsers extends SfdxCommand { - public static title = "List licenses subscribed and used in a Salesforce org"; + public static description = ` +**Lists and analyzes User Licenses and Permission Set Licenses subscribed and used in a Salesforce org.** - public static description = `Mostly used for monitoring (Grafana) but you can also use it manually :)`; +This command provides a comprehensive overview of your Salesforce license consumption. It's particularly useful for: - public static examples = ["$ sfdx hardis:org:diagnose:licenses"]; +- **License Management:** Understanding which licenses are active, how many are available, and how many are being used. +- **Cost Optimization:** Identifying unused or underutilized licenses that could be reallocated or decommissioned. +- **Compliance:** Ensuring that your organization is compliant with Salesforce licensing agreements. 
+- **Monitoring:** Tracking license usage trends over time. + +Key functionalities: + +- **User License Details:** Retrieves information about standard and custom User Licenses, including \`MasterLabel\`, \`Name\`, \`TotalLicenses\`, and \`UsedLicenses\`. +- **Permission Set License Details:** Retrieves information about Permission Set Licenses, including \`MasterLabel\`, \`PermissionSetLicenseKey\`, \`TotalLicenses\`, and \`UsedLicenses\`. +- **Used Licenses Filter:** The \`--usedonly\` flag allows you to filter the report to show only licenses that have at least one \`UsedLicenses\` count greater than zero. +- **CSV Report Generation:** Generates a CSV file containing all the retrieved license information, suitable for detailed analysis. +- **Notifications:** Sends notifications to configured channels (e.g., Grafana, Slack, MS Teams) with a summary of license usage, including lists of active and used licenses. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Queries:** It executes SOQL queries against the \`UserLicense\` and \`PermissionSetLicense\` objects in Salesforce to retrieve license data. +- **Data Transformation:** It processes the query results, reformatting the data to be more readable and consistent for reporting purposes (e.g., removing \`Id\` and \`attributes\`, renaming \`PermissionSetLicenseKey\` to \`Name\`). +- **Data Aggregation:** It aggregates license information, creating a \`licensesByKey\` object for quick lookups and a \`usedLicenses\` array for a concise list of actively used licenses. +- **Report Generation:** It uses \`generateCsvFile\` to create the CSV report of license data. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **User Feedback:** Provides clear messages to the user about the license extraction process and the used licenses. +
+`; + + public static examples = ['$ sf hardis:org:diagnose:licenses']; //Comment default values to test the prompts - protected static flagsConfig = { - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - usedonly: flags.boolean({ - char: "u", + usedonly: Flags.boolean({ + char: 'u', default: false, - description: "Filter to have only used licenses", + description: 'Filter to have only used licenses', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected usedOnly = false; protected debugMode = false; @@ -63,16 +87,17 @@ export default class DiagnoseUnusedUsers extends SfdxCommand { /* jscpd:ignore-end */ public async run(): Promise { - this.usedOnly = this.flags.usedonly || false; - this.debugMode = this.flags.debug || false; - 
this.outputFile = this.flags.outputfile || null; + const { flags } = await this.parse(DiagnoseUnusedUsers); + this.usedOnly = flags.usedonly || false; + this.debugMode = flags.debug || false; + this.outputFile = flags.outputfile || null; // Retrieve the list of users who haven't logged in for a while - const conn = this.org.getConnection(); - uxLog(this, c.cyan(`Extracting Licenses from ${conn.instanceUrl} ...` + this.usedOnly ? "(used only)" : "")); + const conn = flags['target-org'].getConnection(); + uxLog("action", this, c.cyan(`Extracting Licenses from ${conn.instanceUrl} ...` + this.usedOnly ? '(used only)' : '')); const licensesByKey = {}; - const usedLicenses = []; + const usedLicenses: any[] = []; // Query User Licenses const userLicenseQuery = @@ -85,7 +110,7 @@ export default class DiagnoseUnusedUsers extends SfdxCommand { const userLicenseInfo = Object.assign({}, userLicense); delete userLicenseInfo.Id; delete userLicenseInfo.attributes; - userLicenseInfo.type = "UserLicense"; + userLicenseInfo.type = 'UserLicense'; licensesByKey[userLicenseInfo.MasterLabel] = userLicenseInfo.TotalLicenses; if (userLicenseInfo.UsedLicenses > 0) { usedLicenses.push(userLicenseInfo.MasterLabel); @@ -110,7 +135,7 @@ export default class DiagnoseUnusedUsers extends SfdxCommand { delete pslInfo.Id; delete pslInfo.attributes; delete pslInfo.PermissionSetLicenseKey; - pslInfo.type = "PermissionSetLicense"; + pslInfo.type = 'PermissionSetLicense'; licensesByKey[pslInfo.MasterLabel] = pslInfo.TotalLicenses; if (pslInfo.UsedLicenses > 0) { usedLicenses.push(pslInfo.MasterLabel); @@ -119,19 +144,20 @@ export default class DiagnoseUnusedUsers extends SfdxCommand { }); this.licenses.push(...pslLicenses); - usedLicenses.sort(); - console.table(this.licenses); - uxLog(this, c.cyan("Used licenses: " + usedLicenses.join(", "))); + sortCrossPlatform(usedLicenses); + uxLog("action", this, c.cyan('Used licenses: ' + usedLicenses.join(', '))); + uxLogTable(this, this.licenses); + // 
Generate output CSV file - this.outputFile = await generateReportPath("licenses", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.licenses, this.outputFile); - - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "LICENSES", - text: "", - severity: "log", + this.outputFile = await generateReportPath('licenses', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.licenses, this.outputFile, { fileTitle: 'Unused Licenses' }); + + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'LICENSES', + text: '', + severity: 'log', attachedFiles: this.outputFilesRes.xlsxFile ? [this.outputFilesRes.xlsxFile] : [], logElements: this.licenses, data: { diff --git a/src/commands/hardis/org/diagnose/releaseupdates.ts b/src/commands/hardis/org/diagnose/releaseupdates.ts new file mode 100644 index 000000000..240f8435d --- /dev/null +++ b/src/commands/hardis/org/diagnose/releaseupdates.ts @@ -0,0 +1,142 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { soqlQueryTooling } from '../../../../common/utils/apiUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import moment from 'moment'; +import { CONSTANTS } from '../../../../config/index.js'; +import { setConnectionVariables } from 
'../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DiagnoseReleaseUpdates extends SfCommand { + public static title = 'Check Release Updates of an org'; + + public static description = `Export Release Updates into a CSV file with selected criteria, and highlight Release Updates that should be checked. + +Before publishing **Breaking Changes** ❌, Salesforce announce them in the setup menu [**Release Updates**](https://help.salesforce.com/s/articleView?id=sf.release_updates.htm&type=5) + +⚠️ Some of them are very important, because if you don't make the related upgrades in time (ex: before Winter 25) , your production org can crash ! + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-release-updates/) and can output Grafana, Slack and MsTeams Notifications. +`; + + public static examples = [ + '$ sf hardis:org:diagnose:releaseupdates', + ]; + + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + public static requiresProject = false; + + protected debugMode = false; + + protected releaseUpdatesRecords: any[] = []; + protected outputFile; + protected outputFilesRes: any = {}; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(DiagnoseReleaseUpdates); + this.debugMode = flags.debug || false; + this.outputFile = flags.outputfile || null; + const conn = flags['target-org'].getConnection(); + uxLog("action", this, c.cyan(`Extracting Release Updates and checks to perform in ${conn.instanceUrl} ...`)); + + // Fetch ReleaseUpdate records + const releaseUpdatesQuery = + `SELECT StepStage,Status,Category,Title,DueDate,Description,Release,ReleaseLabel,ReleaseDate,ApiVersion,DurableId,HasNewSteps,IsReleased,SupportsRevoke,DeveloperName ` + + `FROM ReleaseUpdate ` + + `WHERE StepStage IN ('Upcoming','OverDue') AND Status IN ('Invocable','Revocable','Nascent','Invoked','Info') AND DueDate >= LAST_N_DAYS:60 ` + + `ORDER BY DueDate ASC`; + const queryRes = await soqlQueryTooling(releaseUpdatesQuery, conn); + const severityIconWarning = getSeverityIcon('warning'); + const severityIconError = getSeverityIcon('error'); + this.releaseUpdatesRecords = queryRes.records.map((record) => { + delete record.attributes + record.severityIcon = record.StepStage === 'OverDue' ? 
severityIconError : severityIconWarning; + return record; + }); + + // Process result + if (this.releaseUpdatesRecords.length > 0) { + // Generate output CSV file + this.outputFile = await generateReportPath('release-updates', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.releaseUpdatesRecords, this.outputFile, { fileTitle: 'Release Updates to Check' }); + + // Build notification + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); + const notifButtons = await getNotificationButtons(); + const notifSeverity: NotifSeverity = 'warning'; + const notifText = `${this.releaseUpdatesRecords.length} Release Updates to check have been found in ${orgMarkdown}` + let notifDetailText = ''; + for (const releaseUpdate of this.releaseUpdatesRecords) { + notifDetailText += `• *${releaseUpdate.Title}* (${releaseUpdate.StepStage},${releaseUpdate.Status},${releaseUpdate.Category}), due for ${moment(releaseUpdate.DueDate).format("ll")}\n`; + } + const notifAttachments = [{ text: notifDetailText }]; + // Post notif + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'RELEASE_UPDATES', + text: notifText, + attachments: notifAttachments, + buttons: notifButtons, + severity: notifSeverity, + attachedFiles: this.outputFilesRes.xlsxFile ? 
[this.outputFilesRes.xlsxFile] : [], + logElements: this.releaseUpdatesRecords, + data: { metric: this.releaseUpdatesRecords.length }, + metrics: { + ReleaseUpdates: this.releaseUpdatesRecords.length, + }, + }); + + // Display output + const releaseUpdatesLight = this.releaseUpdatesRecords.map(releaseUpdate => { + return { + Title: releaseUpdate.Title, + StepStage: releaseUpdate.StepStage, + Status: releaseUpdate.Status, + Category: releaseUpdate.Category, + DueDate: moment(releaseUpdate.DueDate).format('ll') + } + }) + + uxLog("action", this, c.cyan(notifText)); + uxLogTable(this, releaseUpdatesLight); + } + else { + uxLog("success", this, c.green("No release updates has been found")); + } + + // Return an object to be displayed with --json + return { + status: this.releaseUpdatesRecords.length > 0 ? 1 : 0, + suspectRecords: this.releaseUpdatesRecords, + csvLogFile: this.outputFile, + }; + } +} diff --git a/src/commands/hardis/org/diagnose/unsecure-connected-apps.ts b/src/commands/hardis/org/diagnose/unsecure-connected-apps.ts new file mode 100644 index 000000000..0d47800fe --- /dev/null +++ b/src/commands/hardis/org/diagnose/unsecure-connected-apps.ts @@ -0,0 +1,254 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { bulkQuery, soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { getNotificationButtons, getOrgMarkdown } from '../../../../common/utils/notifUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; +import { CONSTANTS } from 
'../../../../config/index.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; +import sortArray from 'sort-array'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class UnsecuredConnectedApps extends SfCommand { + public static title = 'Detect Unsecured Connected Apps'; + + public static description = ` +## Command Behavior + +**Detects unsecured Connected Apps in a Salesforce org and generates detailed reports for security analysis.** + +This command is a critical security diagnostic tool that helps administrators identify Connected Apps that may pose security risks due to improper configuration. It provides comprehensive analysis of OAuth tokens and Connected App security settings to ensure proper access control. + +Key functionalities: + +- **OAuth Token Analysis:** Queries all OAuth tokens in the org using SOQL to retrieve comprehensive token information including app names, users, authorization status, and usage statistics. +- **Security Status Assessment:** Evaluates each Connected App's security configuration by checking the \`IsUsingAdminAuthorization\` flag to determine if admin pre-approval is required. +- **Unsecured App Detection:** Identifies Connected Apps that allow users to authorize themselves without admin approval, which can pose security risks. +- **Detailed Reporting:** Generates two comprehensive CSV reports: + - **OAuth Tokens Report:** Lists all OAuth tokens with security status, user information, and usage data + - **Connected Apps Summary:** Aggregates unsecured Connected Apps with counts of associated OAuth tokens +- **Visual Indicators:** Uses status icons (❌ for unsecured, ✅ for secured) to provide immediate visual feedback on security status. +- **Security Recommendations:** Provides actionable guidance on how to secure Connected Apps through proper configuration. 
+- **Notifications:** Sends alerts to configured channels (Grafana, Slack, MS Teams) with security findings and attached reports. + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-org-security/) and can output Grafana, Slack and MsTeams Notifications. + + + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Query Execution:** Executes a comprehensive SOQL query on the \`OauthToken\` object, joining with \`AppMenuItem\` and \`User\` objects to gather complete security context. +- **Security Analysis Logic:** Analyzes the \`AppMenuItem.IsUsingAdminAuthorization\` field to determine if a Connected App requires admin pre-approval for user authorization. +- **Data Transformation:** Processes raw SOQL results to add security status indicators and reorganize data for optimal reporting and analysis. +- **Aggregation Processing:** Groups OAuth tokens by Connected App name to provide summary statistics and identify the most problematic applications. +- **Report Generation:** Uses \`generateCsvFile\` to create structured CSV reports with proper formatting and metadata for easy analysis and sharing. +- **Notification Integration:** Integrates with the \`NotifProvider\` to send security alerts with detailed metrics, including the number of unsecured Connected Apps and associated OAuth tokens. +- **File Management:** Generates multiple output formats (CSV, XLSX) and manages file paths using \`generateReportPath\` for consistent report organization. +- **Connection Management:** Uses \`setConnectionVariables\` to ensure proper authentication context for notification providers that require org connection details. +
+`; + + public static examples = [ + '$ sf hardis:org:diagnose:unsecure-connected-apps', + ]; + + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + public static requiresProject = false; + + protected debugMode = false; + + protected connectedAppResults: any[] = []; + protected outputFile; + protected outputFilesRes: any = {}; + protected outputFileConnectedApps; + protected outputFilesResConnectedApps: any = {}; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(UnsecuredConnectedApps); + this.debugMode = flags.debug || false; + this.outputFile = flags.outputfile || null; + const conn = flags['target-org'].getConnection(); + + // Collect all Connected Apps + uxLog("action", this, c.cyan(`Extracting all OAuth Tokens from ${conn.instanceUrl} ...`)); + const tokensCountQuery = `SELECT count() FROM OauthToken`; + const tokensCountQueryRes = await soqlQuery(tokensCountQuery, conn); + const totalTokens = tokensCountQueryRes.totalSize; + uxLog("log", this, `${totalTokens} OAuth Tokens found.`); + + const allOAuthTokenQuery = + `SELECT AppName, AppMenuItem.IsUsingAdminAuthorization, LastUsedDate, CreatedDate, User.Name , User.Profile.Name, UseCount FROM OAuthToken ORDER BY CreatedDate ASC`; + const allOAuthTokenQueryRes = await bulkQuery(allOAuthTokenQuery, conn); + const allOAuthTokens = allOAuthTokenQueryRes.records; + + // If not all OAuth token has been found, it means SF hard limit of 2500 OAuth Tokens has been reached + // Recursively get remaining 
tokens using latest found Id as constraint + if (allOAuthTokens.length < totalTokens) { + uxLog("warning", this, c.yellow(`Salesforce API limit of 2500 OAuth Tokens reached. We will need to re-query to get all tokens...`)); + let lastCreatedDate = allOAuthTokens.length > 0 ? allOAuthTokens[allOAuthTokens.length - 1].CreatedDate : null; + while (lastCreatedDate != null) { + const remainingTokensQuery = `SELECT AppName, AppMenuItem.IsUsingAdminAuthorization, LastUsedDate, CreatedDate, User.Name , User.Profile.Name, UseCount FROM OAuthToken WHERE CreatedDate > ${lastCreatedDate} ORDER BY CreatedDate ASC`; + const remainingTokensQueryRes = await bulkQuery(remainingTokensQuery, conn); + const remainingTokens = remainingTokensQueryRes.records; + if (remainingTokens.length > 0) { + allOAuthTokens.push(...remainingTokens); + lastCreatedDate = remainingTokens[remainingTokens.length - 1].CreatedDate; + uxLog("log", this, `${allOAuthTokens.length} / ${totalTokens} OAuth Tokens retrieved...`); + if (allOAuthTokens.length >= totalTokens) { + lastCreatedDate = null; + } + } else { + lastCreatedDate = null; + } + } + } + uxLog("log", this, `${allOAuthTokens.length} OAuth Tokens retrieved.`); + sortArray(allOAuthTokens, { by: 'AppName' }); + + const allOAuthTokensWithStatus = allOAuthTokens.map(app => { + const adminPreApproved = app["AppMenuItem.IsUsingAdminAuthorization"] ?? false; + const appResult = { + AppName: app.AppName, + "Status": adminPreApproved ? '✅ Secured' : '❌ Unsecured', + "Admin Pre-Approved": adminPreApproved ? 'Yes' : 'No', + "User": app["User.Name"] ? app["User.Name"] : 'N/A', + "User Profile": app["User.Profile.Name"] ? app["User.Profile.Name"] : 'N/A', + "Last Used Date": app.LastUsedDate ? new Date(app.LastUsedDate).toISOString().split('T')[0] : 'N/A', + "Created Date": app.CreatedDate ? new Date(app.CreatedDate).toISOString().split('T')[0] : 'N/A', + "Use Count": app.UseCount ? 
app.UseCount : 0, + } + return appResult; + }); + + // Generate output CSV file + this.outputFile = await generateReportPath('unsecured-oauth-tokens', this.outputFile); + this.outputFilesRes = await generateCsvFile(allOAuthTokensWithStatus, this.outputFile, { fileTitle: "Unsecured OAuth Tokens" }); + + const unsecuredOAuthTokens = allOAuthTokensWithStatus.filter(app => app.Status === '❌ Unsecured'); + + // Display results + uxLog("action", this, `${unsecuredOAuthTokens.length} unsecured OAuth Tokens found.`); + uxLogTable(this, unsecuredOAuthTokens); + + const uniqueUnsecuredAppNamesAndTokenNumber: { [key: string]: number } = {}; + const uniqueUnsecuredAppNamesAndProfiles: { [key: string]: Set } = {}; + const uniqueUnsecuredAppNamesAndLastUsageDate: { [key: string]: string } = {}; + for (const app of unsecuredOAuthTokens) { + if (uniqueUnsecuredAppNamesAndTokenNumber[app.AppName]) { + uniqueUnsecuredAppNamesAndTokenNumber[app.AppName]++; + } + else { + uniqueUnsecuredAppNamesAndTokenNumber[app.AppName] = 1; + } + if (!uniqueUnsecuredAppNamesAndProfiles[app.AppName]) { + uniqueUnsecuredAppNamesAndProfiles[app.AppName] = new Set(); + } + if (app["User Profile"] && app["User Profile"] !== 'N/A') { + uniqueUnsecuredAppNamesAndProfiles[app.AppName].add(app["User Profile"]); + } + if (app["Last Used Date"] && app["Last Used Date"] !== 'N/A') { + const latestUsageDate = uniqueUnsecuredAppNamesAndLastUsageDate[app.AppName]; + if (!latestUsageDate || new Date(app["Last Used Date"]) > new Date(latestUsageDate)) { + uniqueUnsecuredAppNamesAndLastUsageDate[app.AppName] = app["Last Used Date"]; + } + } + } + const uniqueUnsecuredAppNames = Object.keys(uniqueUnsecuredAppNamesAndTokenNumber); + const uniqueUnsecureConnectedAppsWithTokens = uniqueUnsecuredAppNames.map(appName => { + return { + AppName: appName, + NumberOfUnsecuredOAuthTokens: uniqueUnsecuredAppNamesAndTokenNumber[appName], + LatestUsageDate: uniqueUnsecuredAppNamesAndLastUsageDate[appName] || "N/A", + 
ProfilesOfUsersUsingIt: Array.from(uniqueUnsecuredAppNamesAndProfiles[appName] || []).sort().join(', '), + } + }); + this.outputFileConnectedApps = await generateReportPath('unsecured-connected-apps', this.outputFileConnectedApps); + this.outputFilesResConnectedApps = await generateCsvFile(uniqueUnsecureConnectedAppsWithTokens, this.outputFileConnectedApps, { fileTitle: "Unsecured Connected Apps" }); + if (uniqueUnsecuredAppNames.length > 0) { + uxLog("action", this, c.cyan(`${uniqueUnsecuredAppNames.length} unsecured Connected Apps found.`)); + uxLogTable(this, uniqueUnsecureConnectedAppsWithTokens); + uxLog("warning", this, `You need to either block or secure these Connected Apps. +To block a connected app, click on "Block" +To secure a connected app: + - Install it if not installed + - Click on "Manage Policies" + - Set "Admin Users are pre-approved" then save + - Select profiles/permission sets allowed to access the connected app + - Users will then need to authenticate again`); + } + + // Build notification + const numberWarnings = uniqueUnsecuredAppNames.length; + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); + const notifButtons = await getNotificationButtons(); + const notifSeverity: NotifSeverity = numberWarnings > 0 ? 
'warning' : 'log'; + const notifText = `${numberWarnings} Unsecured connected Apps have been found in ${orgMarkdown}` + let notifDetailText = ''; + for (const connectedApp of uniqueUnsecureConnectedAppsWithTokens) { + notifDetailText += `• *${connectedApp.AppName}* (${connectedApp.NumberOfUnsecuredOAuthTokens} OAuth Tokens)\n`; + } + const notifAttachments = [{ text: notifDetailText }]; + // Post notif + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'UNSECURED_CONNECTED_APPS', + text: notifText, + attachments: notifAttachments, + buttons: notifButtons, + severity: notifSeverity, + attachedFiles: this.outputFileConnectedApps.xlsxFile ? [this.outputFileConnectedApps.xlsxFile] : [], + logElements: uniqueUnsecureConnectedAppsWithTokens, + data: { metric: numberWarnings }, + metrics: { + UnsecuredConnectedApps: numberWarnings, + }, + }); + + // Display link to Setup UI if there are issues + if (numberWarnings > 0) { + const OAuthUsageSetupUrl = `${conn.instanceUrl}/lightning/setup/ConnectedAppsUsage/home`; + WebSocketClient.sendReportFileMessage(OAuthUsageSetupUrl, 'Review OAuth Connected Apps', "actionUrl"); + } + + if ((this.argv || []).includes('unsecure-connected-apps')) { + process.exitCode = numberWarnings > 0 ? 1 : 0; + } + + return { + status: numberWarnings === 0 ? 
'success' : 'warning', + allOAuthTokensWithStatus: allOAuthTokensWithStatus, + unsecuredOAuthTokens: unsecuredOAuthTokens, + unsecuredConnectedApps: uniqueUnsecuredAppNames as AnyJson[], + } + } +} \ No newline at end of file diff --git a/src/commands/hardis/org/diagnose/unused-apex-classes.ts b/src/commands/hardis/org/diagnose/unused-apex-classes.ts new file mode 100644 index 000000000..d324db29d --- /dev/null +++ b/src/commands/hardis/org/diagnose/unused-apex-classes.ts @@ -0,0 +1,314 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { git, isGitRepo, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { soqlQuery, soqlQueryTooling } from '../../../../common/utils/apiUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import { CONSTANTS } from '../../../../config/index.js'; +import moment from 'moment'; +import sortArray from 'sort-array'; +import { MetadataUtils } from '../../../../common/metadata-utils/index.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DiagnoseUnusedApexClasses extends SfCommand { + public static title = 'Detect unused Apex classes in an org'; + + public static description = `List all async Apex classes (Batch,Queueable,Schedulable) that has not been called for more than 365 days. 
+ +The result class list probably can be removed from the project, and that will improve your test classes performances :) + +The number of unused day is overridable using --days option. + +The command uses queries on AsyncApexJob and CronTrigger technical tables to build the result. + +Apex Classes CreatedBy and CreatedOn fields are calculated from MIN(date from git, date from org) + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-unused-apex-classes/) and can output Grafana, Slack and MsTeams Notifications. + +![](${CONSTANTS.DOC_URL_ROOT}/assets/images/screenshot-monitoring-unused-apex-grafana.jpg) +`; + + public static examples = [ + '$ sf hardis:org:diagnose:unused-apex-classes', + '$ sf hardis:org:diagnose:unused-apex-classes --days 700' + ]; + + //Comment default values to test the prompts + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', + }), + days: Flags.integer({ + char: 't', + description: + 'Extracts the users that have been inactive for the amount of days specified. 
In CI, default is 180 days', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = false; + + protected debugMode = false; + protected outputFile; + protected outputFilesRes: any = {}; + protected lastNdays: number; + protected asyncClassList: any[] = []; + protected unusedNumber: number = 0; + protected statusCode = 0; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(DiagnoseUnusedApexClasses); + this.debugMode = flags.debug || false; + this.outputFile = flags.outputfile || null; + this.lastNdays = Number(flags.days || 365); + + // Calculate lastNdays to use + const conn = flags['target-org'].getConnection(); + + // Retrieve the list of Apex class that are BatchApex, ScheduledApex or Queueable + await this.listAsyncApexClasses(conn); + + // Find latest AsyncJob for each class + const latestJobs = await this.findLatestApexJobsForEachClass(conn); + + const jobTriggers = await this.findCronTriggers(conn); + + // Aggregate results + this.matchClassesWithJobs(latestJobs, jobTriggers); + + // Build result text + const summary = this.displaySummaryOutput(); + + // Generate output CSV file + this.outputFile = await generateReportPath('unused-apex-classes', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.asyncClassList, this.outputFile, { fileTitle: 'Unused Apex Classes' }); + + // Exit code + if ((this.argv || []).includes('unused-apex-classes')) { + process.exitCode = this.statusCode; + } + + // Manage notifications + await this.manageNotifications(); + + 
// Return an object to be displayed with --json + return { + status: this.statusCode, + summary: summary, + asyncClassList: this.asyncClassList, + csvLogFile: this.outputFile, + xlsxLogFile: this.outputFilesRes.xlsxFile, + }; + } + + private async findCronTriggers(conn: any) { + uxLog("action", this, c.cyan(`Retrieving CronTriggers from org ${conn.instanceUrl}...`)); + const cronTriggersQuery = `SELECT Id, CronJobDetail.JobType, CronJobDetail.Name, State, NextFireTime FROM CronTrigger WHERE State IN ('WAITING', 'ACQUIRED', 'EXECUTING', 'PAUSED', 'BLOCKED', 'PAUSED_BLOCKED')`; + const cronTriggersResult = await soqlQuery(cronTriggersQuery, conn); + return cronTriggersResult.records; + } + + private displaySummaryOutput() { + uxLog("action", this, c.cyan(`Found ${this.unusedNumber} async Apex classes that might not be used anymore:`)); + let summary = `All async apex classes have been called during the latest ${this.lastNdays} days.`; + if (this.unusedNumber > 0) { + summary = `${this.unusedNumber} apex classes might not be used anymore. +Note: Salesforce does not provide all info to be 100% sure that a class is not used, so double-check before deleting them :)` + ; + const summaryClasses = this.asyncClassList.map(apexClass => { + return { + severity: `${apexClass.severityIcon}`, + name: apexClass.Name, + AsyncType: apexClass.AsyncType, + latestJobDate: apexClass.latestJobDate ? moment(apexClass.latestJobDate).format('YYYY-MM-DD hh:mm') : "Not found", + latestJobRunDays: apexClass.latestJobRunDays, + nextJobDate: apexClass.nextJobDate ? 
moment(apexClass.nextJobDate).format('YYYY-MM-DD hh:mm') : "None", + queued: apexClass.queued, + classCreatedOn: moment(apexClass.ClassCreatedDate).format('YYYY-MM-DD'), + classCreatedBy: apexClass.ClassCreatedBy + }; + }); + uxLogTable(this, summaryClasses); + } + + if (this.unusedNumber > 0) { + uxLog("warning", this, c.yellow(summary)); + } else { + uxLog("success", this, c.green(summary)); + } + return summary; + } + + private matchClassesWithJobs(latestJobsAll: any[], cronTriggers: any[]) { + uxLog("action", this, c.cyan(`Matching async Apex classes with latest jobs and cron triggers...`)); + this.asyncClassList = this.asyncClassList.map(apexClass => { + const futureJobs = cronTriggers.filter(cronJob => apexClass.Name === cronJob.CronJobDetail.Name); + apexClass.nextJobDate = ""; + apexClass.queued = false; + if (futureJobs.length > 0) { + apexClass.nextJobDate = futureJobs[0].NextFireTime; + } + const relatedJobs = latestJobsAll.filter(job => job.ApexClassId === apexClass.Id); + if (relatedJobs.length === 0) { + apexClass.latestJobDate = ""; + apexClass.latestJobRunDays = 99999; + if (apexClass.nextJobDate === "") { + apexClass.severity = "warning"; + this.unusedNumber++; + } + } + else { + const queuedJobs = relatedJobs.filter(job => job.Status === "Queued"); + if (queuedJobs.length > 0) { + apexClass.queued = true; + } + apexClass.latestJobDate = relatedJobs[0].expr0; + const today = moment(); + apexClass.latestJobRunDays = today.diff(apexClass.latestJobDate, 'days'); + if (apexClass.latestJobRunDays > this.lastNdays && apexClass.nextJobDate === "" && apexClass.queued === false) { + apexClass.severity = "warning"; + this.unusedNumber++; + } + else { + apexClass.severity = "info"; + } + } + apexClass.severityIcon = getSeverityIcon(apexClass.severity); + delete apexClass.Id; + return apexClass; + }); + this.asyncClassList = sortArray(this.asyncClassList, { by: ['latestJobRunDays', 'Name'], order: ['desc', 'asc'] }) as any[]; + if (this.unusedNumber > 0) { + 
this.statusCode = 1; + } + } + + private async findLatestApexJobsForEachClass(conn: any) { + uxLog("action", this, c.cyan(`Retrieving latest Apex jobs from org ${conn.instanceUrl}...`)); + const classIds = this.asyncClassList.map(apexClass => apexClass.Id); + const query = `SELECT ApexClassId, Status, MAX(CreatedDate)` + + ` FROM AsyncApexJob` + + ` WHERE JobType IN ('BatchApex', 'ScheduledApex', 'Queueable') AND ApexClassId IN ('${classIds.join("','")}') GROUP BY ApexClassId, Status`; + const latestJobQueryRes = await soqlQuery(query, conn); + const latestJobs = latestJobQueryRes.records; + return latestJobs; + } + + private async listAsyncApexClasses(conn: any) { + uxLog("action", this, c.cyan(`Retrieving async Apex classes from org ${conn.instanceUrl}...`)); + const classListRes = await soqlQueryTooling("SELECT Id, Name, Body FROM ApexClass WHERE ManageableState ='unmanaged' ORDER BY Name ASC", conn); + const allClassList: any[] = classListRes.records || []; + for (const classItem of allClassList) { + if (classItem.Body.includes("implements Database.Batchable")) { + this.asyncClassList.push({ Id: classItem.Id, Name: classItem.Name, AsyncType: "Database.Batchable" }); + } + else if (classItem.Body.includes("implements Queueable")) { + this.asyncClassList.push({ Id: classItem.Id, Name: classItem.Name, AsyncType: "Queueable" }); + } + else if (classItem.Body.includes("implements Schedulable")) { + this.asyncClassList.push({ Id: classItem.Id, Name: classItem.Name, AsyncType: "Schedulable" }); + } + } + const classIds = this.asyncClassList.map(apexClass => apexClass.Id); + const classDtlRes = await soqlQueryTooling(`SELECT Id, Name, CreatedDate, CreatedBy.Name FROM ApexClass WHERE Id IN ('${classIds.join("','")}')`, conn); + const classDtlResRecords: any[] = classDtlRes.records || []; + const isRepo = isGitRepo(); + this.asyncClassList = await Promise.all(this.asyncClassList.map(async (cls) => { + const matchingClass = classDtlResRecords.filter(classDtl => 
classDtl.Id === cls.Id)[0]; + // Use date & user found in org by default + cls.ClassCreatedDate = moment(matchingClass.CreatedDate).format('YYYY-MM-DD'); + cls.ClassCreatedBy = `${matchingClass.CreatedBy.Name} (org)`; + // If file found in git, and if git date is lower than org date, use git date and user + if (isRepo) { + const gitInstance = git({ output: false, displayCommand: false }); + const fileMetadata = await MetadataUtils.findMetaFileFromTypeAndName("ApexClass", cls.Name); + if (fileMetadata) { + const log = await gitInstance.log({ + file: fileMetadata, + '--diff-filter': 'A', // Filter to include only commits that added the file + '--max-count': 1, // Limit to the first commit + }); + if (log && log.all.length === 1) { + const orgCreatedDate = moment(cls.ClassCreatedDate); + const gitCreatedDate = moment(log.all[0].date); + // Use date from git only if it is before date from org + if (gitCreatedDate.isBefore(orgCreatedDate)) { + cls.ClassCreatedDate = moment(log.all[0].date).format('YYYY-MM-DD'); + cls.ClassCreatedBy = `${log.all[0].author_name} (git)`; + } + } + } + } + return cls; + })) + } + + private async manageNotifications() { + const { flags } = await this.parse(DiagnoseUnusedApexClasses); + // Build notification + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); + const notifButtons = await getNotificationButtons(); + let notifSeverity: NotifSeverity = 'log'; + let notifText = `All async apex classes of org ${orgMarkdown} have been called during the latest ${this.lastNdays} days.`; + let attachments: any[] = []; + if (this.unusedNumber > 0) { + notifSeverity = 'warning'; + notifText = `${this.unusedNumber} apex classes might not be used anymore.`; + const notifDetailText = this.asyncClassList + .filter(apexClass => ["warning", "error"].includes(apexClass.severity)) + .map(apexClass => { + if (apexClass.nextJobDate) { + return `• *${apexClass.Name}*: Will run on 
${moment(apexClass.nextJobDate).format('YYYY-MM-DD hh:mm')}`
+          }
+          else if (apexClass.queued) {
+            return `• *${apexClass.Name}*: A future job is queued`
+          }
+          else if (apexClass.latestJobRunDays < 99999) {
+            return `• *${apexClass.Name}*: ${apexClass.latestJobRunDays} days (created on ${moment(apexClass.ClassCreatedDate).format('YYYY-MM-DD')} by ${apexClass.ClassCreatedBy})`
+          }
+          else {
+            return `• *${apexClass.Name}*: No past or future job found (created on ${moment(apexClass.ClassCreatedDate).format('YYYY-MM-DD')} by ${apexClass.ClassCreatedBy})`
+          }
+        }).join("\n");
+      attachments = [{ text: notifDetailText }];
+    }
+    /* jscpd:ignore-start */
+    // Send notifications
+    await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email
+    await NotifProvider.postNotifications({
+      type: 'UNUSED_APEX_CLASSES',
+      text: notifText,
+      attachments: attachments,
+      buttons: notifButtons,
+      severity: notifSeverity,
+      attachedFiles: this.outputFilesRes.xlsxFile ?
[this.outputFilesRes.xlsxFile] : [], + logElements: this.asyncClassList, + data: { metric: this.unusedNumber }, + metrics: { unusedApexClasses: this.unusedNumber }, + }); + /* jscpd:ignore-end */ + return []; + } +} diff --git a/src/commands/hardis/org/diagnose/unused-connected-apps.ts b/src/commands/hardis/org/diagnose/unused-connected-apps.ts new file mode 100644 index 000000000..7351747f8 --- /dev/null +++ b/src/commands/hardis/org/diagnose/unused-connected-apps.ts @@ -0,0 +1,318 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from "path"; +import { createTempDir, execCommand, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import moment from 'moment'; +import { CONSTANTS } from '../../../../config/index.js'; +import sortArray from 'sort-array'; +import { createBlankSfdxProject } from '../../../../common/utils/projectUtils.js'; +import { parseXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DiagnoseUnusedConnectedApps extends SfCommand { + public static title = 'Unused Connected Apps in an org'; + + public static allowedInactiveConnectedApps = [ + "Ant Migration Tool", + "Chatter Desktop", + "Chatter Mobile for 
BlackBerry", + "Force.com IDE", + "OIQ_Integration", + "Salesforce CLI", + "Salesforce Files", + "Salesforce Mobile Dashboards", + "Salesforce Touch", + "Salesforce for Outlook", + "SalesforceA", + "SalesforceA for Android", + "SalesforceA for iOS", + "SalesforceDX Namespace Registry", + "SalesforceIQ" + ] + + public static description = ` +## Command Behavior + +**Identifies and reports on potentially unused Connected Apps in a Salesforce org, suggesting candidates for deletion or deactivation.** + +This command helps improve org security and reduce technical debt by pinpointing Connected Apps that are no longer actively used. Connected Apps can pose security risks if left unmonitored, and cleaning them up contributes to a healthier Salesforce environment. + +Key functionalities: + +- **Connected App Data Collection:** Gathers information about all Connected Apps in the org, including creation and last modified dates, and associated users. +- **Usage Analysis:** Analyzes \`LoginHistory\` and \`OAuthToken\` records to determine the last usage date of each Connected App. +- **Inactivity Detection:** Flags Connected Apps as potentially unused if they have no recent login history or OAuth token usage. +- **Accessibility Check:** Examines Connected App metadata to identify if they are accessible (e.g., if they require admin approval and have no profiles or permission sets assigned). +- **Ignored Apps:** Automatically ignores a predefined list of common Salesforce Connected Apps (e.g., \`Salesforce CLI\`, \`Salesforce Mobile Dashboards\`). You can extend this list by defining the \`ALLOWED_INACTIVE_CONNECTED_APPS\` environment variable. +- **CSV Report Generation:** Generates a CSV file containing details of all analyzed Connected Apps, including their usage status, last usage date, and reasons for being flagged as potentially unused. 
+- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of potentially unused Connected Apps. + +**Default Ignored Connected Apps:** + +- ${this.allowedInactiveConnectedApps.join("\n- ")} + +You can add more ignored apps by defining a comma-separated list of names in the \`ALLOWED_INACTIVE_CONNECTED_APPS\` environment variable. + +_Example: +ALLOWED_INACTIVE_CONNECTED_APPS=My App 1,My App 2, My App 3_ + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-unused-connected-apps/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce SOQL Queries:** It performs SOQL queries against \`ConnectedApplication\`, \`LoginHistory\`, and \`OAuthToken\` objects to gather comprehensive data about Connected Apps and their usage. +- **Temporary SFDX Project:** It creates a temporary SFDX project to retrieve Connected App metadata, allowing for local parsing and analysis of their XML files. +- **Metadata Parsing:** It parses the \`connectedApp-meta.xml\` files to check for \`isAdminApproved\` and the presence of \`profileName\` or \`permissionsetName\` to determine accessibility. +- **Data Correlation:** It correlates data from various Salesforce objects to build a complete picture of each Connected App's usage and status. +- **Date Calculation:** Uses \`moment\` to calculate the time since the last OAuth token usage. +- **Report Generation:** It uses \`generateCsvFile\` to create the CSV report of unused Connected Apps. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **File System Operations:** Uses \`fs-extra\` for creating and removing temporary directories and files. +- **Environment Variable Reading:** Reads the \`ALLOWED_INACTIVE_CONNECTED_APPS\` environment variable to customize the list of ignored Connected Apps. +
+`; + + public static examples = [ + '$ sf hardis:org:diagnose:unused-connected-apps', + ]; + + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + public static requiresProject = false; + + protected debugMode = false; + + protected tmpSfdxProjectPath: string; + protected connectedAppResults: any[] = []; + protected outputFile; + protected outputFilesRes: any = {}; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(DiagnoseUnusedConnectedApps); + this.debugMode = flags.debug || false; + this.outputFile = flags.outputfile || null; + const conn = flags['target-org'].getConnection(); + + // Collect all Connected Apps + uxLog("action", this, c.cyan(`Extracting the whole list of Connected Apps from ${conn.instanceUrl} ...`)); + const allConnectedAppsQuery = + `SELECT Name,CreatedBy.Name,CreatedDate,LastModifiedBy.Name,LastModifiedDate,OptionsAllowAdminApprovedUsersOnly FROM ConnectedApplication ORDER BY Name`; + const allConnectedAppsQueryRes = await soqlQuery(allConnectedAppsQuery, conn); + const allConnectedApps = allConnectedAppsQueryRes.records; + + // Collect all Connected Apps metadata in a blank project + const tmpDirForSfdxProject = await createTempDir(); + this.tmpSfdxProjectPath = await createBlankSfdxProject(tmpDirForSfdxProject); + uxLog("action", this, c.cyan(`Retrieve ConnectedApp Metadatas from ${conn.instanceUrl} ...`)); + await execCommand( + `sf project retrieve start -m ConnectedApp --target-org ${conn.username}`, + this, + { cwd: 
this.tmpSfdxProjectPath, fail: true, output: true }); + + // Collect all Connected Apps used in LoginHistory table + uxLog("action", this, c.cyan(`Extracting all applications found in LoginHistory object from ${conn.instanceUrl} ...`)); + const allAppsInLoginHistoryQuery = + `SELECT Application FROM LoginHistory GROUP BY Application ORDER BY Application`; + const allAppsInLoginHistoryQueryRes = await soqlQuery(allAppsInLoginHistoryQuery, conn); + const allAppsInLoginHistoryNames = allAppsInLoginHistoryQueryRes.records.map(loginHistory => loginHistory.Application); + + // Perform analysis + uxLog("action", this, c.cyan(`Starting analysis...`)); + this.connectedAppResults = await Promise.all(allConnectedApps.map(async (connectedApp) => { + return await this.analyzeConnectedApp(allAppsInLoginHistoryNames, connectedApp, conn); + })); + + uxLog("log", this, c.grey(`Analysis complete. Deleting temporary project files...`)); + await fs.rm(tmpDirForSfdxProject, { recursive: true }); + + this.connectedAppResults = sortArray(this.connectedAppResults, + { + by: ['severity', 'Name'], + order: ['severity', 'asc'], + customOrders: { + severity: ["critical", "error", "warning", "info", "success", "log"] + } + }) as any[]; + const numberWarnings = this.connectedAppResults.filter(app => app.severity === "warning").length; + + // Process result + if (this.connectedAppResults.length > 0) { + + // Build notification + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); + const notifButtons = await getNotificationButtons(); + const notifSeverity: NotifSeverity = numberWarnings > 0 ? 
'warning' : 'log'; + const notifText = `${numberWarnings} Connected Apps to check have been found in ${orgMarkdown}` + let notifDetailText = ''; + for (const connectedApp of this.connectedAppResults.filter(app => app.severity === "warning")) { + notifDetailText += `• *${connectedApp.Name}*\n`; + } + const notifAttachments = [{ text: notifDetailText }]; + // Post notif + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'CONNECTED_APPS', + text: notifText, + attachments: notifAttachments, + buttons: notifButtons, + severity: notifSeverity, + attachedFiles: this.outputFilesRes.xlsxFile ? [this.outputFilesRes.xlsxFile] : [], + logElements: this.connectedAppResults, + data: { metric: numberWarnings }, + metrics: { + ConnectedApps: numberWarnings, + }, + }); + + // Display output + const connectedAppsLight = this.connectedAppResults.filter(app => app.severity === "warning").map(connectedApp => { + return { + SeverityIcon: connectedApp.severityIcon, + ConnectedApp: connectedApp.Name, + AppLastModifiedDate: moment(connectedApp.LastModifiedDate).format('ll'), + AppLastModifiedBy: connectedApp.LastModifiedBy, + LastOAuthUsageDate: connectedApp.LastOAuthUsageDate ? moment(connectedApp.LastOAuthUsageDate).format('ll') : '', + LastOAuthUsageBy: connectedApp.LastOAuthUsageDate, + SeverityReason: connectedApp.severityReason, + } + }) + uxLog("action", this, c.cyan(`Found ${c.bold(numberWarnings)} Connected Apps to check.`)); + uxLogTable(this, connectedAppsLight); + + // Generate output CSV file + this.outputFile = await generateReportPath('connected-apps', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.connectedAppResults, this.outputFile, { fileTitle: 'Connected Apps Analysis' }); + } + + // Return an object to be displayed with --json + return { + status: numberWarnings > 0 ? 
1 : 0, + allConnectedAppResults: this.connectedAppResults, + csvLogFile: this.outputFile, + }; + } + + private async analyzeConnectedApp(allAppsInLoginHistoryNames: any, connectedApp: any, conn: any) { + let loginHistoryFound = true; + let reason = "Found in Login History" + let severity: NotifSeverity = !allAppsInLoginHistoryNames.includes(connectedApp.Name) ? 'warning' : 'log'; + if (severity === "warning") { + loginHistoryFound = false; + reason = "Not Found in Login History"; + } + // Check OAuthToken + ({ severity, reason } = await this.checkOAuthToken(connectedApp, conn, loginHistoryFound, severity, reason)); + + // If OAuthToken < 6 months found, check on the metadata if the app is not available + if (severity === "warning") { + ({ severity, reason } = await this.checkNotAccessible(connectedApp, severity, reason)); + } + + // Check if app name is in allowedInactiveConnectedApps + const additionalIgnoredConnectedApps = process.env?.ALLOWED_INACTIVE_CONNECTED_APPS ? process.env?.ALLOWED_INACTIVE_CONNECTED_APPS.split(",") : []; + const allowedInactiveConnectedApps = DiagnoseUnusedConnectedApps.allowedInactiveConnectedApps.concat(additionalIgnoredConnectedApps); + if (severity === "warning" && allowedInactiveConnectedApps.includes(connectedApp.Name)) { + severity = "info"; + reason = "Member of ignored connected apps" + } + + // Build result + const severityIcon = getSeverityIcon(severity); + connectedApp.CreatedBy = connectedApp?.CreatedBy?.Name || 'Not set'; + connectedApp.LastModifiedBy = connectedApp?.LastModifiedBy?.Name || 'Not set'; + connectedApp.loginHistoryFound = loginHistoryFound; + connectedApp.severityReason = reason; + delete connectedApp.attributes; + return Object.assign({ + severityIcon: severityIcon, + severity: severity, + }, connectedApp); + } + + private async checkOAuthToken(connectedApp: any, conn: any, loginHistoryFound: boolean, severity: NotifSeverity, reason: string) { + uxLog("log", this, c.grey(`Looking in OAuthToken for last usage 
of ${connectedApp.Name}...`)); + const oAuthTokenQuery = `SELECT AppName,User.Name,LastUsedDate FROM OAuthToken WHERE AppName='${connectedApp.Name.replace(/'/g, "\\'")}' ORDER BY LastUsedDate DESC LIMIT 1`; + const oAuthTokenQueryRes = await soqlQuery(oAuthTokenQuery, conn); + const latestOAuthToken = oAuthTokenQueryRes.records.length === 1 ? oAuthTokenQueryRes.records[0] : null; + if (latestOAuthToken && latestOAuthToken.LastUsedDate) { + connectedApp.LastOAuthUsageDate = latestOAuthToken.LastUsedDate; + connectedApp.LastOAuthUsageBy = latestOAuthToken?.User?.Name || 'Not set'; + const today = moment(); + const lastUsage = moment(connectedApp.LastOAuthUsageDate); + if (today.diff(lastUsage, "months") < 6 && loginHistoryFound === false) { + severity = 'log'; + reason = "OAuth Token < 6 months"; + } + else { + reason = loginHistoryFound === false ? "Not Found in Login History and OAuth Token > 6 months" : reason; + } + } + else { + reason = loginHistoryFound === false ? "Not Found in Login History or used OAuth Token" : reason; + connectedApp.LastOAuthUsageDate = ''; + connectedApp.LastOAuthUsageBy = ''; + } + return { severity, reason }; + } + + private async checkNotAccessible(connectedApp: any, severity: NotifSeverity, reason: string) { + const connectedAppMdFile = path.join( + this.tmpSfdxProjectPath, + "force-app", + "main", + "default", + "connectedApps", + `${connectedApp.Name}.connectedApp-meta.xml`); + if (fs.existsSync(connectedAppMdFile)) { + const connectedAppXml = await parseXmlFile(connectedAppMdFile); + if (connectedAppXml?.ConnectedApp?.oauthConfig[0]?.isAdminApproved[0] === "true" && + (!this.hasProfiles(connectedAppXml)) && + (!this.hasPermissionSets((connectedAppXml)))) { + severity = "info"; + reason = "Not accessible (Admin pre-auth + no profiles and PS)"; + } + } + return { severity, reason }; + } + + private hasProfiles(connectedAppXml: any) { + return connectedAppXml?.ConnectedApp?.profileName?.length > 0 + } + + private 
hasPermissionSets(connectedAppXml: any) { + return connectedAppXml?.ConnectedApp?.permissionsetName?.length > 0 + } +} \ No newline at end of file diff --git a/src/commands/hardis/org/diagnose/unusedlicenses.ts b/src/commands/hardis/org/diagnose/unusedlicenses.ts index d18eb8ad6..2f84d8210 100644 --- a/src/commands/hardis/org/diagnose/unusedlicenses.ts +++ b/src/commands/hardis/org/diagnose/unusedlicenses.ts @@ -1,95 +1,113 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { bulkQuery, bulkQueryChunksIn, bulkUpdate } from "../../../../common/utils/apiUtils"; -import { generateCsvFile, generateReportPath } from "../../../../common/utils/filesUtils"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from "../../../../common/utils/notifUtils"; -import { prompts } from "../../../../common/utils/prompts"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { bulkQuery, bulkQueryChunksIn, bulkUpdate } from '../../../../common/utils/apiUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { CONSTANTS } from '../../../../config/index.js'; +import { 
setConnectionVariables } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DiagnoseUnusedLicenses extends SfCommand { + public static title = 'Detect unused Permission Set Licenses (beta)'; -export default class DiagnoseUnusedLicenses extends SfdxCommand { - public static title = "Detect unused Permission Set Licenses (beta)"; + public static description = ` +## Command Behavior - public static description = `When you assign a Permission Set to a user, and that this Permission Set is related to a Permission Set License, a Permission Set License Assignment is automatically created for the user. +**Detects and suggests the deletion of unused Permission Set License Assignments in a Salesforce org.** -But when you unassign this Permission Set from the user, **the Permission Set License Assignment is not deleted**. +When a Permission Set (PS) linked to a Permission Set License (PSL) is assigned to a user, a Permission Set License Assignment (PSLA) is automatically created. However, when that PS is unassigned from the user, the PSLA is *not* automatically deleted. This can lead to organizations being charged for unused PSLAs, representing a hidden cost and technical debt. -This leads that you can be **charged for Permission Set Licenses that are not used** ! +This command identifies such useless PSLAs and provides options to delete them, helping to optimize license usage and reduce unnecessary expenses. 
-This command detects such useless Permission Set Licenses Assignments and suggests to delete them. +Key functionalities: + +- **PSLA Detection:** Queries the Salesforce org to find all active PSLAs. +- **Usage Verification:** Correlates PSLAs with actual Permission Set Assignments and Permission Set Group Assignments to determine if the underlying Permission Sets are still assigned to the user. +- **Special Case Handling:** Accounts for specific scenarios where profiles might implicitly assign PSLAs (e.g., \`Salesforce API Only\` profile assigning \`SalesforceAPIIntegrationPsl\`) and allows for always excluding certain PSLAs from the unused check. +- **Reporting:** Generates a CSV report of all identified unused PSLAs, including the user and the associated Permission Set License. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of unused PSLAs. +- **Interactive Deletion:** In non-CI environments, it offers an interactive prompt to bulk delete the identified unused PSLAs. Many thanks to [Vincent Finet](https://www.linkedin.com/in/vincentfinet/) for the inspiration during his great speaker session at [French Touch Dreamin '23](https://frenchtouchdreamin.com/), and his kind agreement for reusing such inspiration in this command :) -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-unused-licenses/) and can output Grafana, Slack and MsTeams Notifications. +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-unused-licenses/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves extensive querying of Salesforce objects and data correlation: + +- **SOQL Queries (Bulk API):** It uses \`bulkQuery\` and \`bulkQueryChunksIn\` to efficiently retrieve large volumes of data from \`PermissionSetLicenseAssign\`, \`PermissionSetLicense\`, \`PermissionSet\`, \`PermissionSetGroupComponent\`, and \`PermissionSetAssignment\` objects. +- **Data Correlation:** It meticulously correlates data across these objects to determine if a \`PermissionSetLicenseAssign\` record has a corresponding active assignment to a Permission Set or Permission Set Group for the same user. +- **Filtering Logic:** It applies complex filtering logic to exclude PSLAs that are genuinely in use or are part of predefined exceptions (e.g., \`alwaysExcludeForActiveUsersPermissionSetLicenses\`). +- **Bulk Deletion:** If the user opts to delete unused PSLAs, it uses \`bulkUpdate\` with the \`delete\` operation to efficiently remove multiple records. +- **Report Generation:** It uses \`generateCsvFile\` to create the CSV report of unused PSLAs. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **User Interaction:** Uses \`prompts\` for interactive confirmation before performing deletion operations. +
`; - public static examples = ["$ sfdx hardis:org:diagnose:unusedlicenses", "$ sfdx hardis:org:diagnose:unusedlicenses --fix"]; + public static examples = ['$ sf hardis:org:diagnose:unusedlicenses', '$ sf hardis:org:diagnose:unusedlicenses --fix']; - protected static flagsConfig = { - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; + public static requiresProject = false; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; - - protected static additionalPermissionSetsToAlwaysGet = ["Sales_User"]; + protected static additionalPermissionSetsToAlwaysGet = ['Sales_User']; - protected static permSetsPermSetLicenses = [{ permSet: "Sales_User", permSetLicense: "SalesUserPsl" }]; + protected static permSetsPermSetLicenses = [{ permSet: 'Sales_User', permSetLicense: 'SalesUserPsl' }]; - 
protected static profilesPermissionSetLicenses = [{ profile: "Salesforce API Only", permSetLicense: "SalesforceAPIIntegrationPsl" }]; + protected static profilesPermissionSetLicenses = [ + { profile: 'Salesforce API Only', permSetLicense: 'SalesforceAPIIntegrationPsl' }, + ]; - protected static alwaysExcludeForActiveUsersPermissionSetLicenses = ["IdentityConnect"]; + protected static alwaysExcludeForActiveUsersPermissionSetLicenses = ['IdentityConnect']; protected debugMode = false; protected outputFile; protected outputFilesRes: any = {}; - protected permissionSetLicenseAssignmentsActive = []; - protected permissionSetLicenses = []; - protected unusedPermissionSetLicenseAssignments = []; - protected permissionSets = []; - protected permissionSetsGroupMembers = []; - protected permissionSetAssignments = []; - protected permissionSetGroupAssignments = []; - protected allPermissionSetAssignments = []; + protected permissionSetLicenseAssignmentsActive: any[] = []; + protected permissionSetLicenses: any[] = []; + protected unusedPermissionSetLicenseAssignments: any[] = []; + protected permissionSets: any[] = []; + protected permissionSetsGroupMembers: any[] = []; + protected permissionSetAssignments: any[] = []; + protected permissionSetGroupAssignments: any[] = []; + protected allPermissionSetAssignments: any[] = []; protected statusCode = 0; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; - this.outputFile = this.flags.outputfile || null; + const { flags } = await this.parse(DiagnoseUnusedLicenses); + this.debugMode = flags.debug || false; + this.outputFile = flags.outputfile || null; - const conn = this.org.getConnection(); + const conn = flags['target-org'].getConnection(); // List Permission Set Licenses Assignments this.permissionSetLicenseAssignmentsActive = await this.listAllPermissionSetLicenseAssignments(conn); @@ -117,17 +135,20 @@ This command is part of [sfdx-hardis 
Monitoring](https://sfdx-hardis.cloudity.co this.allPermissionSetAssignments = this.permissionSetGroupAssignments.concat(this.permissionSetAssignments); // Browse Permission Sets License assignments - const severityIconWarning = getSeverityIcon("warning"); + const severityIconWarning = getSeverityIcon('warning'); for (const psla of this.permissionSetLicenseAssignmentsActive) { - const pslaUsername = psla["Assignee.Username"]; + const pslaUsername = psla['Assignee.Username']; // Find related Permission Set assignments const foundMatchingPsAssignments = this.allPermissionSetAssignments.filter((psa) => { - if (psa["Assignee.Username"] === pslaUsername) { + if (psa['Assignee.Username'] === pslaUsername) { if (psa.licenseIds.includes(psla.PermissionSetLicenseId)) { return true; } else if ( DiagnoseUnusedLicenses.permSetsPermSetLicenses.some((psPsl) => { - if (psa["PermissionSet.Name"] === psPsl.permSet && psla["PermissionSetLicense.DeveloperName"] === psPsl.permSetLicense) { + if ( + psa['PermissionSet.Name'] === psPsl.permSet && + psla['PermissionSetLicense.DeveloperName'] === psPsl.permSetLicense + ) { return true; } return false; @@ -142,17 +163,20 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co // Handle special cases of Profiles that assigns Permission set licenses when selected on a user const isProfileRelatedPSLA = DiagnoseUnusedLicenses.profilesPermissionSetLicenses.some((profilePsl) => { return ( - psla["Assignee.Profile.Name"].startsWith(profilePsl.profile) && psla["PermissionSetLicense.DeveloperName"] === profilePsl.permSetLicense + psla['Assignee.Profile.Name'].startsWith(profilePsl.profile) && + psla['PermissionSetLicense.DeveloperName'] === profilePsl.permSetLicense ); }); - const isExcluded = DiagnoseUnusedLicenses.alwaysExcludeForActiveUsersPermissionSetLicenses.includes(psla["PermissionSetLicense.DeveloperName"]); + const isExcluded = DiagnoseUnusedLicenses.alwaysExcludeForActiveUsersPermissionSetLicenses.includes( + 
psla['PermissionSetLicense.DeveloperName'] + ); if (foundMatchingPsAssignments.length === 0 && !isProfileRelatedPSLA && !isExcluded) { this.unusedPermissionSetLicenseAssignments.push({ Id: psla.Id, - PermissionsSetLicense: psla["PermissionSetLicense.MasterLabel"], - User: psla["Assignee.Username"], - Reason: "Related PS assignment not found", - severity: "warning", + PermissionsSetLicense: psla['PermissionSetLicense.MasterLabel'], + User: psla['Assignee.Username'], + Reason: 'Related PS assignment not found', + severity: 'warning', severityIcon: severityIconWarning, }); } @@ -169,29 +193,29 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co let msg = `No unused permission set license assignment has been found`; if (this.unusedPermissionSetLicenseAssignments.length > 0) { this.statusCode = 1; - msg = `${this.unusedPermissionSetLicenseAssignments.length} unused Permission Set License Assignments have been found`; - uxLog(this, c.red(msg)); - for (const pslMasterLabel of Object.keys(summary).sort()) { - const psl = this.getPermissionSetLicenseByMasterLabel(pslMasterLabel); - uxLog(this, c.red(`- ${pslMasterLabel}: ${summary[pslMasterLabel]} (${psl.UsedLicenses} used on ${psl.TotalLicenses} available)`)); - } + const pslMasterLabels = Object.keys(summary).sort().map((pslMasterLabel) => { + return "- " + this.getPermissionSetLicenseByMasterLabel(pslMasterLabel).MasterLabel; + }).join('\n'); + msg = `Unused Permission Set License Assignments:\n${pslMasterLabels}`; + uxLog("warning", this, c.yellow(msg)); } else { - uxLog(this, c.green(msg)); + uxLog("success", this, c.green(msg)); } // Generate output CSV file if (this.unusedPermissionSetLicenseAssignments.length > 0) { - this.outputFile = await generateReportPath("unused-ps-license-assignments", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.unusedPermissionSetLicenseAssignments, this.outputFile); + uxLogTable(this, this.unusedPermissionSetLicenseAssignments); + 
this.outputFile = await generateReportPath('unused-ps-license-assignments', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.unusedPermissionSetLicenseAssignments, this.outputFile, { fileTitle: "Unused PSL assignments" }); } // Manage notifications - await this.manageNotifications(this.unusedPermissionSetLicenseAssignments, summary); + await this.manageNotifications(this.unusedPermissionSetLicenseAssignments, summary, flags); // Propose to delete await this.managePermissionSetLicenseAssignmentsDeletion(conn); - if ((this.argv || []).includes("unusedlicenses")) { + if ((this.argv || []).includes('unusedlicenses')) { process.exitCode = this.statusCode; } @@ -206,14 +230,14 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async listAllPermissionSetLicenseAssignments(conn: any) { - uxLog(this, c.cyan(`Extracting all active Permission Sets Licenses Assignments...`)); + uxLog("action", this, c.cyan(`Extracting all active Permission Sets Licenses Assignments...`)); const pslaQueryRes = await bulkQuery( ` SELECT Id,PermissionSetLicenseId, PermissionSetLicense.DeveloperName, PermissionSetLicense.MasterLabel, AssigneeId, Assignee.Username, Assignee.IsActive, Assignee.Profile.Name FROM PermissionSetLicenseAssign WHERE Assignee.IsActive=true ORDER BY PermissionSetLicense.MasterLabel, Assignee.Username`, - conn, + conn ); return pslaQueryRes.records; } @@ -223,20 +247,23 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co .map((psla) => { return { Id: psla.PermissionSetLicenseId, - DeveloperName: psla["PermissionSetLicense.DeveloperName"], - MasterLabel: psla["PermissionSetLicense.MasterLabel"], + DeveloperName: psla['PermissionSetLicense.DeveloperName'], + MasterLabel: psla['PermissionSetLicense.MasterLabel'], }; }) - .filter((value, index, self) => index === self.findIndex((t) => t.Id === value.Id && t.MasterLabel === value.MasterLabel)); + .filter( + (value, index, self) => + 
index === self.findIndex((t) => t.Id === value.Id && t.MasterLabel === value.MasterLabel) + ); const psLicensesIds = relatedPermissionSetLicenses.map((psl) => psl.Id); if (relatedPermissionSetLicenses.length > 0) { - uxLog(this, c.cyan(`Extracting related Permission Sets Licenses...`)); + uxLog("action", this, c.cyan(`Extracting related Permission Sets Licenses...`)); const pslQueryRes = await bulkQueryChunksIn( `SELECT Id,DeveloperName,MasterLabel,UsedLicenses,TotalLicenses FROM PermissionSetLicense WHERE Id in ({{IN}})`, conn, - psLicensesIds, + psLicensesIds ); return pslQueryRes.records; } @@ -244,54 +271,56 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async listRelatedPermissionSets(psLicensesIds: string[], conn) { - uxLog(this, c.cyan(`Extracting related Permission Sets...`)); + uxLog("action", this, c.cyan(`Extracting related Permission Sets...`)); const psQueryRes = await bulkQueryChunksIn( `SELECT Id,Label,Name,LicenseId FROM PermissionSet WHERE LicenseId in ({{IN}})`, conn, - psLicensesIds, + psLicensesIds ); const psQueryAdditionalRes = await bulkQueryChunksIn( `SELECT Id,Label,Name,LicenseId FROM PermissionSet WHERE Name in ({{IN}})`, conn, - DiagnoseUnusedLicenses.additionalPermissionSetsToAlwaysGet, + DiagnoseUnusedLicenses.additionalPermissionSetsToAlwaysGet ); return psQueryRes.records.concat(psQueryAdditionalRes.records); } private async listRelatedPermissionSetGroupsComponents(permissionSetsIds: string[], conn) { - uxLog(this, c.cyan(`Extracting related Permission Sets Group Components...`)); + uxLog("action", this, c.cyan(`Extracting related Permission Sets Group Components...`)); const psgcQueryRes = await bulkQueryChunksIn( `SELECT Id,PermissionSetId,PermissionSetGroupId,PermissionSet.LicenseId,PermissionSet.Name,PermissionSetGroup.DeveloperName FROM PermissionSetGroupComponent WHERE PermissionSetId in ({{IN}})`, conn, - permissionSetsIds, + permissionSetsIds ); return psgcQueryRes.records; } 
private async listRelatedPermissionSetAssignmentsToGroups(conn) { - const permissionSetsGroupIds = [...new Set(this.permissionSetsGroupMembers.map((psgc) => psgc.PermissionSetGroupId))]; + const permissionSetsGroupIds = [ + ...new Set(this.permissionSetsGroupMembers.map((psgc) => psgc.PermissionSetGroupId)), + ]; if (permissionSetsGroupIds.length > 0) { - uxLog(this, c.cyan(`Extracting related Permission Set Group Assignments...`)); + uxLog("action", this, c.cyan(`Extracting related Permission Set Group Assignments...`)); const psgaQueryRes = await bulkQueryChunksIn( `SELECT Id,Assignee.Username,PermissionSetGroupId,PermissionSetGroup.DeveloperName FROM PermissionSetAssignment WHERE PermissionSetGroupId in ({{IN}})`, conn, - permissionSetsGroupIds, + permissionSetsGroupIds ); // Add related licenses in licenseIds for each PS Assignment psgaQueryRes.records = psgaQueryRes.records.map((psga) => { psga.licenseIds = []; for (const psgm of this.permissionSetsGroupMembers) { if (psgm.PermissionSetGroupId === psga.PermissionSetGroupId) { - if (psgm["PermissionSet.LicenseId"]) { - psga.licenseIds.push(psgm["PermissionSet.LicenseId"]); + if (psgm['PermissionSet.LicenseId']) { + psga.licenseIds.push(psgm['PermissionSet.LicenseId']); } } } @@ -303,35 +332,35 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async listRelatedPermissionSetAssignmentsToPs(permissionSetsIds: string[], conn) { - uxLog(this, c.cyan(`Extracting related Permission Sets Assignments...`)); + uxLog("action", this, c.cyan(`Extracting related Permission Sets Assignments...`)); const psaQueryRes = await bulkQueryChunksIn( `SELECT Id,Assignee.Username,PermissionSetId,PermissionSet.LicenseId,PermissionSet.Name FROM PermissionSetAssignment WHERE PermissionSetId in ({{IN}})`, conn, - permissionSetsIds, + permissionSetsIds ); // Add related license in licenseIds for each PS Assignment psaQueryRes.records = psaQueryRes.records.map((psa) => { psa.licenseIds = []; - if 
(psa["PermissionSet.LicenseId"]) { - psa.licenseIds.push(psa["PermissionSet.LicenseId"]); + if (psa['PermissionSet.LicenseId']) { + psa.licenseIds.push(psa['PermissionSet.LicenseId']); } return psa; }); return psaQueryRes.records; } - private async manageNotifications(unusedPermissionSetLicenseAssignments: any[], summary: any) { + private async manageNotifications(unusedPermissionSetLicenseAssignments: any[], summary: any, flags) { // Build notification - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No unused Permission Set Licenses Assignments has been found in ${orgMarkdown}`; let notifDetailText = ``; - let attachments = []; + let attachments: any[] = []; if (unusedPermissionSetLicenseAssignments.length > 0) { - notifSeverity = "warning"; + notifSeverity = 'warning'; notifText = `${unusedPermissionSetLicenseAssignments.length} unused Permission Set Licenses Assignments have been found in ${orgMarkdown}`; for (const pslMasterLabel of Object.keys(summary).sort()) { const psl = this.getPermissionSetLicenseByMasterLabel(pslMasterLabel); @@ -340,9 +369,9 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co attachments = [{ text: notifDetailText }]; } // Send notifications - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "UNUSED_LICENSES", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'UNUSED_LICENSES', text: notifText, attachments: attachments, buttons: notifButtons, @@ -360,31 +389,46 @@ This command is part of 
[sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co private async managePermissionSetLicenseAssignmentsDeletion(conn) { if (!isCI && this.unusedPermissionSetLicenseAssignments.length) { const confirmRes = await prompts({ - type: "select", - message: "Do you want to delete unused Permission Set License Assignments ?", + type: 'select', + message: 'Do you want to delete unused Permission Set License Assignments ?', + description: 'Remove permission set license assignments that are not being used, freeing up licenses for other users', + placeholder: 'Select an option', choices: [ { title: `Yes, delete the ${this.unusedPermissionSetLicenseAssignments.length} useless Permission Set License Assignments !`, - value: "all", + value: 'all', }, - { title: "No" }, + { title: 'No' }, ], }); - if (confirmRes.value === "all") { + if (confirmRes.value === 'all') { const pslaToDelete = this.unusedPermissionSetLicenseAssignments.map((psla) => { return { Id: psla.Id }; }); - const deleteRes = await bulkUpdate("PermissionSetLicenseAssign", "delete", pslaToDelete, conn); - const deleteSuccessNb = deleteRes.successRecordsNb; - const deleteErrorNb = deleteRes.errorRecordsNb; + const deleteRes = await bulkUpdate('PermissionSetLicenseAssign', 'delete', pslaToDelete, conn); + const deleteSuccessNb = deleteRes.successfulResults.length; + const deleteErrorNb = deleteRes.failedResults.length; + uxLog("action", this, "Deletions Summary"); if (deleteErrorNb > 0) { - uxLog(this, c.yellow(`Warning: ${c.red(c.bold(deleteErrorNb))} assignments has not been deleted (bulk API errors)`)); + uxLog( + "warning", + this, + c.yellow(`Warning: ${c.red(c.bold(deleteErrorNb))} assignments has not been deleted (bulk API errors)`) + ); + uxLogTable(this, deleteRes.failedResults); + this.outputFile = await generateReportPath('failed-delete-ps-license-assignments', this.outputFile); + this.outputFilesRes = await generateCsvFile(deleteRes.failedResults, this.outputFile, { fileTitle: "Failed PSL assignments 
deletions" }); this.statusCode = 1; } else { this.statusCode = 0; } // Build results summary - uxLog(this, c.green(`${c.bold(deleteSuccessNb)} assignments has been deleted.`)); + uxLog("success", this, c.green(`${c.bold(deleteSuccessNb)} assignments has been deleted.`)); + if (deleteSuccessNb) { + uxLogTable(this, deleteRes.successfulResults); + this.outputFile = await generateReportPath('deleted-ps-license-assignments', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.unusedPermissionSetLicenseAssignments, this.outputFile, { fileTitle: "Deleted PSL assignments" }); + } } } return this.statusCode; @@ -395,6 +439,6 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co if (pslList.length === 1) { return pslList[0]; } - throw new SfdxError(`Unable to find Permission Set License with MasterLabel ${masterLabel}`); + throw new SfError(`Unable to find Permission Set License with MasterLabel ${masterLabel}`); } } diff --git a/src/commands/hardis/org/diagnose/unusedusers.ts b/src/commands/hardis/org/diagnose/unusedusers.ts index 9d7e07ff8..1a28d4916 100644 --- a/src/commands/hardis/org/diagnose/unusedusers.ts +++ b/src/commands/hardis/org/diagnose/unusedusers.ts @@ -1,119 +1,136 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { bulkQuery } from "../../../../common/utils/apiUtils"; -import { generateCsvFile, generateReportPath } from "../../../../common/utils/filesUtils"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { getNotificationButtons, getOrgMarkdown } from "../../../../common/utils/notifUtils"; -import { prompts } from "../../../../common/utils/prompts"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; 
+import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { bulkQuery } from '../../../../common/utils/apiUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { getNotificationButtons, getOrgMarkdown } from '../../../../common/utils/notifUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { CONSTANTS } from '../../../../config/index.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DiagnoseUnusedUsers extends SfCommand { + public static title = 'Detect unused Users in Salesforce'; -export default class DiagnoseUnusedUsers extends SfdxCommand { - public static title = "Detect unused Users in Salesforce"; + public static description = ` +## Command Behavior - public static description = `Efficient user management is vital in Salesforce to ensure resources are optimized and costs are controlled. However, inactive or unused user accounts can often go unnoticed, leading to wasted licenses and potential security risks. This tool addresses this challenge by enabling administrators to identify users who haven't logged in within a specified period. 
+**Detects and reports on inactive or unused Salesforce user accounts, helping to optimize license usage and enhance security.** + +Efficient user management is vital in Salesforce to ensure resources are optimized and costs are controlled. However, inactive or unused user accounts can often go unnoticed, leading to wasted licenses and potential security risks. This tool addresses this challenge by enabling administrators to identify users who haven't logged in within a specified period. By analyzing user login activity and last login timestamps, this feature highlights inactive user accounts, allowing administrators to take appropriate action. Whether it's deactivating dormant accounts, freeing up licenses, or ensuring compliance with security policies, this functionality empowers administrators to maintain a lean and secure Salesforce environment. -licensetypes values are the following: +Key functionalities: -- all-crm: SFDC,AUL,AUL1,AULL_IGHT +- **Inactivity Detection:** Identifies users who have not logged in for a specified number of days (\`--days\` flag, default 180 days in CI, 365 days otherwise). +- **License Type Filtering:** Allows filtering users by license type using \`--licensetypes\` (e.g., \`all-crm\`, \`all-paying\`) or specific license identifiers using \`--licenseidentifiers\`. + - \`all-crm\`: Includes \`SFDC\`, \`AUL\`, \`AUL1\`, \`AULL_IGHT\` licenses. + - \`all-paying\`: Includes \`SFDC\`, \`AUL\`, \`AUL1\`, \`AULL_IGHT\`, \`PID_Customer_Community\`, \`PID_Customer_Community_Login\`, \`PID_Partner_Community\`, \`PID_Partner_Community_Login\` licenses. + - Note: You can see the full list of available license identifiers in [Salesforce Documentation](https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/object_reference/sforce_api_objects_userlicense.htm). +- **Active User Retrieval:** The \`--returnactiveusers\` flag inverts the command, allowing you to retrieve active users who *have* logged in during the specified period. 
+- **CSV Report Generation:** Generates a CSV file containing details of all identified users (inactive or active), including their last login date, profile, and license information. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of inactive or active users. -- all-paying: SFDC,AUL,AUL1,AULL_IGHT,PID_Customer_Community,PID_Customer_Community_Login,PID_Partner_Community,PID_Partner_Community_Login +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-inactive-users/) and can output Grafana, Slack and MsTeams Notifications. -Note: You can see the full list of available license identifiers in [Salesforce Documentation](https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/object_reference/sforce_api_objects_userlicense.htm) +
+Technical explanations -Use --returnactiveusers to revert the command and retrieve active users that has logged in during the period. +The command's technical implementation involves: -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-inactive-users/) and can output Grafana, Slack and MsTeams Notifications. -`; +- **SOQL Query (Bulk API):** It uses \`bulkQuery\` to efficiently retrieve user records from the Salesforce \`User\` object. The SOQL query dynamically constructs its WHERE clause based on the \`--days\`, \`--licensetypes\`, \`--licenseidentifiers\`, and \`--returnactiveusers\` flags. +- **Interactive Prompts:** Uses \`prompts\` to interactively ask the user for the number of inactive days and license types if not provided via flags. +- **License Mapping:** Internally maps common license type aliases (e.g., \`all-crm\`) to their corresponding Salesforce \`LicenseDefinitionKey\` values. +- **Report Generation:** It uses \`generateCsvFile\` to create the CSV report of users. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including attachments of the generated CSV report and metrics for monitoring dashboards. +- **User Feedback:** Provides a summary of the findings in the console, indicating the number of inactive or active users found. +
` + ; public static examples = [ - "$ sfdx hardis:org:diagnose:unusedusers", - "$ sfdx hardis:org:diagnose:unusedusers --days 365", - "$ sfdx hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm", - "$ sfdx hardis:org:diagnose:unusedusers --days 60 --licenseidentifiers SFDC,AUL,AUL1", - "$ sfdx hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm --returnactiveusers", + '$ sf hardis:org:diagnose:unusedusers', + '$ sf hardis:org:diagnose:unusedusers --days 365', + '$ sf hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm', + '$ sf hardis:org:diagnose:unusedusers --days 60 --licenseidentifiers SFDC,AUL,AUL1', + '$ sf hardis:org:diagnose:unusedusers --days 60 --licensetypes all-crm --returnactiveusers', ]; //Comment default values to test the prompts - protected static flagsConfig = { - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - days: flags.number({ - char: "t", - description: "Extracts the users that have been inactive for the amount of days specified. In CI, default is 180 days", + days: Flags.integer({ + char: 't', + description: + 'Extracts the users that have been inactive for the amount of days specified. In CI, default is 180 days', }), - licensetypes: flags.enum({ - char: "l", - options: ["all", "all-crm", "all-paying"], - description: "Type of licenses to check. If set, do not use licenseidentifiers option. In CI, default is all-crm", + licensetypes: Flags.string({ + char: 'l', + options: ['all', 'all-crm', 'all-paying'], + description: 'Type of licenses to check. If set, do not use licenseidentifiers option. 
In CI, default is all-crm', }), - licenseidentifiers: flags.string({ - char: "i", + licenseidentifiers: Flags.string({ + char: 'i', description: - "Comma-separated list of license identifiers, in case licensetypes is not used.. Identifiers available at https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/object_reference/sforce_api_objects_userlicense.htm", + 'Comma-separated list of license identifiers, in case licensetypes is not used.. Identifiers available at https://developer.salesforce.com/docs/atlas.en-us.object_reference.meta/object_reference/sforce_api_objects_userlicense.htm', }), - returnactiveusers: flags.boolean({ + returnactiveusers: Flags.boolean({ default: false, - description: "Inverts the command by returning the active users", + description: 'Inverts the command by returning the active users', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected licenseTypesCorrespondances = { - all: "all", - "all-crm": "SFDC,AUL,AUL1,AULL_IGHT", - 
"all-paying": "SFDC,AUL,AUL1,AULL_IGHT,PID_Customer_Community,PID_Customer_Community_Login,PID_Partner_Community,PID_Partner_Community_Login", + all: 'all', + 'all-crm': 'SFDC,AUL,AUL1,AULL_IGHT', + 'all-paying': + 'SFDC,AUL,AUL1,AULL_IGHT,PID_Customer_Community,PID_Customer_Community_Login,PID_Partner_Community,PID_Partner_Community_Login', }; protected returnActiveUsers = false; protected debugMode = false; protected outputFile; protected outputFilesRes: any = {}; - protected lastNdays: number; - protected licenseTypes: string; - protected licenseIdentifiers: string; - protected users = []; + protected lastNdays: number | null; + protected licenseTypes: string | null; + protected licenseIdentifiers: string | null; + protected users: any[] = []; protected statusCode = 0; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; - this.returnActiveUsers = this.flags.returnactiveusers ?? false; - this.outputFile = this.flags.outputfile || null; - this.lastNdays = this.flags.days || null; - this.licenseIdentifiers = this.flags.licenseidentifiers || null; - this.licenseTypes = this.flags.licensetypes; + const { flags } = await this.parse(DiagnoseUnusedUsers); + this.debugMode = flags.debug || false; + this.returnActiveUsers = flags.returnactiveusers ?? false; + this.outputFile = flags.outputfile || null; + this.lastNdays = flags.days || null; + this.licenseIdentifiers = flags.licenseidentifiers || null; + this.licenseTypes = flags.licensetypes || null; // Calculate lastNdays to use await this.defineNumberOfInactiveDays(); @@ -121,21 +138,25 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co await this.defineLicenseIdentifiers(); // Retrieve the list of users who haven't logged in for a while - const conn = this.org.getConnection(); + const conn = flags['target-org'].getConnection(); uxLog( + "action", this, c.cyan( this.returnActiveUsers ? 
`Extracting active users on ${conn.instanceUrl} ...` - : `Extracting active users who haven't logged in for a while on ${conn.instanceUrl} ...`, - ), + : `Extracting active users who haven't logged in for a while on ${conn.instanceUrl} ...` + ) ); this.users = await this.listUsersFromLicenses(conn); // Generate output CSV file if (this.users.length > 0) { - this.outputFile = await generateReportPath(this.returnActiveUsers ? "active-users" : "unused-users", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.users, this.outputFile); + this.outputFile = await generateReportPath( + this.returnActiveUsers ? 'active-users' : 'unused-users', + this.outputFile + ); + this.outputFilesRes = await generateCsvFile(this.users, this.outputFile, { fileTitle: "Inactive users found" }); } let summary; @@ -144,7 +165,7 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co summary = `${this.users.length} users have logged in ${conn.instanceUrl} in the last ${this.lastNdays} days`; } else { // Inactive users mode - const userSummaryInfo = this.users.length == 1 ? "user has" : "users have"; + const userSummaryInfo = this.users.length == 1 ? 
'user has' : 'users have'; summary = `No unused users have been found`; if (this.users.length === 0) { summary = `All users have logged in to ${conn.instanceUrl} within the last ${this.lastNdays} days!`; @@ -153,7 +174,7 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co summary = `${this.users.length} active ${userSummaryInfo} not logged in to ${conn.instanceUrl} in the last ${this.lastNdays} days!`; } - if ((this.argv || []).includes("unusedusers")) { + if ((this.argv || []).includes('unusedusers')) { process.exitCode = this.statusCode; } } @@ -162,9 +183,10 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co await this.manageNotifications(this.users); if (this.users.length > 0 && !this.returnActiveUsers) { - uxLog(this, c.yellow(summary)); + uxLog("warning", this, c.yellow(summary)); + uxLogTable(this, this.users); } else { - uxLog(this, c.green(summary)); + uxLog("success", this, c.green(summary)); } // Return an object to be displayed with --json @@ -182,21 +204,23 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co // Ask user if interactive mode if (!this.licenseTypes && !isCI) { const licenseTypesResponse = await prompts({ - type: "select", - name: "licensetypes", - message: "Please select the type of licenses you want to detect ", + type: 'select', + name: 'licensetypes', + message: 'Please select the type of licenses you want to detect ', + description: 'Choose which categories of user licenses to analyze for unused accounts', + placeholder: 'Select license type', choices: [ - { value: "all", title: "All licenses types" }, - { value: `all-crm`, title: "Salesforce Licenses" }, - { value: `all-paying`, title: "Salesforce Licences + Experience + Other paying" }, + { value: 'all', title: 'All licenses types' }, + { value: `all-crm`, title: 'Salesforce Licenses' }, + { value: `all-paying`, title: 'Salesforce Licences + Experience + Other paying' }, ], }); 
this.licenseTypes = licenseTypesResponse.licensetypes; } else if (!this.licenseTypes) { - this.licenseTypes = "all-crm"; + this.licenseTypes = 'all-crm'; } // Get licenseIdentifiers from licenseType - this.licenseIdentifiers = this.licenseTypesCorrespondances[this.licenseTypes]; + this.licenseIdentifiers = this.licenseTypesCorrespondances[this.licenseTypes || '']; } } @@ -205,9 +229,11 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co if (!isCI) { // If manual mode and days not sent as parameter, prompt user const lastNdaysResponse = await prompts({ - type: "select", - name: "days", - message: "Please select the period to detect users.", + type: 'select', + name: 'days', + message: 'Please select the period to detect users.', + description: 'Choose how far back to look for user activity when determining if users are inactive', + placeholder: 'Select time period', choices: [ { title: `1 day`, value: 1 }, { title: `2 days`, value: 2 }, @@ -231,16 +257,18 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co private async listUsersFromLicenses(conn) { let whereConstraint = this.returnActiveUsers ? 
// Active users - `WHERE IsActive = true AND (` + `(LastLoginDate >= LAST_N_DAYS:${this.lastNdays} AND LastLoginDate != NULL)` + `)` + `WHERE IsActive = true AND (` + + `(LastLoginDate >= LAST_N_DAYS:${this.lastNdays} AND LastLoginDate != NULL)` + + `)` : // Inactive users - `WHERE IsActive = true AND (` + - `(LastLoginDate < LAST_N_DAYS:${this.lastNdays} AND LastLoginDate != NULL) OR ` + - `(CreatedDate < LAST_N_DAYS:${this.lastNdays} AND LastLoginDate = NULL)` + // Check also for users never used - `)`; + `WHERE IsActive = true AND (` + + `(LastLoginDate < LAST_N_DAYS:${this.lastNdays} AND LastLoginDate != NULL) OR ` + + `(CreatedDate < LAST_N_DAYS:${this.lastNdays} AND LastLoginDate = NULL)` + // Check also for users never used + `)`; // Add License constraint only if necessary - if (this.licenseTypes !== "all") { - const licenseIdentifierValues = this.licenseIdentifiers.split(","); - const licenseIdentifierCondition = licenseIdentifierValues.map((value) => `'${value}'`).join(","); + if (this.licenseTypes !== 'all') { + const licenseIdentifierValues = (this.licenseIdentifiers || '').split(','); + const licenseIdentifierCondition = licenseIdentifierValues.map((value) => `'${value}'`).join(','); whereConstraint += ` AND Profile.UserLicense.LicenseDefinitionKey IN (${licenseIdentifierCondition})`; } // Build & call Bulk API Query @@ -254,26 +282,29 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } private async manageNotifications(users: any[]) { + const { flags } = await this.parse(DiagnoseUnusedUsers); // Build notification - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; - let notifText = this.returnActiveUsers ? 
`No active user has logged in in ${orgMarkdown}` : `No inactive user has been found in ${orgMarkdown}`; + let notifSeverity: NotifSeverity = 'log'; + let notifText = this.returnActiveUsers + ? `No active user has logged in in ${orgMarkdown}` + : `No inactive user has been found in ${orgMarkdown}`; const notifDetailText = ``; - let attachments = []; + let attachments: any[] = []; if (users.length > 0) { - notifSeverity = this.returnActiveUsers ? "log" : "warning"; + notifSeverity = this.returnActiveUsers ? 'log' : 'warning'; notifText = this.returnActiveUsers - ? `${this.users.length} active users have logged in to ${orgMarkdown} within the last ${this.lastNdays} days.` - : `${this.users.length} active users have not logged in to ${orgMarkdown} within the last ${this.lastNdays} days.`; + ? `*${this.users.length}* active users have logged in to ${orgMarkdown} within the last ${this.lastNdays} days.` + : `*${this.users.length}* active users have not logged in to ${orgMarkdown} within the last ${this.lastNdays} days.`; attachments = [{ text: notifDetailText }]; } const metrics = this.returnActiveUsers ? { ActiveUsers: this.users.length } : { UnusedUsers: this.users.length }; /* jscpd:ignore-start */ // Send notifications - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: this.returnActiveUsers ? "ACTIVE_USERS" : "UNUSED_USERS", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: this.returnActiveUsers ? 
'ACTIVE_USERS' : 'UNUSED_USERS', text: notifText, attachments: attachments, buttons: notifButtons, diff --git a/src/commands/hardis/org/files/export.ts b/src/commands/hardis/org/files/export.ts index 4151476c1..41afc7244 100644 --- a/src/commands/hardis/org/files/export.ts +++ b/src/commands/hardis/org/files/export.ts @@ -1,96 +1,141 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { uxLog } from "../../../../common/utils"; -import { FilesExporter, getFilesWorkspaceDetail, promptFilesExportConfiguration, selectFilesWorkspace } from "../../../../common/utils/filesUtils"; -import { prompts } from "../../../../common/utils/prompts"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class FilesExport extends SfdxCommand { - public static title = "Export files"; - - public static description = `Export file attachments from a Salesforce org - -See article below +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import path from 'path'; +import { humanizeObjectKeys, uxLog, uxLogTable, isCI } from '../../../../common/utils/index.js'; +import { + FilesExporter, + getFilesWorkspaceDetail, + promptFilesExportConfiguration, + selectFilesWorkspace, +} from '../../../../common/utils/filesUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class FilesExport extends SfCommand { + public static title = 'Export files'; + + public static description = ` +## Command Behavior + +**Exports file attachments (ContentVersion, Attachment) from a Salesforce org based on a predefined configuration.** + +This command enables the mass download of files associated with Salesforce records, providing a robust solution for backing up files, migrating them to other systems, or integrating them with external document management solutions. + +Key functionalities: + +- **Configuration-Driven Export:** Relies on an \`export.json\` file within a designated file export project to define the export criteria, including the SOQL query for parent records, file types to export, output naming conventions, and file size filtering. +- **File Size Filtering:** Supports minimum file size filtering via the \`fileSizeMin\` configuration parameter (in KB). Files smaller than the specified size will be skipped during export. 
+- **File Validation:** After downloading each file, validates the integrity by: + - **Checksum Validation:** For ContentVersion files, compares MD5 checksum with Salesforce's stored checksum + - **Size Validation:** For both ContentVersion and Attachment files, verifies actual file size matches expected size + - **Status Tracking:** Files are categorized with specific statuses: \`success\` (valid files), \`failed\` (download errors), \`skipped\` (filtered files), \`invalid\` (downloaded but failed validation) + - All validation results are logged in the CSV export log for audit purposes +- **Resume/Restart Capability:** + - **Resume Mode:** When \`--resume\` flag is used (default in CI environments), checks existing downloaded files for validity. Valid files are skipped, invalid files are re-downloaded. + - **Restart Mode:** When resume is disabled, clears the output folder and starts a fresh export. + - **Interactive Mode:** When existing files are found and \`--resume\` is not explicitly specified (non-CI environments), prompts the user to choose between resume or restart. +- **Interactive Project Selection:** If the file export project path is not provided via the \`--path\` flag, it interactively prompts the user to select one. +- **Configurable Export Options:** Allows overriding default export settings such as \`chunksize\` (number of records processed in a batch), \`polltimeout\` (timeout for Bulk API calls), and \`startchunknumber\` (to resume a failed export). +- **Support for ContentVersion and Attachment:** Handles both modern Salesforce Files (ContentVersion) and older Attachments. + +See this article for a practical example: [![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) + +
+Technical explanations + +The command's technical implementation involves: + +- **FilesExporter Class:** The core logic is encapsulated within the \`FilesExporter\` class, which orchestrates the entire export process. +- **SOQL Queries (Bulk API):** It uses Salesforce Bulk API queries to efficiently retrieve large volumes of parent record IDs and file metadata, including checksums and file sizes. +- **File Download:** Downloads the actual file content from Salesforce. +- **File Validation:** After each successful download, validates file integrity by comparing checksums (ContentVersion) and file sizes (both ContentVersion and Attachment) against Salesforce metadata. +- **Resume Logic:** In resume mode, checks for existing files before downloading, validates their integrity, and only re-downloads invalid or missing files. This enables efficient recovery from interrupted exports. +- **File System Operations:** Writes the downloaded files to the local file system, organizing them into folders based on the configured naming conventions. +- **Configuration Loading:** Reads the \`export.json\` file to get the export configuration. It also allows for interactive overriding of these settings. +- **Interactive Prompts:** Uses \`selectFilesWorkspace\` to allow the user to choose a file export project, \`promptFilesExportConfiguration\` for customizing export options, and prompts for resume/restart choice when existing files are found. +- **Error Handling:** Includes mechanisms to handle potential errors during the export process, such as network issues, API limits, and file validation failures. Each file is assigned a specific status (\`success\`, \`failed\`, \`skipped\`, \`invalid\`) for comprehensive tracking and troubleshooting. +
`; - public static examples = ["$ sfdx hardis:org:files:export"]; + public static examples = ['$ sf hardis:org:files:export']; - protected static flagsConfig = { - path: flags.string({ - char: "p", - description: "Path to the file export project", + public static flags: any = { + path: Flags.string({ + char: 'p', + description: 'Path to the file export project', }), - chunksize: flags.number({ - char: "c", - description: "Number of records to add in a chunk before it is processed", + chunksize: Flags.integer({ + char: 'c', + description: 'Number of records to add in a chunk before it is processed', default: 1000, }), - polltimeout: flags.number({ - char: "t", - description: "Timeout in MS for Bulk API calls", + polltimeout: Flags.integer({ + char: 't', + description: 'Timeout in MS for Bulk API calls', default: 300000, }), - startchunknumber: flags.number({ - char: "s", - description: "Chunk number to start from", + startchunknumber: Flags.integer({ + char: 's', + description: 'Chunk number to start from', default: 0, }), - debug: flags.boolean({ - char: "d", + resume: Flags.boolean({ + char: 'r', + description: 'Resume previous export by checking existing files (default in CI)', + default: false, + }), + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username 
- // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; /* jscpd:ignore-end */ public async run(): Promise { - let filesPath = this.flags.path || null; - const recordsChunkSize = this.flags.chunksize; - const pollTimeout = this.flags.polltimeout; - const startChunkNumber = this.flags.startchunknumber || 0; - //const debugMode = this.flags.debug || false; + const { flags } = await this.parse(FilesExport); + let filesPath = flags.path || null; + const recordsChunkSize = flags.chunksize; + const pollTimeout = flags.polltimeout; + const startChunkNumber = flags.startchunknumber || 0; + const resumeExport = flags.resume; + //const debugMode = flags.debug || false; const exportOptions: any = { pollTimeout: pollTimeout, recordsChunkSize: recordsChunkSize, startChunkNumber: startChunkNumber, + resumeExport: resumeExport, }; // Identify files workspace if not defined if (filesPath == null) { - filesPath = await selectFilesWorkspace({ selectFilesLabel: "Please select a files workspace to EXPORT" }); - const exportConfigInitial = await getFilesWorkspaceDetail(filesPath); + filesPath = await selectFilesWorkspace({ selectFilesLabel: 'Please select a files workspace to EXPORT' }); + const exportConfigInitial: any = (await getFilesWorkspaceDetail(filesPath || '')) || {}; // Request to use defaut config or to override it for this run const defaultConfigRes = await prompts({ - type: "confirm", - message: c.cyanBright("Do you want to use default configuration for " + exportConfigInitial.label + " ?"), + type: 'confirm', + message: c.cyanBright('Do you want to use default configuration for ' + exportConfigInitial.label + ' ?'), + description: 'Use the saved configuration settings or customize them for this export operation', }); if (defaultConfigRes.value !== true) { const exportConfig = await 
promptFilesExportConfiguration(exportConfigInitial, true); @@ -98,13 +143,66 @@ See article below } } - // Export files from org + // Display final export configuration + let exportConfigFinal: any = (await getFilesWorkspaceDetail(filesPath || '')) || {}; + if (exportOptions.exportConfig) { + // Merge with existing config + exportConfigFinal = Object.assign(exportConfigFinal, exportOptions.exportConfig); + } + const exportConfigHuman = humanizeObjectKeys(exportConfigFinal || {}); + uxLog("action", this, c.cyan(`Export configuration has been defined (see details below)`)); + uxLogTable(this, exportConfigHuman); + + // Check for existing files and prompt user if needed + let finalResumeExport = resumeExport; + if (!isCI && !resumeExport) { + // User didn't explicitly set --resume and we're not in CI + const exportFolder = path.join(filesPath || '', 'export'); + if (fs.existsSync(exportFolder)) { + try { + const files = await fs.readdir(exportFolder); + const hasFiles = files.length > 0; + + if (hasFiles) { + uxLog("action", this, c.yellow(`Found existing files in output folder: ${exportFolder}`)); + const resumePrompt = await prompts({ + type: 'confirm', + message: c.cyanBright('Do you want to resume the previous export (validate and skip existing valid files)?'), + description: 'Choose "Yes" to resume (skip valid existing files) or "No" to restart (clear folder and download all files)', + }); + finalResumeExport = resumePrompt.value === true; + + if (finalResumeExport) { + uxLog("log", this, c.cyan('Resume mode selected: existing files will be validated and skipped if valid')); + } else { + uxLog("log", this, c.yellow('Restart mode selected: output folder will be cleared')); + } + } + } catch (error) { + uxLog("warning", this, c.yellow(`Could not check existing files in ${exportFolder}: ${(error as Error).message}`)); + } + } + } + + // Update export options with final resume decision + exportOptions.resumeExport = finalResumeExport; - const exportResult = await new 
FilesExporter(filesPath, this.org.getConnection(), exportOptions, this).processExport(); + // Export files from org + const exportResult = await new FilesExporter( + filesPath || '', + flags['target-org'].getConnection(), + exportOptions, + this + ).processExport(); // Output message - const message = `Successfully exported files from project ${c.green(filesPath)} from org ${c.green(this.org.getUsername())}`; - uxLog(this, c.cyan(message)); + const message = `Successfully exported files from project ${c.green(filesPath)} from org ${c.green( + flags['target-org'].getUsername() + )}`; + uxLog("action", this, c.cyan(message)); + + const statsTable = humanizeObjectKeys(exportResult.stats); + uxLogTable(this, statsTable); return { outputString: message, exportResult: exportResult }; } diff --git a/src/commands/hardis/org/files/import.ts b/src/commands/hardis/org/files/import.ts index 83a9f1f44..b97b4a8ec 100644 --- a/src/commands/hardis/org/files/import.ts +++ b/src/commands/hardis/org/files/import.ts @@ -1,79 +1,94 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { isCI, uxLog } from "../../../../common/utils"; -import { FilesImporter, selectFilesWorkspace } from "../../../../common/utils/filesUtils"; -import { prompts } from "../../../../common/utils/prompts"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { humanizeObjectKeys, isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { FilesImporter, selectFilesWorkspace } from '../../../../common/utils/filesUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; -// Initialize Messages with the current plugin directory 
-Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class FilesImport extends SfCommand { + public static title = 'Import files'; -export default class FilesImport extends SfdxCommand { - public static title = "Import files"; + public static description = ` +This command facilitates the mass upload of files into Salesforce, allowing you to populate records with associated documents, images, or other file types. It's a crucial tool for data migration, content seeding, or synchronizing external file repositories with Salesforce. - public static description = `Import file attachments into a Salesforce org +Key functionalities: -See article below to see how to Export them. +- **Configuration-Driven Import:** Relies on an \`export.json\` file within a designated file export project (created using \`sf hardis:org:configure:files\`) to determine which files to import and how they should be associated with Salesforce records. +- **Interactive Project Selection:** If the file import project path is not provided via the \`--path\` flag, it interactively prompts the user to select one. +- **Overwrite Option:** The \`--overwrite\` flag allows you to replace existing files in Salesforce with local versions that have the same name. Be aware that this option doubles the number of API calls used. +- **Support for ContentVersion and Attachment:** Handles both modern Salesforce Files (ContentVersion) and older Attachments. 
+ +See this article for how to export files, which is often a prerequisite for importing: [![How to mass download notes and attachments files from a Salesforce org](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-mass-download.jpg)](https://nicolas.vuillamy.fr/how-to-mass-download-notes-and-attachments-files-from-a-salesforce-org-83a028824afd) + +
+Technical explanations + +The command's technical implementation involves: + +- **FilesImporter Class:** The core logic is encapsulated within the \`FilesImporter\` class, which orchestrates the entire import process. +- **File System Scan:** Scans the local file system within the configured project directory to identify files for import. +- **Salesforce API Interaction:** Uses Salesforce APIs (e.g., ContentVersion, Attachment) to upload files and associate them with records. +- **Configuration Loading:** Reads the \`export.json\` file to get the import configuration, including SOQL queries to identify parent records for file association. +- **Interactive Prompts:** Uses \`selectFilesWorkspace\` to allow the user to choose a file import project and \`prompts\` for confirming the overwrite behavior. +- **Error Handling:** Includes mechanisms to handle potential errors during the import process, such as API limits or file upload failures. +
`; - public static examples = ["$ sfdx hardis:org:files:import"]; + public static examples = ['$ sf hardis:org:files:import']; - protected static flagsConfig = { - path: flags.string({ - char: "p", - description: "Path to the file export project", + public static flags: any = { + path: Flags.string({ + char: 'p', + description: 'Path to the file export project', }), - overwrite: flags.boolean({ - char: "o", - description: "Override existing files (doubles the number of API calls)", + overwrite: Flags.boolean({ + char: 'f', + description: 'Override existing files (doubles the number of API calls)', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected handleOverwrite; /* jscpd:ignore-end */ public async run(): Promise { - let filesPath = this.flags.path || null; - this.handleOverwrite = this.flags?.overwrite === true; + const { flags } = await this.parse(FilesImport); + let filesPath = flags.path || null; + this.handleOverwrite = flags?.overwrite === true; // 
Identify files workspace if not defined if (filesPath == null) { - filesPath = await selectFilesWorkspace({ selectFilesLabel: "Please select a files workspace to IMPORT" }); + filesPath = await selectFilesWorkspace({ selectFilesLabel: 'Please select a files workspace to IMPORT' }); } if (!isCI) { const handleOverwriteRes = await prompts({ - type: "confirm", - name: "value", - message: "Do you want to overwrite the existing files with the same name ? (doubles the number of used API calls)", + type: 'confirm', + name: 'value', + message: + 'Do you want to overwrite the existing files with the same name ?', + description: 'Replace existing files in Salesforce with local versions (doubles the number of API calls used)', }); this.handleOverwrite = handleOverwriteRes.value; } @@ -81,11 +96,21 @@ See article below to see how to Export them. const importOptions: any = { handleOverwrite: this.handleOverwrite }; // Import files into org - const importResult = await new FilesImporter(filesPath, this.org.getConnection(), importOptions, this).processImport(); + const importResult = await new FilesImporter( + filesPath || '', + flags['target-org'].getConnection(), + importOptions, + this + ).processImport(); // Output message - const message = `Successfully imported files from project ${c.green(filesPath)} from org ${c.green(this.org.getUsername())}`; - uxLog(this, c.cyan(message)); + const message = `Successfully imported files from project ${c.green(filesPath)} to org ${c.green( + flags['target-org'].getUsername() + )}`; + uxLog("action", this, c.cyan(message)); + + const statsTable = humanizeObjectKeys(importResult.stats); + uxLogTable(this, statsTable); return { outputString: message, importResult: importResult }; } diff --git a/src/commands/hardis/org/fix/listviewmine.ts b/src/commands/hardis/org/fix/listviewmine.ts index b52f4cda8..4b5cb57c5 100644 --- a/src/commands/hardis/org/fix/listviewmine.ts +++ b/src/commands/hardis/org/fix/listviewmine.ts @@ -1,21 +1,17 @@ /* 
jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { uxLog } from "../../../../common/utils"; -import { restoreListViewMine } from "../../../../common/utils/orgConfigUtils"; -import { getConfig } from "../../../../config"; -import * as c from "chalk"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { uxLog } from '../../../../common/utils/index.js'; +import { restoreListViewMine } from '../../../../common/utils/orgConfigUtils.js'; +import { getConfig } from '../../../../config/index.js'; +import c from 'chalk'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class FixListViewMine extends SfdxCommand { - public static title = "Fix listviews with "; +export default class FixListViewMine extends SfCommand { + public static title = 'Fix listviews with '; public static description = `Fix listviews whose scope Mine has been replaced by Everything @@ -69,38 +65,31 @@ ENV PUPPETEER_EXECUTABLE_PATH="$\\{CHROMIUM_PATH}" // remove \\ before { `; public static examples = [ - "$ sfdx hardis:org:fix:listviewmine", - "$ sfdx hardis:org:fix:listviewmine --listviews Opportunity:MySubscriptions,Account:MyActivePartners", + '$ sf hardis:org:fix:listviewmine', + '$ sf hardis:org:fix:listviewmine --listviews Opportunity:MySubscriptions,Account:MyActivePartners', ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - listviews: flags.string({ - char: "l", + public static flags: any = { + listviews: Flags.string({ + char: 'l', description: `Comma-separated list of listviews following format Object:ListViewName\nExample: Contact:MyContacts,Contact:MyActiveContacts,Opportunity:MYClosedOpportunities`, }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static 
requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected debugMode = false; @@ -108,21 +97,24 @@ ENV PUPPETEER_EXECUTABLE_PATH="$\\{CHROMIUM_PATH}" // remove \\ before { /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(FixListViewMine); + this.debugMode = flags.debug || false; - uxLog(this, c.cyan("Setting back listviews to Mine instead of Everything...")); + uxLog("action", this, c.cyan('Setting back listviews to Mine instead of Everything...')); // Identify listviews to process - if (this.flags.listviews) { + if (flags.listviews) { // Use input flag - this.listViewsStrings = this.flags.listviews.split(","); + this.listViewsStrings = flags.listviews.split(','); } else { // Use property listViewsToSetToMine from .sfdx-hardis.yml config file - const config = await getConfig("project"); + const config = await getConfig('project'); this.listViewsStrings = config.listViewsToSetToMine || []; } - const result = await restoreListViewMine(this.listViewsStrings, this.org.getConnection(), { debug: this.debugMode }); + const result = await restoreListViewMine(this.listViewsStrings, flags['target-org'].getConnection(), { + debug: this.debugMode, + }); return result; } } diff --git a/src/commands/hardis/org/generate/packagexmlfull.ts b/src/commands/hardis/org/generate/packagexmlfull.ts index 94ebc297c..6b1a5c470 100644 --- a/src/commands/hardis/org/generate/packagexmlfull.ts +++ b/src/commands/hardis/org/generate/packagexmlfull.ts @@ -1,57 +1,78 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as path from "path"; -import { isCI, uxLog } from 
"../../../../common/utils"; -import { getReportDirectory } from "../../../../config"; -import { buildOrgManifest } from "../../../../common/utils/deployUtils"; -import { promptOrgUsernameDefault } from "../../../../common/utils/orgUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Connection, Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as path from 'path'; +import { isCI, uxLog } from '../../../../common/utils/index.js'; +import { getReportDirectory } from '../../../../config/index.js'; +import { buildOrgManifest } from '../../../../common/utils/deployUtils.js'; +import { promptOrgUsernameDefault } from '../../../../common/utils/orgUtils.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class GeneratePackageXmlFull extends SfCommand { + public static title = 'Generate Full Org package.xml'; -export default class GeneratePackageXmlFull extends SfdxCommand { - public static title = "Generate Full Org package.xml"; + public static description = ` +## Command Behavior - public static description = "Generates full org package.xml, including managed items"; +**Generates a comprehensive \`package.xml\` file for a Salesforce org, including all metadata components, even managed ones.** + +This command is essential for various Salesforce development and administration tasks, especially when you need a complete snapshot of an org's metadata. It goes beyond typical source tracking by including managed package components, which is crucial for understanding the full metadata footprint of an org. + +Key functionalities: + +- **Full Org Metadata Retrieval:** Connects to a specified Salesforce org (or prompts for one if not provided) and retrieves a complete list of all metadata types and their members. +- **Managed Package Inclusion:** Unlike standard source retrieval, this command explicitly includes metadata from managed packages, providing a truly comprehensive \`package.xml\`. +- **Customizable Output:** Allows you to specify the output file path for the generated \`package.xml\`. +- **Interactive Org Selection:** If no target org is specified, it interactively prompts the user to choose an org. (or use --no-prompt to skip this step) + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce Metadata API Interaction:** It leverages the Salesforce Metadata API to list all available metadata types and then retrieve all components for each type. +- **\`buildOrgManifest\` Utility:** The core logic for querying the org's metadata and constructing the \`package.xml\` is encapsulated within the \`buildOrgManifest\` utility function. +- **XML Generation:** It dynamically builds the XML structure of the \`package.xml\` file, including the \`types\` and \`members\` elements for all retrieved metadata. +- **File System Operations:** It writes the generated \`package.xml\` file to the specified output path. +- **Interactive Prompts:** Uses \`promptOrgUsernameDefault\` to guide the user in selecting the target Salesforce org. +
+`; public static examples = [ - "$ sfdx hardis:org:generate:packagexmlfull", - "$ sfdx hardis:org:generate:packagexmlfull --outputfile /tmp/packagexmlfull.xml", - "$ sfdx hardis:org:generate:packagexmlfull --targetusername nico@example.com", + '$ sf hardis:org:generate:packagexmlfull', + '$ sf hardis:org:generate:packagexmlfull --outputfile /tmp/packagexmlfull.xml', + '$ sf hardis:org:generate:packagexmlfull --target-org nico@example.com', ]; - protected static flagsConfig = { - outputfile: flags.string({ - description: "Output package.xml file", + public static flags: any = { + outputfile: Flags.string({ + description: 'Output package.xml file', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + "no-prompt": Flags.boolean({ + char: 'n', + description: "Do not prompt for org username, use the default one", + default: false, }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; protected outputFile; @@ -59,30 +80,42 @@ export default class GeneratePackageXmlFull extends SfdxCommand { /* 
jscpd:ignore-end */ public async run(): Promise { - this.outputFile = this.flags.outputfile || null; - this.debugMode = this.flags.debugMode || false; + const { flags } = await this.parse(GeneratePackageXmlFull); + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; + const noPrompt = flags['no-prompt'] ?? false; // Select org that will be used to export records - let conn = null; - let orgUsername = this.org.getUsername(); - if (!isCI) { + let conn: Connection | null = null; + let orgUsername = flags['target-org'].getUsername(); + if (orgUsername && (isCI || noPrompt)) { + conn = flags['target-org'].getConnection(); + } + else { const prevOrgUsername = orgUsername; - orgUsername = await promptOrgUsernameDefault(this, orgUsername, { devHub: false, setDefault: false }); + orgUsername = await promptOrgUsernameDefault(this, orgUsername || '', { devHub: false, setDefault: false }); if (prevOrgUsername === orgUsername) { - conn = this.org.getConnection(); + conn = flags['target-org'].getConnection(); } } - uxLog(this, c.cyan(`Generating full package xml for ${orgUsername}`)); + uxLog("action", this, c.cyan(`Generating full package xml for ${orgUsername}`)); // Calculate default output file if not provided as input if (this.outputFile == null) { const reportDir = await getReportDirectory(); - this.outputFile = path.join(reportDir, "org-package-xml-full.xml"); + this.outputFile = path.join(reportDir, 'org-package-xml-full.xml'); } await buildOrgManifest(orgUsername, this.outputFile, conn); - uxLog(this, c.cyan(`Generated full package.xml for ${orgUsername} at location ${c.green(this.outputFile)}`)); + uxLog("action", this, c.cyan(`Generated full package.xml for ${orgUsername}`)); + uxLog("log", this, c.grey(`Output file: ${c.green(this.outputFile)}`)); + + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(this.outputFile, 'Full Org package.xml', "report"); + } else { + 
WebSocketClient.requestOpenFile(this.outputFile); + } // Return an object to be displayed with --json return { outputString: `Generated full package.xml for ${orgUsername}`, outputFile: this.outputFile }; diff --git a/src/commands/hardis/org/monitor/all.ts b/src/commands/hardis/org/monitor/all.ts index c94f26d15..059790406 100644 --- a/src/commands/hardis/org/monitor/all.ts +++ b/src/commands/hardis/org/monitor/all.ts @@ -1,20 +1,115 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execCommand, uxLog } from "../../../../common/utils"; -import { getConfig, getEnvVar } from "../../../../config"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execCommand, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { CONSTANTS, getConfig, getEnvVar } from '../../../../config/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class MonitorAll extends SfCommand { + public static title = 'Monitor org'; -export default class MonitorAll extends SfdxCommand { - public static title = "Monitor org"; + public static monitoringCommandsDefault = [ + { + key: 'AUDIT_TRAIL', + title: 'Detect suspect setup actions in major org', + command: 'sf hardis:org:diagnose:audittrail', + frequency: 'daily', + }, + { + key: 'LEGACY_API', + title: 'Detect calls to deprecated API versions', + command: 'sf hardis:org:diagnose:legacyapi', + frequency: 'daily', + }, + { + key: 'ORG_LIMITS', + title: 'Detect if org limits are close to be reached', + command: 'sf hardis:org:monitor:limits', + frequency: 'daily', + }, + { + key: 'UNSECURED_CONNECTED_APPS', + title: 'Detect unsecured Connected Apps in an org', + command: 'sf hardis:org:diagnose:unsecure-connected-apps', + frequency: 'daily', + }, + { + key: 'LICENSES', + title: 'Extract licenses information', + command: 'sf hardis:org:diagnose:licenses', + frequency: 'weekly', + }, + { + key: 'LINT_ACCESS', + title: 'Detect custom elements with no access rights defined in permission sets', + command: 'sf hardis:lint:access', + frequency: 'weekly', + }, + { + key: 'UNUSED_LICENSES', + title: 'Detect permission set licenses that are assigned to users that do not need them', + command: 'sf hardis:org:diagnose:unusedlicenses', + frequency: 'weekly', + }, + { + key: 'UNUSED_USERS', + title: 'Detect active users without recent logins', + command: 'sf hardis:org:diagnose:unusedusers', + frequency: 'weekly', + }, + { + key: 'ACTIVE_USERS', + title: 'Detect active users with recent logins', + command: 'sf hardis:org:diagnose:unusedusers --returnactiveusers', + frequency: 'weekly', + }, + { + key: 'ORG_INFO', + title: 'Get org info + SF instance info + next major upgrade date', + command: 'sf hardis:org:diagnose:instanceupgrade', + frequency: 'weekly', + }, + { + key: 'RELEASE_UPDATES', + title: 
'Gather warnings about incoming and overdue Release Updates', + command: 'sf hardis:org:diagnose:releaseupdates', + frequency: 'weekly', + }, + { + key: 'UNUSED_METADATAS', + title: 'Detect custom labels and custom permissions that are not in use', + command: 'sf hardis:lint:unusedmetadatas', + frequency: 'weekly', + }, + { + key: 'UNUSED_APEX_CLASSES', + title: 'Detect unused Apex classes in an org', + command: 'sf hardis:org:diagnose:unused-apex-classes', + frequency: 'weekly', + }, + { + key: 'CONNECTED_APPS', + title: 'Detect unused Connected Apps in an org', + command: 'sf hardis:org:diagnose:unused-connected-apps', + frequency: 'weekly', + }, + { + key: 'METADATA_STATUS', + title: 'Detect inactive metadata', + command: 'sf hardis:lint:metadatastatus', + frequency: 'weekly', + }, + { + key: 'MISSING_ATTRIBUTES', + title: 'Detect missing description on custom field', + command: 'sf hardis:lint:missingattributes', + frequency: 'weekly', + }, + ]; public static description = `Monitor org, generate reports and sends notifications @@ -35,49 +130,45 @@ Example in env var: MONITORING_DISABLE=METADATA_STATUS,MISSING_ATTRIBUTES,UNUSED_METADATAS \`\`\` -A [default list of monitoring commands](https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/#monitoring-commands) is used, if you want to override it you can define property **monitoringCommands** in your .sfdx-hardis.yml file +A [default list of monitoring commands](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-home/#monitoring-commands) is used, if you want to override it you can define property **monitoringCommands** in your .sfdx-hardis.yml file Example: \`\`\`yaml monitoringCommands: - title: My Custom command - command: sfdx my:custom:command + command: sf my:custom:command - title: My Custom command 2 - command: sfdx my:other:custom:command + command: sf my:other:custom:command \`\`\` You can force the daily run of all commands by defining env var \`MONITORING_IGNORE_FREQUENCY=true\` +The default list of 
commands is the following: + +${this.getDefaultCommandsMarkdown()} + `; - public static examples = ["$ sfdx hardis:org:monitor:all"]; + public static examples = ['$ sf hardis:org:monitor:all']; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; - - // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; + public static requiresProject = true; // Trigger notification(s) to MsTeams channel protected static triggerNotification = true; @@ -86,138 +177,97 @@ You can force the daily run of all commands by defining env var \`MONITORING_IGN /* jscpd:ignore-end */ + public static getDefaultCommandsMarkdown() { + const mdLines = [ + "| Key | Description | Command | Frequency |", + "| :---: | :---- | :---- | :-----: |", + + ]; + for (const cmd of MonitorAll.monitoringCommandsDefault) { + const commandDocUrl = `${CONSTANTS.DOC_URL_ROOT}/${cmd.command.split(" 
")[1].replaceAll(":", "/")}`; + mdLines.push(`| [${cmd.key}](${commandDocUrl}) | ${cmd.title} | [${cmd.command}](${commandDocUrl}) | ${cmd.frequency} |`); + } + return mdLines.join("\n"); + } + public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(MonitorAll); + this.debugMode = flags.debug || false; // Build target org full manifest - uxLog(this, c.cyan("Running monitoring scripts for org " + c.bold(this.org.getConnection().instanceUrl)) + " ..."); - - const monitoringCommandsDefault = [ - { - key: "AUDIT_TRAIL", - title: "Detect suspect setup actions in major org", - command: "sfdx hardis:org:diagnose:audittrail", - frequency: "daily", - }, - { - key: "LEGACY_API", - title: "Detect calls to deprecated API versions", - command: "sfdx hardis:org:diagnose:legacyapi", - frequency: "daily", - }, - { - key: "ORG_LIMITS", - title: "Detect if org limits are close to be reached", - command: "sfdx hardis:org:monitor:limits", - frequency: "daily", - }, - { - key: "LICENSES", - title: "Extract licenses information", - command: "sfdx hardis:org:diagnose:licenses", - frequency: "weekly", - }, - { - key: "LINT_ACCESS", - title: "Detect custom elements with no access rights defined in permission sets", - command: "sfdx hardis:lint:access", - frequency: "weekly", - }, - { - key: "UNUSED_LICENSES", - title: "Detect permission set licenses that are assigned to users that do not need them", - command: "sfdx hardis:org:diagnose:unusedlicenses", - frequency: "weekly", - }, - { - key: "UNUSED_USERS", - title: "Detect active users without recent logins", - command: "sfdx hardis:org:diagnose:unusedusers", - frequency: "weekly", - }, - { - key: "ACTIVE_USERS", - title: "Detect active users with recent logins", - command: "sfdx hardis:org:diagnose:unusedusers --returnactiveusers", - frequency: "weekly", - }, - { - key: "ORG_INFO", - title: "Get org info + SF instance info + next major upgrade date", - command: "sfdx 
hardis:org:diagnose:instanceupgrade", - frequency: "weekly", - }, - { - key: "UNUSED_METADATAS", - title: "Detect custom labels and custom permissions that are not in use", - command: "sfdx hardis:lint:unusedmetadatas", - frequency: "weekly", - }, - { - key: "METADATA_STATUS", - title: "Detect inactive metadata", - command: "sfdx hardis:lint:metadatastatus", - frequency: "weekly", - }, - { - key: "MISSING_ATTRIBUTES", - title: "Detect missing description on custom field", - command: "sfdx hardis:lint:missingattributes", - frequency: "weekly", - }, - ]; - const config = await getConfig("user"); - const commands = monitoringCommandsDefault.concat(config.monitoringCommands || []); - const monitoringDisable = config.monitoringDisable ?? (process.env?.MONITORING_DISABLE ? process.env.MONITORING_DISABLE.split(",") : []); + uxLog( + "action", + this, + c.cyan('Running monitoring scripts for org ' + c.bold(flags['target-org'].getConnection().instanceUrl)) + ' ...' + ); + + const config = await getConfig('user'); + const commands = MonitorAll.monitoringCommandsDefault.concat(config.monitoringCommands || []); + const monitoringDisable = + config.monitoringDisable ?? (process.env?.MONITORING_DISABLE ? 
process.env.MONITORING_DISABLE.split(',') : []); let success = true; - const commandsSummary = []; + const commandsSummary: any[] = []; for (const command of commands) { if (monitoringDisable.includes(command.key)) { - uxLog(this, c.grey(`Skipped command ${c.bold(command.key)} according to custom configuration`)); + uxLog("log", this, c.grey(`Skipped command ${c.bold(command.key)} according to custom configuration`)); continue; } - if (command?.frequency === "weekly" && new Date().getDay() !== 6 && getEnvVar("MONITORING_IGNORE_FREQUENCY") !== "true") { - uxLog(this, c.grey(`Skipped command ${c.bold(command.key)} as its frequency is defined as weekly and we are not Saturday`)); + if ( + command?.frequency === 'weekly' && + new Date().getDay() !== 6 && + getEnvVar('MONITORING_IGNORE_FREQUENCY') !== 'true' + ) { + uxLog( + "log", + this, + c.grey(`Skipped command ${c.bold(command.key)} as its frequency is defined as weekly and we are not Saturday`) + ); continue; } // Run command - uxLog(this, c.cyan(`Running monitoring command ${c.bold(command.title)} (key: ${c.bold(command.key)})`)); + uxLog("action", this, c.cyan(`Running monitoring command ${c.bold(command.title)} (key: ${c.bold(command.key)})`)); try { const execCommandResult = await execCommand(command.command, this, { fail: false, output: true }); if (execCommandResult.status === 0) { - uxLog(this, c.green(`Command ${c.bold(command.title)} has been run successfully`)); + uxLog("success", this, c.green(`Command ${c.bold(command.title)} has been run successfully`)); } else { success = false; - uxLog(this, c.yellow(`Command ${c.bold(command.title)} has failed`)); + uxLog("warning", this, c.yellow(`Command ${c.bold(command.title)} has failed`)); } commandsSummary.push({ title: command.title, - status: execCommandResult.status === 0 ? "success" : "failure", + status: execCommandResult.status === 0 ? 
'success' : 'failure', command: command.command, }); } catch (e) { // Handle unexpected failure success = false; - uxLog(this, c.yellow(`Command ${c.bold(command.title)} has failed !\n${e.message}`)); + uxLog("warning", this, c.yellow(`Command ${c.bold(command.title)} has failed !\n${(e as Error).message}`)); commandsSummary.push({ title: command.title, - status: "error", + status: 'error', command: command.command, }); } } - uxLog(this, c.cyan("Summary of monitoring scripts")); - console.table(commandsSummary); - uxLog(this, c.cyan("You can check details in reports in Job Artifacts")); + uxLog("action", this, c.cyan('Summary of monitoring scripts')); + uxLogTable(this, commandsSummary); + uxLog("log", this, c.grey('You can check details in reports in Job Artifacts')); - uxLog(this, c.yellow("To know more about sfdx-hardis monitoring, please check https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/")); + uxLog( + "warning", + this, + c.yellow( + `To know more about sfdx-hardis monitoring, please check ${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-home/` + ) + ); // Exit code is 1 if monitoring detected stuff if (success === false) { process.exitCode = 1; } - return { outputString: "Monitoring processed on org " + this.org.getConnection().instanceUrl }; + return { outputString: 'Monitoring processed on org ' + flags['target-org'].getConnection().instanceUrl }; } } diff --git a/src/commands/hardis/org/monitor/backup.ts b/src/commands/hardis/org/monitor/backup.ts index e38c2f84c..02fd5ece5 100644 --- a/src/commands/hardis/org/monitor/backup.ts +++ b/src/commands/hardis/org/monitor/backup.ts @@ -1,80 +1,169 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { buildOrgManifest } from "../../../../common/utils/deployUtils"; -import 
{ execCommand, filterPackageXml, uxLog } from "../../../../common/utils"; -import { MetadataUtils } from "../../../../common/metadata-utils"; -import { CONSTANTS } from "../../../../config"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { MessageAttachment } from "@slack/web-api"; -import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from "../../../../common/utils/notifUtils"; -import { generateCsvFile, generateReportPath } from "../../../../common/utils/filesUtils"; -import { parsePackageXmlFile, writePackageXmlFile } from "../../../../common/utils/xmlUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class MonitorBackup extends SfdxCommand { - public static title = "Backup DX sources"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { buildOrgManifest } from '../../../../common/utils/deployUtils.js'; +import { execCommand, filterPackageXml, uxLog } from '../../../../common/utils/index.js'; +import { MetadataUtils } from '../../../../common/metadata-utils/index.js'; +import { CONSTANTS, getApiVersion, getConfig, getEnvVar } from '../../../../config/index.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { MessageAttachment } from '@slack/web-api'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import { generateCsvFile, 
generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { countPackageXmlItems, parsePackageXmlFile, writePackageXmlFile } from '../../../../common/utils/xmlUtils.js'; +import Project2Markdown from '../../doc/project2markdown.js'; +import MkDocsToSalesforce from '../../doc/mkdocs-to-salesforce.js'; +import MkDocsToCloudflare from '../../doc/mkdocs-to-cf.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; +import { makeFileNameGitCompliant } from '../../../../common/utils/gitUtils.js'; +import { updateSfdxProjectApiVersion } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class MonitorBackup extends SfCommand { + public static title = 'Backup DX sources'; public static description = `Retrieve sfdx sources in the context of a monitoring backup +The command exists in 2 modes: filtered(default & recommended) and full. + +## Filtered mode (default, better performances) + Automatically skips metadatas from installed packages with namespace. You can remove more metadata types from backup, especially in case you have too many metadatas and that provokes a crash, using: - Manual update of \`manifest/package-skip-items.xml\` config file (then commit & push in the same branch) + - Works with full wildcard (\`*\`) , named metadata (\`Account.Name\`) or partial wildcards names (\`pi__*\` , \`*__dlm\` , or \`prefix*suffix\`) + - Environment variable MONITORING_BACKUP_SKIP_METADATA_TYPES (example: \`MONITORING_BACKUP_SKIP_METADATA_TYPES=CustomLabel,StaticResource,Translation\`): that will be applied to all monitoring branches. -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-metadata-backup/) and can output Grafana, Slack and MsTeams Notifications. 
+## Full mode + +Activate it with **--full** parameter, or variable MONITORING_BACKUP_MODE_FULL=true + +Ignores filters (namespaced items & manifest/package-skip-items.xml) to retrieve ALL metadatas, including those you might not care about (reports, translations...) + +As we can retrieve only 10000 files by call, the list of all metadatas will be chunked to make multiple calls (and take more time than filtered mode) + +- if you use \`--full-apply-filters\` , manifest/package-skip-items.xml and MONITORING_BACKUP_SKIP_METADATA_TYPES filters will be applied anyway +- if you use \`--exclude-namespaces\` , namespaced items will be ignored + +_With both of those options, it is as if you were not using --full, but with chunked metadata download_ + +## In CI/CD + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-metadata-backup/) and can output Grafana, Slack and MsTeams Notifications. + +## Troubleshooting + +If you have unknown errors (it happens !), you can investigate using the full command with smaller chunks. + +Example: \`sf hardis:org:monitor:backup --full --exclude-namespaces --full-apply-filters --max-by-chunk 500\` + +It will allow you to identify the responsible metadata and ignore it using package-skip-items.xml or MONITORING_BACKUP_SKIP_METADATA_TYPES env variable. + +## Documentation + +[Doc generation (including visual flows)](${CONSTANTS.DOC_URL_ROOT}/hardis/doc/project2markdown/) is triggered at the end of the command.
+ +If you want to also upload HTML Documentation on your Salesforce Org as static resource, use variable **SFDX_HARDIS_DOC_DEPLOY_TO_ORG="true"** + +If you want to also upload HTML Documentation on Cloudflare, use variable **SFDX_HARDIS_DOC_DEPLOY_TO_CLOUDFLARE="true"** + +- If you want to generate the documentation in multiple languages, define variable SFDX_DOC_LANGUAGES (ex: SFDX_DOC_LANGUAGES=en,fr,de) +- You can define one Cloudflare site by language, for example with the following variables: + - CLOUDFLARE_PROJECT_NAME_EN=cloudity-demo-english + - CLOUDFLARE_PROJECT_NAME_FR=cloudity-demo-french + - CLOUDFLARE_PROJECT_NAME_DE=cloudity-demo-german + +If Flow history doc always display a single state, you probably need to update your workflow configuration: + +- on Gitlab: Env variable [\`GIT_FETCH_EXTRA_FLAGS: --depth 10000\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.gitlab-ci.yml#L11) +- on GitHub: [\`fetch-depth: 0\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/.github/workflows/org-monitoring.yml#L58) +- on Azure: [\`fetchDepth: "0"\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/azure-pipelines.yml#L39) +- on Bitbucket: [\`step: clone: depth: full\`](https://github.com/hardisgroupcom/sfdx-hardis/blob/main/defaults/monitoring/bitbucket-pipelines.yml#L18) `; - public static examples = ["$ sfdx hardis:org:monitor:backup"]; + public static examples = [ + '$ sf hardis:org:monitor:backup', + '$ sf hardis:org:monitor:backup --full', + '$ sf hardis:org:monitor:backup --full --exclude-namespaces', + '$ sf hardis:org:monitor:backup --full --exclude-namespaces --full-apply-filters' + ]; - protected static flagsConfig = { - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. 
Must end with .csv", + public static flags: any = { + full: Flags.boolean({ + description: 'Dot not take in account filtering using package-skip-items.xml and MONITORING_BACKUP_SKIP_METADATA_TYPES. Efficient but much much slower !', }), - debug: flags.boolean({ - char: "d", + "max-by-chunk": Flags.integer({ + char: "m", + default: 3000, + description: 'If mode --full is activated, maximum number of metadatas in a package.xml chunk', + }), + "exclude-namespaces": Flags.boolean({ + char: "e", + default: false, + description: 'If mode --full is activated, exclude namespaced metadatas', + }), + "full-apply-filters": Flags.boolean({ + char: "z", default: false, - description: messages.getMessage("debugMode"), + description: 'If mode --full is activated, apply filters of manifest/package-skip-items.xml and MONITORING_BACKUP_SKIP_METADATA_TYPES anyway', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + "start-chunk": Flags.integer({ + default: 1, + description: 'Use this parameter to troubleshoot a specific chunk. It will be used as the first chunk to retrieve', }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + "skip-doc": Flags.boolean({ + default: false, + description: 'Skip the generation of project documentation at the end of the command', }), + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; - - // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; + public static requiresProject = true; // Trigger notification(s) to MsTeams channel protected static triggerNotification = true; - protected diffFiles = []; - protected diffFilesSimplified = []; + protected diffFiles: any[] = []; + protected diffFilesSimplified: any[] = []; + protected full: boolean = false; + protected maxByChunk: number = 3000; + protected startChunk: number = 1; + protected excludeNamespaces: boolean = false; + protected fullApplyFilters: boolean = false; + protected skipDoc: boolean = false; + + protected packageXmlToRemove: string | null = null; + protected extractPackageXmlChunks: any[] = []; + protected currentPackage: any = {}; + protected currentPackageLen = 0; + + protected namespaces: string[]; + protected installedPackages: any[]; protected outputFile; protected outputFilesRes: any = {}; protected debugMode = false; @@ -82,92 +171,56 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co /* jscpd:ignore-end */ public async run(): Promise { - this.outputFile = this.flags.outputfile || null; - 
this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(MonitorBackup); + this.full = flags.full || (process.env?.MONITORING_BACKUP_MODE_FULL === "true" ? true : false); + this.maxByChunk = flags["max-by-chunk"] || 3000; + this.startChunk = flags["start-chunk"] || 1; + this.excludeNamespaces = flags["exclude-namespaces"] === true ? true : false; + this.fullApplyFilters = flags["full-apply-filters"] === true ? true : false; + this.skipDoc = flags["skip-doc"] === true ? true : false; + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; - // Build target org full manifest - uxLog(this, c.cyan("Building full manifest for org " + c.bold(this.org.getConnection().instanceUrl)) + " ..."); - const packageXmlFullFile = "manifest/package-all-org-items.xml"; - await buildOrgManifest("", packageXmlFullFile, this.org.getConnection()); + // Update apiVersion if necessary + await updateSfdxProjectApiVersion(); - // Check if we have package-skip_items.xml - const packageXmlBackUpItemsFile = "manifest/package-backup-items.xml"; - const packageXmlSkipItemsFile = "manifest/package-skip-items.xml"; - let packageXmlToRemove = null; - if (fs.existsSync(packageXmlSkipItemsFile)) { - uxLog(this, c.grey(`${packageXmlSkipItemsFile} has been found and will be use to reduce the content of ${packageXmlFullFile} ...`)); - packageXmlToRemove = packageXmlSkipItemsFile; - } - - // Add more metadata types to ignore using global variable MONITORING_BACKUP_SKIP_METADATA_TYPES - const additionalSkipMetadataTypes = process.env?.MONITORING_BACKUP_SKIP_METADATA_TYPES; - if (additionalSkipMetadataTypes) { - uxLog( - this, - c.grey( - `En var MONITORING_BACKUP_SKIP_METADATA_TYPES has been found and will also be used to reduce the content of ${packageXmlFullFile} ...`, - ), - ); - let packageSkipItems = {}; - if (fs.existsSync(packageXmlToRemove)) { - packageSkipItems = await parsePackageXmlFile(packageXmlToRemove); - } - for (const metadataType of 
additionalSkipMetadataTypes.split(",")) { - packageSkipItems[metadataType] = ["*"]; - } - packageXmlToRemove = "manifest/package-skip-items-dynamic-do-not-update-manually.xml"; - await writePackageXmlFile(packageXmlToRemove, packageSkipItems); - } + // Build target org full manifest + uxLog( + "action", + this, + c.cyan('Building full manifest for org ' + c.bold(flags['target-org'].getConnection().instanceUrl)) + ' ...' + ); + const packageXmlFullFile = 'manifest/package-all-org-items.xml'; + await buildOrgManifest('', packageXmlFullFile, flags['target-org'].getConnection()); // List namespaces used in the org - const namespaces = []; - const installedPackages = await MetadataUtils.listInstalledPackages(null, this); - for (const installedPackage of installedPackages) { - if (installedPackage?.SubscriberPackageNamespace !== "" && installedPackage?.SubscriberPackageNamespace != null) { - namespaces.push(installedPackage.SubscriberPackageNamespace); + this.namespaces = []; + this.installedPackages = await MetadataUtils.listInstalledPackages(null, this); + for (const installedPackage of this.installedPackages) { + if (installedPackage?.SubscriberPackageNamespace !== '' && installedPackage?.SubscriberPackageNamespace != null) { + this.namespaces.push(installedPackage.SubscriberPackageNamespace); } } - // Apply filters to package.xml - uxLog(this, c.cyan(`Reducing content of ${packageXmlFullFile} to generate ${packageXmlBackUpItemsFile} ...`)); - await filterPackageXml(packageXmlFullFile, packageXmlBackUpItemsFile, { - removeNamespaces: namespaces, - removeStandard: true, - removeFromPackageXmlFile: packageXmlToRemove, - updateApiVersion: CONSTANTS.API_VERSION, - }); + // Create force-app/main/default if not exists + await fs.ensureDir(path.join(process.cwd(), 'force-app', 'main', 'default')); - // Retrieve sfdx sources in local git repo - uxLog(this, c.cyan(`Run the retrieve command for retrieving filtered metadatas ...`)); - try { - await execCommand(`sfdx 
force:source:retrieve -x ${packageXmlBackUpItemsFile} -u ${this.org.getUsername()} --wait 120`, this, { - fail: true, - output: true, - debug: this.debugMode, - }); - } catch (e) { - const failedPackageXmlContent = await fs.readFile(packageXmlBackUpItemsFile, "utf8"); - uxLog(this, c.yellow("BackUp package.xml that failed to be retrieved:\n" + c.grey(failedPackageXmlContent))); - uxLog( - this, - c.red( - c.bold( - "Crash during backup. You may exclude more metadata types by updating file manifest/package-skip-items.xml then commit and push it, or use variable NOTIFICATIONS_DISABLE", - ), - ), - ); - uxLog(this, c.yellow(c.bold("See troubleshooting doc at https://sfdx-hardis.cloudity.com/salesforce-monitoring-config-home/#troubleshooting"))); - throw e; + // Check if we have package-skip_items.xml + if (this.full) { + await this.extractMetadatasFull(packageXmlFullFile, flags); + } + else { + await this.extractMetadatasFiltered(packageXmlFullFile, flags); } // Write installed packages - uxLog(this, c.cyan(`Write installed packages ...`)); - const installedPackagesLog = []; - const packageFolder = path.join(process.cwd(), "installedPackages"); + uxLog("action", this, c.cyan(`Write installed packages ...`)); + const installedPackagesLog: any[] = []; + const packageFolder = path.join(process.cwd(), 'installedPackages'); await fs.ensureDir(packageFolder); - for (const installedPackage of installedPackages) { - const fileName = (installedPackage.SubscriberPackageName || installedPackage.SubscriberPackageId) + ".json"; - const fileNameNoSep = fileName.replace(/\//g, "_").replace(/:/g, "_"); // Handle case when package name contains slashes or colon + for (const installedPackage of this.installedPackages) { + const fileName = (installedPackage.SubscriberPackageName || installedPackage.SubscriberPackageId) + '.json'; + const fileNameNoSep = makeFileNameGitCompliant(fileName); // Handle case when package name contains slashes or colon delete installedPackage.Id; // Not 
needed for diffs await fs.writeFile(path.join(packageFolder, fileNameNoSep), JSON.stringify(installedPackage, null, 2)); const installedPackageLog = { @@ -178,63 +231,79 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co SubscriberPackageVersionNumber: installedPackage.SubscriberPackageVersionNumber, }; installedPackagesLog.push(installedPackageLog); + // Clean repo: Remove previous versions of file names + const fileNameNoSepBad1 = fileName.replace(/\//g, '_').replace(/:/g, '_'); + const fileNameNoSepBad2 = fileName; + for (const oldFileName of [fileNameNoSepBad1, fileNameNoSepBad2]) { + if (oldFileName === fileNameNoSep) { + continue; + } + const oldFilePath = path.join(packageFolder, oldFileName); + if (fs.existsSync(oldFilePath)) { + await fs.remove(oldFilePath); + } + } + } this.diffFiles = await MetadataUtils.listChangedFiles(); // Write output file if (this.diffFiles.length > 0) { - const severityIconLog = getSeverityIcon("log"); - this.outputFile = await generateReportPath("backup-updated-files", this.outputFile); + const filesHumanUnformatted = MetadataUtils.getMetadataPrettyNames(this.diffFiles.map((diffFile) => diffFile.path), false); + const severityIconLog = getSeverityIcon('log'); + this.outputFile = await generateReportPath('backup-updated-files', this.outputFile); this.diffFilesSimplified = this.diffFiles.map((diffFile) => { return { - File: diffFile.path.replace("force-app/main/default/", ""), - ChangeType: diffFile.index === "?" ? "A" : diffFile.index, - WorkingDir: diffFile.working_dir === "?" ? "" : diffFile.working_dir, - PrevName: diffFile?.from || "", - severity: "log", + File: diffFile.path.replace('force-app/main/default/', ''), + ChangeType: diffFile.index === '?' ? 'A' : diffFile.index, + FileHuman: filesHumanUnformatted.get(diffFile.path) || diffFile.path.replace('force-app/main/default/', ''), + WorkingDir: diffFile.working_dir === '?' ? 
'' : diffFile.working_dir, + PrevName: diffFile?.from || '', + severity: 'log', severityIcon: severityIconLog, }; }); - this.outputFilesRes = await generateCsvFile(this.diffFilesSimplified, this.outputFile); + this.outputFilesRes = await generateCsvFile(this.diffFilesSimplified, this.outputFile, { fileTitle: 'Updated Metadatas' }); } // Build notifications - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No updates detected in ${orgMarkdown}`; let notifAttachments: MessageAttachment[] = []; if (this.diffFiles.length > 0) { - notifSeverity = "info"; + const filesHumanFormatted = MetadataUtils.getMetadataPrettyNames(this.diffFiles.map((diffFile) => diffFile.path), true); + notifSeverity = 'info'; notifText = `Updates detected in ${orgMarkdown}`; notifAttachments = [ { text: this.diffFiles .map((diffFile) => { - let flag = ""; - if (diffFile.index && diffFile.index !== " ") { - flag = ` (${diffFile.index === "?" ? "A" : diffFile.index})`; + let flag = ''; + if (diffFile.index && diffFile.index !== ' ') { + flag = ` (${diffFile.index === '?' ? 
'A' : diffFile.index})`; } - const line = `• ${diffFile.path.replace("force-app/main/default/", "")}` + flag; + const line = `• ${filesHumanFormatted.get(diffFile.path)}` + flag; return line; }) - .join("\n"), + .join('\n'), }, ]; } else { - uxLog(this, c.grey("No updated metadata for today's backup :)")); + uxLog("log", this, c.grey("No updated metadata for today's backup :)")); } // Post notifications - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "BACKUP", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'BACKUP', text: notifText, buttons: notifButtons, attachments: notifAttachments, severity: notifSeverity, - sideImage: "backup", + sideImage: 'backup', attachedFiles: this.outputFilesRes.xlsxFile ? [this.outputFilesRes.xlsxFile] : [], logElements: this.diffFilesSimplified, data: { @@ -246,6 +315,244 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co }, }); - return { outputString: "BackUp processed on org " + this.org.getConnection().instanceUrl }; + // Run project documentation generation + if (this.skipDoc !== true) { + try { + const docLanguages = (getEnvVar('SFDX_DOC_LANGUAGES') || getEnvVar('PROMPTS_LANGUAGE') || 'en').split(",").reverse(); // Can be 'fr,en,de' for example + const prevPromptsLanguage = getEnvVar('PROMPTS_LANGUAGE') || 'en'; + for (const langKey of docLanguages) { + uxLog("action", this, c.cyan("Generating doc in language " + c.bold(langKey))); + process.env.PROMPTS_LANGUAGE = langKey; + await Project2Markdown.run(["--diff-only", "--with-history"]); + uxLog("action", this, c.cyan("Documentation generated from retrieved sources. 
If you want to skip it, use option --skip-doc")); + const config = await getConfig("user"); + if (config.docDeployToOrg || process.env?.SFDX_HARDIS_DOC_DEPLOY_TO_ORG === "true") { + await MkDocsToSalesforce.run(["--type", "Monitoring"]); + } + else if (config.docDeployToCloudflare || process.env?.SFDX_HARDIS_DOC_DEPLOY_TO_CLOUDFLARE === "true") { + await MkDocsToCloudflare.run([]); + } + } + process.env.PROMPTS_LANGUAGE = prevPromptsLanguage; + } catch (e: any) { + uxLog("warning", this, c.yellow("Error while generating project documentation " + e.message)); + uxLog("log", this, c.grey(e.stack)); + } + } + + return { outputString: 'BackUp processed on org ' + flags['target-org'].getConnection().instanceUrl }; + } + + private async extractMetadatasFull(packageXmlFullFile: string, flags) { + let packageXmlToExtract = packageXmlFullFile; + // Filter namespaces if requested in the command + if (this.excludeNamespaces || process.env?.SFDX_HARDIS_BACKUP_EXCLUDE_NAMESPACES === "true" || this.fullApplyFilters) { + packageXmlToExtract = await this.buildFilteredManifestsForRetrieve(packageXmlFullFile); + const packageXmlFullFileWithoutNamespace = 'manifest/package-all-org-items-except-namespaces.xml'; + const namespacesToFilter = (this.excludeNamespaces || process.env?.SFDX_HARDIS_BACKUP_EXCLUDE_NAMESPACES === "true") ? 
this.namespaces : []; + await filterPackageXml(packageXmlFullFile, packageXmlFullFileWithoutNamespace, { + removeNamespaces: namespacesToFilter, + removeStandard: this.fullApplyFilters, + removeFromPackageXmlFile: this.packageXmlToRemove, + updateApiVersion: getApiVersion(), + }); + packageXmlToExtract = packageXmlFullFileWithoutNamespace; + } + + // Build packageXml chunks + const packageElements = await parsePackageXmlFile(packageXmlToExtract); + + // Handle predefined chunks + const predefinedChunkTypes = [ + { types: ["CustomLabel"], memberMode: "*" }, + // { types: ["CustomObject", "Profile"] }, + { types: ["SharingRules", "SharingOwnerRule", "SharingCriteriaRule"] }, + { types: ["Workflow", "WorkflowAlert", "WorkflowFieldUpdate", "WorkflowRule"] } + ] + for (const predefinedChunkType of predefinedChunkTypes) { + if (predefinedChunkType.types.some(mdType => Object.keys(packageElements).includes(mdType))) { + for (const mdType of predefinedChunkType.types) { + if (predefinedChunkType.memberMode === "*") { + this.currentPackage[mdType] = "*"; + } + else { + this.currentPackage[mdType] = packageElements[mdType]; + } + delete packageElements[mdType]; + } + this.manageAddCurrentPackageInChunks(); + } + } + + // Handle other chunks + for (const metadataType of Object.keys(packageElements)) { + const members = packageElements[metadataType]; + // If current chunk would be too big, store it then create a new one + if ((this.currentPackageLen + members.length) > this.maxByChunk) { + this.manageAddCurrentPackageInChunks(); + } + // If a metadata type has too many members for a single chunk: split it into chunks ! 
+ if (members.length > this.maxByChunk) { + this.manageAddCurrentPackageInChunks(); + const memberChunks = Array.from({ length: Math.ceil(members.length / this.maxByChunk) }, (_, i) => members.slice(i * this.maxByChunk, (i + 1) * this.maxByChunk)); + for (const memberChunk of memberChunks) { + this.currentPackage[metadataType] = memberChunk; + this.manageAddCurrentPackageInChunks(); + } + } + // Add to current chunk + else { + this.currentPackage[metadataType] = members; + this.currentPackageLen += members.length + } + } + this.manageAddCurrentPackageInChunks(); + + // Write chunks into package.xml files + let pos = 0; + const packageXmlChunkFiles: string[] = []; + const chunksFolder = path.join("manifest", "chunks"); + await fs.ensureDir(chunksFolder); + uxLog("action", this, c.cyan(`Building package.xml files for ${this.extractPackageXmlChunks.length} chunks...`)); + for (const packageChunk of this.extractPackageXmlChunks) { + pos++; + const packageChunkFileName = path.join(chunksFolder, "chunk-" + pos + ".xml"); + await writePackageXmlFile(packageChunkFileName, packageChunk); + packageXmlChunkFiles.push(packageChunkFileName); + uxLog("log", this, c.grey(`Chunk ${pos} -> ${packageChunkFileName}:`)) + for (const mdType of Object.keys(packageChunk)) { + uxLog("log", this, c.grey(`- ${mdType} (${packageChunk?.[mdType]?.length || 0} elements)`)); + } + uxLog("other", this, ""); + } + + // Retrieve metadatas for each chunk + uxLog("action", this, c.cyan(`Starting the retrieve of ${packageXmlChunkFiles.length} chunks...`)); + let posChunk = 0; + for (const packageXmlChunkFile of packageXmlChunkFiles) { + posChunk++; + if (this.startChunk > posChunk) { + uxLog("log", this, c.grey(`Skipping chunk ${posChunk} (${packageXmlChunkFile}) according to --start-chunk option`)); + continue; + } + await this.retrievePackageXml(packageXmlChunkFile, flags); + } + } + + private manageAddCurrentPackageInChunks() { + if (Object.keys(this.currentPackage).length > 0) { + 
this.extractPackageXmlChunks.push(Object.assign({}, this.currentPackage)); + this.currentPackage = {}; + this.currentPackageLen = 0; + } + } + + private async extractMetadatasFiltered(packageXmlFullFile: string, flags) { + const packageXmlBackUpItemsFile = await this.buildFilteredManifestsForRetrieve(packageXmlFullFile); + + // Apply filters to package.xml + uxLog("action", this, c.cyan(`Reducing content of ${packageXmlFullFile} to generate ${packageXmlBackUpItemsFile} ...`)); + await filterPackageXml(packageXmlFullFile, packageXmlBackUpItemsFile, { + removeNamespaces: this.namespaces, + removeStandard: true, + removeFromPackageXmlFile: this.packageXmlToRemove, + updateApiVersion: getApiVersion(), + }); + + // Retrieve sfdx sources in local git repo + await this.retrievePackageXml(packageXmlBackUpItemsFile, flags); + } + + private async buildFilteredManifestsForRetrieve(packageXmlFullFile: string) { + const packageXmlBackUpItemsFile = 'manifest/package-backup-items.xml'; + const packageXmlSkipItemsFile = 'manifest/package-skip-items.xml'; + if (fs.existsSync(packageXmlSkipItemsFile)) { + uxLog( + "log", + this, + c.grey( + `${packageXmlSkipItemsFile} has been found and will be used to reduce the content of ${packageXmlFullFile} ...` + ) + ); + this.packageXmlToRemove = packageXmlSkipItemsFile; + } + + // Add more metadata types to ignore using global variable MONITORING_BACKUP_SKIP_METADATA_TYPES + const additionalSkipMetadataTypes = process.env?.MONITORING_BACKUP_SKIP_METADATA_TYPES; + if (additionalSkipMetadataTypes) { + uxLog( + "log", + this, + c.grey( + `Env var MONITORING_BACKUP_SKIP_METADATA_TYPES has been found and will also be used to reduce the content of ${packageXmlFullFile} ...` + ) + ); + let packageSkipItems = {}; + if (fs.existsSync(this.packageXmlToRemove || '')) { + packageSkipItems = await parsePackageXmlFile(this.packageXmlToRemove || ''); + } + for (const metadataType of additionalSkipMetadataTypes.split(',')) { + packageSkipItems[metadataType] = 
['*']; + } + this.packageXmlToRemove = 'manifest/package-skip-items-dynamic-do-not-update-manually.xml'; + await writePackageXmlFile(this.packageXmlToRemove, packageSkipItems); + } + return packageXmlBackUpItemsFile; + } + + private async retrievePackageXml(packageXmlBackUpItemsFile: string, flags: any) { + const nbRetrievedItems = await countPackageXmlItems(packageXmlBackUpItemsFile); + const packageXml = await parsePackageXmlFile(packageXmlBackUpItemsFile); + uxLog("action", this, c.cyan(`Run the retrieve command for ${path.basename(packageXmlBackUpItemsFile)}, containing ${nbRetrievedItems} items:`)); + const mdTypesString = Object.keys(packageXml).map((mdType) => { + return `- ${mdType} (${packageXml?.[mdType]?.length || 0})`; + }).join('\n'); + uxLog("log", this, c.grey(mdTypesString)); + try { + await execCommand( + `sf project retrieve start -x "${packageXmlBackUpItemsFile}" -o ${flags['target-org'].getUsername()} --ignore-conflicts --wait 120`, + this, + { + fail: true, + output: true, + debug: this.debugMode, + } + ); + } catch (e) { + const failedPackageXmlContent = await fs.readFile(packageXmlBackUpItemsFile, 'utf8'); + uxLog("warning", this, c.yellow('BackUp package.xml that failed to be retrieved:\n' + c.grey(failedPackageXmlContent))); + if (this.full) { + uxLog( + "error", + this, + c.red( + c.bold( + 'This should not happen: Please report the issue on sfdx-hardis repository: https://github.com/hardisgroupcom/sfdx-hardis/issues' + ) + ) + ); + } + else { + uxLog( + "error", + this, + c.red( + c.bold( + 'Crash during backup. 
You may exclude more metadata types by updating file manifest/package-skip-items.xml then commit and push it, or use variable MONITORING_BACKUP_SKIP_METADATA_TYPES' + ) + ) + ); + } + uxLog( + "warning", + this, + c.yellow( + c.bold( + `See troubleshooting doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-config-home/#troubleshooting` + ) + ) + ); + throw e; + } } } diff --git a/src/commands/hardis/org/monitor/limits.ts b/src/commands/hardis/org/monitor/limits.ts index 40af39601..1158f581a 100644 --- a/src/commands/hardis/org/monitor/limits.ts +++ b/src/commands/hardis/org/monitor/limits.ts @@ -1,66 +1,86 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execSfdxJson, uxLog } from "../../../../common/utils"; -import { getEnvVar } from "../../../../config"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; -import { MessageAttachment } from "@slack/web-api"; -import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from "../../../../common/utils/notifUtils"; -import { generateCsvFile, generateReportPath } from "../../../../common/utils/filesUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class MonitorBackup extends SfdxCommand { - public static title = "Check org limits"; - - public static description = `Check limits of a SF org and send notifications about limits are superior to 50%, 75% or 100%. 
- -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-org-limits/) and can output Grafana, Slack and MsTeams Notifications. +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execSfdxJson, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { CONSTANTS, getEnvVar } from '../../../../config/index.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { MessageAttachment } from '@slack/web-api'; +import { getNotificationButtons, getOrgMarkdown, getSeverityIcon } from '../../../../common/utils/notifUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../../common/utils/filesUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class MonitorLimits extends SfCommand { + public static title = 'Check org limits'; + + public static description = ` +## Command Behavior + +**Checks the current usage of various Salesforce org limits and sends notifications if thresholds are exceeded.** + +This command is a critical component of proactive Salesforce org management, helping administrators and developers monitor resource consumption and prevent hitting critical limits that could impact performance or functionality. It provides early warnings when limits are approaching their capacity. + +Key functionalities: + +- **Limit Retrieval:** Fetches a comprehensive list of all Salesforce org limits using the Salesforce CLI. +- **Usage Calculation:** Calculates the percentage of each limit that is currently being used. 
+- **Threshold-Based Alerting:** Assigns a severity (success, warning, or error) to each limit based on configurable thresholds: + - **Warning:** If usage exceeds 50% (configurable via \`LIMIT_THRESHOLD_WARNING\` environment variable). + - **Error:** If usage exceeds 75% (configurable via \`LIMIT_THRESHOLD_ERROR\` environment variable). +- **CSV Report Generation:** Generates a CSV file containing all org limits, their current usage, maximum allowed, and calculated percentage used, along with the assigned severity. +- **Notifications:** Sends notifications to configured channels (Grafana, Slack, MS Teams) with a summary of limits that have exceeded the warning or error thresholds. + +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-org-limits/) and can output Grafana, Slack and MsTeams Notifications. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce CLI Integration:** It executes the \`sf org limits list\` command to retrieve the current org limits. It parses the JSON output of this command. +- **Data Processing:** It iterates through the retrieved limits, calculates the \`used\` and \`percentUsed\` values, and assigns a \`severity\` (success, warning, error) based on the configured thresholds. +- **Environment Variable Configuration:** Reads \`LIMIT_THRESHOLD_WARNING\` and \`LIMIT_THRESHOLD_ERROR\` environment variables to set the warning and error thresholds for limit usage. +- **Report Generation:** It uses \`generateCsvFile\` to create the CSV report of org limits. +- **Notification Integration:** It integrates with the \`NotifProvider\` to send notifications, including attachments of the generated CSV report and detailed metrics for each limit, which can be consumed by monitoring dashboards like Grafana. +- **Exit Code Management:** Sets the process exit code to 1 if any limit is in an 'error' state, indicating a critical issue. +
`; - public static examples = ["$ sfdx hardis:org:monitor:limits"]; + public static examples = ['$ sf hardis:org:monitor:limits']; - protected static flagsConfig = { - outputfile: flags.string({ - char: "o", - description: "Force the path and name of output report file. Must end with .csv", + public static flags: any = { + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. Must end with .csv', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; // Trigger notification(s) to MsTeams channel protected static triggerNotification = true; - protected limitThresholdWarning = Number(getEnvVar("LIMIT_THRESHOLD_WARNING") || 50.0); - protected limitThresholdError = Number(getEnvVar("LIMIT_THRESHOLD_WARNING") || 75.0); + protected limitThresholdWarning = Number(getEnvVar('LIMIT_THRESHOLD_WARNING') || 50.0); + protected limitThresholdError = Number(getEnvVar('LIMIT_THRESHOLD_WARNING') || 75.0); - protected limitEntries = []; + protected 
limitEntries: any[] = []; protected outputFile; protected outputFilesRes: any = {}; protected debugMode = false; @@ -68,11 +88,12 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co /* jscpd:ignore-end */ public async run(): Promise { - this.outputFile = this.flags.outputfile || null; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(MonitorLimits); + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; // List org limits - uxLog(this, c.cyan(`Run the org limits list command ...`)); + uxLog("action", this, c.cyan(`Run the org limits list command ...`)); const limitsCommandRes = await execSfdxJson(`sf org limits list`, this, { fail: true, output: true, @@ -88,59 +109,63 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co limit.percentUsed = 0.0; } limit.severity = - limit.percentUsed > this.limitThresholdError ? "error" : limit.percentUsed > this.limitThresholdWarning ? "warning" : "success"; + limit.percentUsed > this.limitThresholdError + ? 'error' + : limit.percentUsed > this.limitThresholdWarning + ? 
'warning' + : 'success'; limit.severityIcon = getSeverityIcon(limit.severity); - limit.label = limit.name.replace(/([A-Z])/g, " $1"); + limit.label = limit.name.replace(/([A-Z])/g, ' $1'); return limit; }); - console.table(this.limitEntries); + uxLogTable(this, this.limitEntries); - this.outputFile = await generateReportPath("org-limits", this.outputFile); - this.outputFilesRes = await generateCsvFile(this.limitEntries, this.outputFile); + this.outputFile = await generateReportPath('org-limits', this.outputFile); + this.outputFilesRes = await generateCsvFile(this.limitEntries, this.outputFile, { fileTitle: 'Org Limits' }); - const limitsError = this.limitEntries.filter((limit) => limit.severity === "error"); + const limitsError = this.limitEntries.filter((limit) => limit.severity === 'error'); const numberLimitsError = limitsError.length; - const limitsWarning = this.limitEntries.filter((limit) => limit.severity === "warning"); + const limitsWarning = this.limitEntries.filter((limit) => limit.severity === 'warning'); const numberLimitsWarning = limitsWarning.length; // Build notifications - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + const orgMarkdown = await getOrgMarkdown(flags['target-org']?.getConnection()?.instanceUrl); const notifButtons = await getNotificationButtons(); - let notifSeverity: NotifSeverity = "log"; + let notifSeverity: NotifSeverity = 'log'; let notifText = `No limit issues detected in ${orgMarkdown}`; const notifAttachments: MessageAttachment[] = []; // Dangerous limits has been found if (numberLimitsError > 0) { - notifSeverity = "error"; + notifSeverity = 'error'; notifText = `Limit severe alerts have been detected in ${orgMarkdown} (error: ${numberLimitsError}, warning: ${numberLimitsWarning})`; const errorText = `*Error Limits*\n${limitsError .map((limit) => { - return `• ${limit.name}: ${limit.percentUsed}% used (${limit.used}/${limit.max})`; + return `• ${limit.name}: *${limit.percentUsed}%* used 
(${limit.used}/${limit.max})`; }) - .join("\n")}`; + .join('\n')}`; notifAttachments.push({ text: errorText, }); - uxLog(this, c.red(notifText + "\n" + errorText)); + uxLog("error", this, c.red(notifText + '\n' + errorText)); process.exitCode = 1; } // Warning limits detected else if (numberLimitsWarning > 0) { - notifSeverity = "warning"; + notifSeverity = 'warning'; notifText = `Limit warning alerts have been detected in ${orgMarkdown} (${numberLimitsWarning})`; const warningText = `*Warning Limits*\n${limitsWarning .map((limit) => { - return `• ${limit.name}: ${limit.percentUsed}% used (${limit.used}/${limit.max})`; + return `• ${limit.name}: *${limit.percentUsed}%* used (${limit.used}/${limit.max})`; }) - .join("\n")}`; + .join('\n')}`; notifAttachments.push({ text: warningText, }); - uxLog(this, c.yellow(notifText + "\n" + warningText)); + uxLog("warning", this, c.yellow(notifText + '\n' + warningText)); } else { - uxLog(this, c.green("No limit issue has been found")); + uxLog("success", this, c.green('No limit issue has been found')); } const limitEntriesMap = {}; @@ -155,9 +180,9 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co } // Post notifications - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "ORG_LIMITS", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'ORG_LIMITS', text: notifText, buttons: notifButtons, attachments: notifAttachments, @@ -168,6 +193,9 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co metrics: limitMetricsMap, }); - return { outputString: "Limits check on org " + this.org.getConnection().instanceUrl, limitEntries: this.limitEntries }; + return { + outputString: 'Limits check on org ' + flags['target-org'].getConnection().instanceUrl, + 
limitEntries: this.limitEntries, + }; } } diff --git a/src/commands/hardis/org/multi-org-query.ts b/src/commands/hardis/org/multi-org-query.ts new file mode 100644 index 000000000..951dae807 --- /dev/null +++ b/src/commands/hardis/org/multi-org-query.ts @@ -0,0 +1,268 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { AuthInfo, Connection, Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from "chalk"; +import { makeSureOrgIsConnected, promptOrgList } from '../../../common/utils/orgUtils.js'; +import { isCI, uxLog } from '../../../common/utils/index.js'; +import { bulkQuery } from '../../../common/utils/apiUtils.js'; +import { generateCsvFile, generateReportPath } from '../../../common/utils/filesUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class MultiOrgQuery extends SfCommand { + public static title = 'Multiple Orgs SOQL Query'; + + public static description = ` +**Executes a SOQL query across multiple Salesforce organizations and consolidates the results into a single report.** + +This command is highly valuable for administrators and developers who need to gather consistent data from various Salesforce environments (e.g., sandboxes, production orgs) for reporting, auditing, or comparison purposes. It streamlines the process of querying multiple orgs, eliminating the need to log into each one individually. + +Key functionalities: + +- **Flexible Query Input:** You can provide a custom SOQL query directly using the \`--query\` flag, or select from a list of predefined query templates (e.g., \`active-users\`, \`all-users\`) using the \`--query-template\` flag. +- **Multiple Org Targeting:** Specify a list of Salesforce org usernames or aliases using the \`--target-orgs\` flag. 
If not provided, an interactive menu will allow you to select multiple authenticated orgs. +- **Consolidated Report:** All query results from the different orgs are combined into a single CSV file, making data analysis and comparison straightforward. +- **Authentication Handling:** For CI/CD jobs, ensure that the target orgs are already authenticated using Salesforce CLI. In interactive mode, it will prompt for authentication if an org is not connected. + +**Visual Demo:** + +[![Use in VsCode SFDX Hardis !](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/multi-org-query-demo.gif)](https://marketplace.visualstudio.com/items?itemName=NicolasVuillamy.vscode-sfdx-hardis) + +
+Technical explanations + +The command's technical implementation involves: + +- **Org Authentication and Connection:** It uses \`AuthInfo.create\` and \`Connection.create\` to establish connections to each target Salesforce org. It also leverages \`makeSureOrgIsConnected\` and \`promptOrgList\` for interactive org selection and authentication checks. +- **SOQL Query Execution (Bulk API):** It executes the specified SOQL query against each connected org using \`bulkQuery\` for efficient data retrieval, especially for large datasets. +- **Data Aggregation:** It collects the records from each org's query result and adds metadata about the source org (instance URL, alias, username) to each record, enabling easy identification of data origin in the consolidated report. +- **Report Generation:** It uses \`generateCsvFile\` to create the final CSV report and \`generateReportPath\` to determine the output file location. +- **Interactive Prompts:** The \`prompts\` library is used to guide the user through selecting a query template or entering a custom query, and for selecting target orgs if not provided as command-line arguments. +- **Error Handling:** It logs errors for any orgs where the query fails, ensuring that the overall process continues and provides a clear summary of successes and failures. +
+`; + + public static examples = [ + '$ sf hardis:org:multi-org-query', + '$ sf hardis:org:multi-org-query --query "SELECT Id,Username FROM User"', + '$ sf hardis:org:multi-org-query --query "SELECT Id,Username FROM User" --target-orgs nico@cloudity.com nico@cloudity.com.preprod nico@cloudity.com.uat', + '$ sf hardis:org:multi-org-query --query-template active-users --target-orgs nico@cloudity.com nico@cloudity.com.preprod nico@cloudity.com.uat', + ]; + + public static flags: any = { + query: Flags.string({ + char: 'q', + description: 'SOQL Query to run on multiple orgs', + exclusive: ["query-template"] + }), + "query-template": Flags.string({ + char: "t", + description: "Use one of predefined SOQL Query templates", + options: [ + "active-users", + "all-users" + ], + exclusive: ["query"] + }), + "target-orgs": Flags.string({ + char: "x", + description: "List of org usernames or aliases.", + multiple: true + }), + outputfile: Flags.string({ + char: 'f', + description: 'Force the path and name of output report file. 
Must end with .csv', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + }; + + protected allQueryTemplates: any = { + "active-users": { + label: "Active users", + query: `SELECT Id, LastLoginDate, User.LastName, User.Firstname, Profile.UserLicense.Name, Profile.Name, Username, Profile.UserLicense.LicenseDefinitionKey, IsActive, CreatedDate FROM User WHERE IsActive = true ORDER BY Username ASC`, + }, + "all-users": { + label: "All users (including inactive)", + query: `SELECT Id, LastLoginDate, User.LastName, User.Firstname, Profile.UserLicense.Name, Profile.Name, Username, Profile.UserLicense.LicenseDefinitionKey, IsActive, CreatedDate FROM User ORDER BY Username ASC`, + } + }; + protected query: string; + protected queryTemplate: string; + protected targetOrgsIds: string[] = []; + protected targetOrgs: any[] = [] + protected outputFile; + protected debugMode = false; + protected allRecords: any[] = []; + protected successOrgs: any[] = []; + protected errorOrgs: any[] = []; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(MultiOrgQuery); + this.query = flags.query || null; + this.queryTemplate = flags["query-template"] || null; + this.targetOrgsIds = flags["target-orgs"] || []; + this.outputFile = flags.outputfile || null; + this.debugMode = flags.debug || false; + + // Prompt query if not specified as input argument + await this.defineSoqlQuery(); + + // List org if not sent as input parameter + await this.manageSelectOrgs(); + + // Perform the request on orgs + await this.performQueries(); + + // Display results + this.displayResults(); + + // Generate output CSV & XLS + this.outputFile = await generateReportPath('multi-org-query', this.outputFile); + 
const outputFilesRes = await generateCsvFile(this.allRecords, this.outputFile, { fileTitle: 'Multi Orgs Query Results' }); + + return { + allRecords: this.allRecords, + successOrgs: this.successOrgs, + errorOrgs: this.errorOrgs, + csvLogFile: this.outputFile, + xlsxLogFile: outputFilesRes.xlsxFile, + }; + } + + private displayResults() { + uxLog("action", this, c.cyan(`Query results from ${this.targetOrgsIds.length} orgs`)); + if (this.successOrgs.length > 0) { + uxLog("success", this, c.green(`Successfully performed query on ${this.successOrgs.length} orgs`)); + for (const org of this.successOrgs) { + uxLog("log", this, c.grey(`- ${org.instanceUrl}`)); + } + } + if (this.errorOrgs.length > 0) { + uxLog("error", this, c.red(`Error while performing query on ${this.errorOrgs.length} orgs`)); + for (const org of this.errorOrgs) { + uxLog("log", this, c.grey(`- ${org.instanceUrl}: ${org?.error?.message}`)); + } + } + } + + private async performQueries() { + for (const orgId of this.targetOrgsIds) { + const matchOrgs = this.targetOrgs.filter(org => (org.username === orgId || org.alias === orgId) && org.accessToken); + if (matchOrgs.length === 0) { + uxLog("warning", this, c.yellow(`Skipped ${orgId}: Unable to find authentication. 
Run "sf org login web" to authenticate.`)); + continue; + } + const accessToken = matchOrgs[0].accessToken; + const username = matchOrgs[0].username; + const instanceUrl = matchOrgs[0].instanceUrl; + const loginUrl = matchOrgs[0].loginUrl || instanceUrl; + uxLog("action", this, c.cyan(`Performing query on ${c.bold(orgId)}...`)); + try { + const authInfo = await AuthInfo.create({ + username: username + }); + const connectionConfig: any = { + loginUrl: loginUrl, + instanceUrl: instanceUrl, + accessToken: accessToken + }; + const conn = await Connection.create({ authInfo: authInfo, connectionOptions: connectionConfig }); + const bulkQueryRes = await bulkQuery(this.query, conn, 5); + // Add org info to results + const records = bulkQueryRes.records.map(record => { + record.orgInstanceUrl = matchOrgs[0].instanceUrl; + record.orgAlias = matchOrgs[0].alias || ""; + record.orgUser = matchOrgs[0].username || ""; + return record; + }); + this.allRecords.push(...records); + this.successOrgs.push({ orgId: orgId, instanceUrl: instanceUrl, username: username }) + } catch (e: any) { + uxLog("error", this, c.red(`Error while querying ${orgId}: ${e.message}`)); + this.errorOrgs.push({ org: orgId, error: e }) + } + + } + } + + private async manageSelectOrgs() { + if (this.targetOrgsIds.length === 0) { + if (isCI) { + throw new SfError("You must provide a list of org usernames or aliases in --target-orgs") + } + this.targetOrgs = await promptOrgList(); + this.targetOrgsIds = this.targetOrgs.map(org => org.alias || org.username); + } + + // Check orgs are connected + for (const orgId of this.targetOrgsIds) { + const matchOrgs = this.targetOrgs.filter(org => (org.username === orgId || org.alias === orgId) && org.accessToken && org.connectedStatus === 'Connected'); + if (matchOrgs.length === 0) { + if (isCI) { + throw new SfError(`${orgId} must be authenticated using Salesforce CLI before calling this command`); + } + const orgRes = await makeSureOrgIsConnected(orgId); + 
this.targetOrgs.push(orgRes); + } + } + } + + private async defineSoqlQuery() { + // Template is sent as input + if (this.queryTemplate) { + this.query = this.allQueryTemplates[this.queryTemplate].query; + } + if (this.query == null) { + if (isCI) { + throw new SfError("You must provide a valid value in --query or --query-template"); + } + const baseQueryPromptRes = await prompts({ + type: "select", + message: "Please select a predefined query, or custom SOQL option", + description: "Choose a ready-made SOQL query template or enter your own custom query", + placeholder: "Select a query template", + choices: [ + ...Object.keys(this.allQueryTemplates).map(templateId => { + return { + title: this.allQueryTemplates[templateId].label, + description: this.allQueryTemplates[templateId].query, + value: this.allQueryTemplates[templateId].query + } + }), + { + title: "Custom SOQL Query", + description: "Enter a custom SOQL query to run", + value: "custom" + } + ] + }); + if (baseQueryPromptRes.value === "custom") { + const queryPromptRes = await prompts({ + type: 'text', + message: 'Please input the SOQL Query to run in multiple orgs', + description: 'Enter a custom SOQL query that will be executed across all selected Salesforce orgs', + placeholder: 'Ex: SELECT Id, Name FROM Account LIMIT 10', + }); + this.query = queryPromptRes.value; + } + else { + this.query = baseQueryPromptRes.value; + } + } + } +} diff --git a/src/commands/hardis/org/purge/apexlog.ts b/src/commands/hardis/org/purge/apexlog.ts index 21e10a428..aba3e3b33 100644 --- a/src/commands/hardis/org/purge/apexlog.ts +++ b/src/commands/hardis/org/purge/apexlog.ts @@ -1,90 +1,115 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { execCommand, uxLog } from "../../../../common/utils"; 
-import { prompts } from "../../../../common/utils/prompts"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { execCommand, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class PurgeApexLogs extends SfCommand { + public static title = 'Purge Apex Logs'; -export default class OrgPurgeFlow extends SfdxCommand { - public static title = "Purge Apex Logs"; + public static description = ` +**Purges Apex debug logs from a Salesforce org.** - public static description = "Purge apex logs in selected org"; +This command provides a quick and efficient way to clear out accumulated Apex debug logs from your Salesforce environment. This is particularly useful for: - public static examples = [`$ sfdx hardis:org:purge:apexlog`, `$ sfdx hardis:org:purge:apexlog --targetusername nicolas.vuillamy@gmail.com`]; +- **Storage Management:** Freeing up valuable data storage space in your Salesforce org. +- **Performance Optimization:** Reducing the overhead associated with large volumes of debug logs. +- **Troubleshooting:** Ensuring that new debug logs are generated cleanly without interference from old, irrelevant logs. 
+ +Key functionalities: + +- **Log Identification:** Queries the \`ApexLog\` object to identify all existing debug logs. +- **Confirmation Prompt:** Before deletion, it prompts for user confirmation, displaying the number of Apex logs that will be deleted. +- **Bulk Deletion:** Uses the Salesforce Bulk API to efficiently delete a large number of Apex logs. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Query:** It executes a SOQL query (\`SELECT Id FROM ApexLog LIMIT 50000\`) to retrieve the IDs of Apex logs to be deleted. The limit is set to 50,000 to handle large volumes of logs. +- **CSV Export:** The retrieved log IDs are temporarily exported to a CSV file (\`ApexLogsToDelete_*.csv\`) in the \`./tmp\` directory. +- **User Confirmation:** It uses the \`prompts\` library to ask for user confirmation before proceeding with the deletion, displaying the count of logs to be purged. +- **Bulk API Deletion:** It then uses the Salesforce CLI's \`sf data delete bulk\` command, pointing to the generated CSV file, to perform the mass deletion of Apex logs. +- **File System Operations:** It uses \`fs-extra\` to create the temporary directory and manage the CSV file. +- **Error Handling:** Includes error handling for the query and deletion operations. +
+`; + + public static examples = [ + `$ sf hardis:org:purge:apexlog`, + `$ sf hardis:org:purge:apexlog --target-org nicolas.vuillamy@gmail.com`, + ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { + public static flags: any = { // flag with a value (-n, --name=VALUE) - prompt: flags.boolean({ - char: "z", + prompt: Flags.boolean({ + char: 'z', default: true, allowNo: true, - description: messages.getMessage("prompt"), + description: messages.getMessage('prompt'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; /* jscpd:ignore-end */ public async run(): Promise { - const prompt = this.flags.prompt === false ? false : true; - const debugMode = this.flags.debug || false; + const { flags } = await this.parse(PurgeApexLogs); + const prompt = flags.prompt === false ? 
false : true; + const debugMode = flags.debug || false; // Build apex logs query - const tempDir = "./tmp"; + const tempDir = './tmp'; await fs.ensureDir(tempDir); - const apexLogsToDeleteCsv = path.join(tempDir, "ApexLogsToDelete_" + Math.random() + ".csv"); - const queryCommand = `sfdx force:data:soql:query -q "SELECT Id FROM ApexLog" -t -r "csv" > "${apexLogsToDeleteCsv}"`; + const apexLogsToDeleteCsv = path.join(tempDir, 'ApexLogsToDelete_' + Math.random() + '.csv'); + const queryCommand = `sf data query --query "SELECT Id FROM ApexLog LIMIT 50000" -t -r "csv" > "${apexLogsToDeleteCsv}"`; await execCommand(queryCommand, this, { output: true, debug: debugMode, fail: true, }); - const extractFile = (await fs.readFile(apexLogsToDeleteCsv, "utf8")).toString(); - const apexLogsNumber = extractFile.split("\n").filter((line) => line.length > 0).length; + const extractFile = (await fs.readFile(apexLogsToDeleteCsv, 'utf8')).toString(); + const apexLogsNumber = extractFile.split('\n').filter((line) => line.length > 0).length; if (apexLogsNumber === 0) { - uxLog(this, c.cyan(`There are no Apex Logs to delete in org ${c.green(this.org.getUsername())}`)); + uxLog("action", this, c.cyan(`There are no Apex Logs to delete in org ${c.green(flags['target-org'].getUsername())}`)); return {}; } // Prompt confirmation if (prompt) { const confirmRes = await prompts({ - type: "confirm", - name: "value", - message: `Do you want to delete ${c.bold(apexLogsNumber)} Apex Logs of org ${c.green(this.org.getUsername())} ?`, + type: 'confirm', + name: 'value', + message: `Do you want to delete ${c.bold(apexLogsNumber)} Apex Logs of org ${c.green( + flags['target-org'].getUsername() + )} ?`, + description: 'Permanently delete all Apex debug logs from the Salesforce org to free up storage space', }); if (confirmRes.value === false) { return {}; @@ -92,16 +117,22 @@ export default class OrgPurgeFlow extends SfdxCommand { } // Perform delete - const deleteCommand = `sfdx force:data:bulk:delete 
-s ApexLog -f ${apexLogsToDeleteCsv}`; + const deleteCommand = `sf data delete bulk --sobject ApexLog --file ${apexLogsToDeleteCsv}`; await execCommand(deleteCommand, this, { output: true, debug: debugMode, fail: true, }); - uxLog(this, c.green(`Successfully deleted ${c.bold(apexLogsNumber)} Apex Logs in org ${c.bold(this.org.getUsername())}`)); + uxLog( + "success", + this, + c.green( + `Successfully deleted ${c.bold(apexLogsNumber)} Apex Logs in org ${c.bold(flags['target-org'].getUsername())}` + ) + ); // Return an object to be displayed with --json - return { orgId: this.org.getOrgId() }; + return { orgId: flags['target-org'].getOrgId() }; } } diff --git a/src/commands/hardis/org/purge/flow.ts b/src/commands/hardis/org/purge/flow.ts index 5e0e6efb3..dac786e8b 100644 --- a/src/commands/hardis/org/purge/flow.ts +++ b/src/commands/hardis/org/purge/flow.ts @@ -1,242 +1,373 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as columnify from "columnify"; -import { execSfdxJson, isCI, uxLog } from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; -import { bulkDeleteTooling } from "../../../../common/utils/apiUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execSfdxJson, extractRegexMatches, isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { bulkDelete, bulkDeleteTooling, bulkQuery } from '../../../../common/utils/apiUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); 
+Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class OrgPurgeFlow extends SfCommand { + public static title = 'Purge Flow versions'; -export default class OrgPurgeFlow extends SfdxCommand { - public static title = "Purge Flow versions"; + public static description = ` +**Purges old or unwanted Flow versions from a Salesforce org, with an option to delete related Flow Interviews.** - public static description = messages.getMessage("orgPurgeFlow"); +This command helps maintain a clean and performant Salesforce org by removing obsolete Flow versions. Over time, multiple versions of Flows can accumulate, consuming storage and potentially impacting performance. This tool provides a controlled way to clean up these versions. + +Key functionalities: + +- **Targeted Flow Selection:** Allows you to filter Flow versions to delete by name (\`--name\`) and status (\`--status\`, e.g., \`Obsolete\`, \`Draft\`, \`Inactive\`). +- **Flow Interview Deletion:** If a Flow version cannot be deleted due to active Flow Interviews, the \`--delete-flow-interviews\` flag (or interactive prompt) allows you to delete these interviews first, then retry the Flow version deletion. +- **Confirmation Prompt:** In interactive mode, it prompts for confirmation before proceeding with the deletion of Flow versions and Flow Interviews. +- **Partial Success Handling:** The \`--allowpurgefailure\` flag (default \`true\`) allows the command to continue even if some deletions fail, reporting the errors. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Tooling API):** It queries the \`Flow\` object (using the Tooling API) to list Flow versions based on the provided filters (name, status, manageable state). +- **Bulk Deletion (Tooling API):** It uses \`bulkDeleteTooling\` to perform mass deletions of Flow versions. If deletion fails due to active interviews, it extracts the interview IDs. +- **Flow Interview Management:** If \`delete-flow-interviews\` is enabled, it queries \`FlowInterview\` objects, performs bulk deletion of the identified interviews using \`bulkDelete\`, and then retries the Flow version deletion. +- **Interactive Prompts:** Uses the \`prompts\` library to interact with the user for selecting Flows, statuses, and confirming deletion actions. +- **Error Reporting:** Logs detailed error messages for failed deletions, including the specific reasons. +- **Command-Line Execution:** Uses \`execSfdxJson\` to execute Salesforce CLI commands for querying Flow data. +
+`; public static examples = [ - `$ sfdx hardis:org:purge:flow --no-prompt`, - `$ sfdx hardis:org:purge:flow --targetusername nicolas.vuillamy@gmail.com - Found 1 records: - ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS - 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - Are you sure you want to delete this list of records (y/n)?: y - Successfully deleted record: 30109000000kX7uAAE. - Deleted the following list of records: - ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS - 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - `, - `$ sfdx hardis:org:purge:flow --targetusername nicolas.vuillamy@gmail.com --status "Obsolete,Draft,InvalidDraft --name TestFlow" - Found 4 records: - ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS - 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - 30109000000kX8EAAU TestFlow 6 test flowwww InvalidDraft - 30109000000kX8AAAU TestFlow 5 test flowwww InvalidDraft - 30109000000kX89AAE TestFlow 4 test flowwww Draft - Are you sure you want to delete this list of records (y/n)?: n - No record deleted - `, + `$ sf hardis:org:purge:flow`, + `$ sf hardis:org:purge:flow --target-org nicolas.vuillamy@gmail.com --no-prompt --delete-flow-interviews`, + `$ sf hardis:org:purge:flow --target-org nicolas.vuillamy@gmail.com --status "Obsolete,Draft,InvalidDraft" --name TestFlow`, ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { + public static flags: any = { // flag with a value (-n, --name=VALUE) - prompt: flags.boolean({ - char: "z", + prompt: Flags.boolean({ + char: 'z', default: true, allowNo: true, - description: messages.getMessage("prompt"), + description: messages.getMessage('prompt'), + }), + name: Flags.string({ + char: 'n', + description: messages.getMessage('nameFilter'), }), - name: flags.string({ - char: "n", - description: messages.getMessage("nameFilter"), + status: Flags.string({ + char: 's', + description: messages.getMessage('statusFilter'), }), - status: flags.string({ - char: "s", - 
description: messages.getMessage("statusFilter"), + 'delete-flow-interviews': Flags.boolean({ + char: 'w', + default: false, + description: `If the presence of Flow interviews prevent to delete flows versions, delete them before retrying to delete flow versions`, }), - allowpurgefailure: flags.boolean({ - char: "f", + allowpurgefailure: Flags.boolean({ + char: 'f', default: true, allowNo: true, - description: messages.getMessage("allowPurgeFailure"), + description: messages.getMessage('allowPurgeFailure'), }), - instanceurl: flags.string({ - char: "r", - default: "https://login.salesforce.com", - description: messages.getMessage("instanceUrl"), + instanceurl: Flags.string({ + char: 'r', + default: 'https://login.salesforce.com', + description: messages.getMessage('instanceUrl'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; + protected debugMode = false; + protected statusFilter: string[] = []; + protected nameFilter: string | null = null; + protected username: 
string; + protected promptUser = true; + protected deleteFlowInterviews: boolean; + protected allowPurgeFailure: boolean; + protected flowRecordsRaw: any[]; + protected flowRecords: any[]; + protected deletedRecords: any[] = []; + protected deletedErrors: any[] = []; /* jscpd:ignore-end */ public async run(): Promise { - const prompt = this.flags.prompt === false ? false : true; - let nameFilter = this.flags.name || null; - const allowPurgeFailure = this.flags.allowpurgefailure === false ? false : true; - const debugMode = this.flags.debug || false; - const username = this.org.getUsername(); + const { flags } = await this.parse(OrgPurgeFlow); + this.promptUser = flags.prompt === false ? false : true; + this.nameFilter = flags.name || null; + this.allowPurgeFailure = flags.allowpurgefailure === false ? false : true; + this.deleteFlowInterviews = flags['delete-flow-interviews'] || false; + this.debugMode = flags.debug || false; + this.username = flags['target-org'].getUsername(); - let statusFilter; - const manageableConstraint = "ManageableState IN ('deprecatedEditable','installedEditable','unmanaged')"; - if (this.flags.status) { - // Input parameter used - statusFilter = this.flags.status.split(","); - } else if (isCI) { - // Obsolete by default for CI - statusFilter = ["Obsolete"]; - } else { - // Query all flows definitions - const allFlowQueryCommand = - "sfdx force:data:soql:query " + - ` -q "SELECT Id,DeveloperName,MasterLabel,ManageableState FROM FlowDefinition WHERE ${manageableConstraint} ORDER BY DeveloperName"` + - ` --targetusername ${username}` + - " --usetoolingapi"; - const allFlowQueryRes = await execSfdxJson(allFlowQueryCommand, this, { - output: false, - debug: debugMode, - fail: true, - }); - const flowRecordsRaw = allFlowQueryRes?.result?.records || allFlowQueryRes.records || []; - const flowNamesUnique = [...new Set(flowRecordsRaw.map((flowRecord) => flowRecord.DeveloperName))]; - const flowNamesChoice = flowNamesUnique.map((flowName) => { - 
return { title: flowName, value: flowName }; - }); - flowNamesChoice.unshift({ title: "All flows", value: "all" }); - - // Manually select status - const selectStatus = await prompts([ - { - type: "select", - name: "name", - message: "Please select the flow you want to clean", - choices: flowNamesChoice, - }, - { - type: "multiselect", - name: "status", - message: "Please select the status(es) you want to delete", - choices: [ - { title: `Draft`, value: "Draft" }, - { title: `Inactive`, value: "Inactive" }, - { title: `Obsolete`, value: "Obsolete" }, - ], - }, - ]); - nameFilter = selectStatus.name; - statusFilter = selectStatus.status; - } + // List flows to delete, prompt user if not in CI and not send as arguments + const manageableConstraint = await this.getFlowsScope(flags); // Check we don't delete active Flows - if (statusFilter.includes("Active")) { - throw new SfdxError("You can not delete active records"); + if (this.statusFilter.includes('Active')) { + throw new SfError('You can not delete active records'); } // Build query with name filter if sent - let query = `SELECT Id,MasterLabel,VersionNumber,Status,Description,Definition.DeveloperName FROM Flow WHERE ${manageableConstraint} AND Status IN ('${statusFilter.join( - "','", - )}')`; - if (nameFilter && nameFilter != "all") { - query += ` AND Definition.DeveloperName = '${nameFilter}'`; - } - query += " ORDER BY Definition.DeveloperName,VersionNumber"; - - const flowQueryCommand = "sfdx force:data:soql:query " + ` -q "${query}"` + ` --targetusername ${username}` + " --usetoolingapi"; - const flowQueryRes = await execSfdxJson(flowQueryCommand, this, { - output: false, - debug: debugMode, - fail: true, - }); - const recordsRaw = flowQueryRes?.result?.records || flowQueryRes.records || []; + await this.listFlowVersionsToDelete(manageableConstraint); // Check empty result - if (recordsRaw.length === 0) { - const outputString = `[sfdx-hardis] No matching Flow records found with query ${query}`; - uxLog(this, 
c.yellow(outputString)); + if (this.flowRecordsRaw.length === 0) { + const outputString = `[sfdx-hardis] No matching Flow records found`; + uxLog("warning", this, c.yellow(outputString)); return { deleted: [], outputString }; } // Simplify results format & display them - const records = recordsRaw.map((record: any) => { - return { - Id: record.Id, - MasterLabel: record.MasterLabel, - VersionNumber: record.VersionNumber, - DefinitionDevName: record.Definition.DeveloperName, - Status: record.Status, - Description: record.Description, - }; - }); - - uxLog(this, `[sfdx-hardis] Found ${c.bold(records.length)} records:\n${c.yellow(columnify(records))}`); + this.formatFlowRecords(); // Confirm deletion - if (prompt) { + if (this.promptUser) { const confirmDelete = await prompts({ - type: "confirm", - name: "value", - message: c.cyanBright(`Do you confirm you want to delete these ${records.length} flow versions ?`), + type: 'confirm', + name: 'value', + message: c.cyanBright(`Do you confirm you want to delete these ${this.flowRecords.length} flow versions ?`), + description: 'Permanently delete the selected flow versions from the Salesforce org', }); if (confirmDelete.value === false) { - uxLog(this, c.magenta("Action cancelled by user")); - return { outputString: "Action cancelled by user" }; + uxLog("error", this, c.red('Action cancelled by user')); + return { outputString: 'Action cancelled by user' }; } } // Perform deletion - const deleted = []; - const deleteErrors = []; - const conn = this.org.getConnection(); - const deleteResults = await bulkDeleteTooling("Flow", records, conn); + const conn = flags['target-org'].getConnection(); + await this.processDeleteFlowVersions(conn, true); + + const summary = + this.deletedRecords.length > 0 + ? 
`Deleted the following list of record(s)` + : 'No record(s) to delete'; + uxLog("action", this, c.cyan(summary)); + if (this.deletedRecords.length > 0) { + uxLogTable(this, this.deletedRecords); + } + // Return an object to be displayed with --json + return { orgId: flags['target-org'].getOrgId(), outputString: summary }; + } + + private async processDeleteFlowVersions(conn: any, tryDeleteInterviews: boolean) { + uxLog("action", this, c.cyan(`Deleting Flow versions...`)); + const recordsIds = this.flowRecords.map((record) => record.Id); + const deleteResults = await bulkDeleteTooling('Flow', recordsIds, conn); for (const deleteRes of deleteResults.results) { if (deleteRes.success) { - deleted.push(deleteRes); + this.deletedRecords.push(deleteRes); } else { - this.ux.error(c.red(`[sfdx-hardis] Unable to perform deletion request: ${JSON.stringify(deleteRes)}`)); - deleteErrors.push(deleteRes); + uxLog("error", this, c.red(`[sfdx-hardis] Unable to perform deletion request: ${JSON.stringify(deleteRes)}`)); + this.deletedErrors.push(deleteRes); } } - if (deleteErrors.length > 0) { - const errMsg = `[sfdx-hardis] There have been errors while deleting ${deleteErrors.length} record(s): \n${JSON.stringify(deleteErrors)}`; - if (allowPurgeFailure) { - uxLog(this, c.yellow(errMsg)); + if ( + this.deletedErrors.length > 0 && + (this.deleteFlowInterviews === true || !isCI) && + tryDeleteInterviews === true + ) { + await this.manageDeleteFlowInterviews(conn); + } + + if (this.deletedErrors.length > 0) { + const errMsg = `[sfdx-hardis] There have been errors while deleting ${this.deletedErrors.length + } record(s): \n${JSON.stringify(this.deletedErrors)}`; + if (this.allowPurgeFailure) { + uxLog("warning", this, c.yellow(errMsg)); } else { - throw new SfdxError(c.yellow(`There have been errors while deleting ${deleteErrors.length} record(s): \n${JSON.stringify(deleteErrors)}`)); + throw new SfError( + c.yellow( + `There have been errors while deleting ${this.deletedErrors.length} 
record(s): \n${JSON.stringify( + this.deletedErrors + )}` + ) + ); } } + } - const summary = - deleted.length > 0 ? `[sfdx-hardis] Deleted the following list of record(s):\n${columnify(deleted)}` : "[sfdx-hardis] No record(s) to delete"; - uxLog(this, c.green(summary)); - // Return an object to be displayed with --json - return { orgId: this.org.getOrgId(), outputString: summary }; + private async manageDeleteFlowInterviews(conn: any) { + // Gather flow interviews that prevent deleting flow versions + const flowInterviewsIds: string[] = []; + this.flowRecords = []; + const extractInterviewsRegex = /Flow Interview - ([a-zA-Z0-9]{15}|[a-zA-Z0-9]{18})/gm; + for (const deletedError of this.deletedErrors) { + this.flowRecords.push({ Id: deletedError.Id }); + const errorflowInterviewIds = await extractRegexMatches(extractInterviewsRegex, deletedError.error); + flowInterviewsIds.push(...[...new Set(errorflowInterviewIds)]); // make interview Ids unique + } + if (flowInterviewsIds.length === 0) { + return; + } + // Display flows & Prompt user if not in CI + await this.displayFlowInterviewToDelete(flowInterviewsIds, conn); + if (!isCI && this.promptUser === true) { + const confirmDelete = await prompts({ + type: 'confirm', + name: 'value', + message: c.cyanBright(`Do you confirm you want to delete ${flowInterviewsIds.length} Flow Interviews ?`), + description: 'Permanently delete the selected flow interview records from the Salesforce org', + }); + if (confirmDelete.value === false) { + uxLog("error", this, c.red('Action cancelled by user')); + return { outputString: 'Action cancelled by user' }; + } + } + // Delete flow interviews + const deleteInterviewResults = await bulkDelete('FlowInterview', flowInterviewsIds, conn); + this.deletedRecords.push(deleteInterviewResults?.successfulResults || []); + this.deletedErrors = deleteInterviewResults?.failedResults || []; + // Try to delete flow versions again + uxLog("action", this, c.cyan(`Trying again to delete flow versions 
after deleting flow interviews...`)); + this.flowRecords = [...new Set(this.flowRecords)]; // Make list unique + await this.processDeleteFlowVersions(conn, false); + } + + private formatFlowRecords() { + this.flowRecords = this.flowRecordsRaw.map((record: any) => ({ + Id: record.Id, + MasterLabel: record.MasterLabel, + VersionNumber: record.VersionNumber, + DefinitionDevName: record.Definition.DeveloperName, + Status: record.Status, + Description: record.Description, + })); + + if (this.flowRecords.length === 0) { + uxLog("warning", this, c.yellow('No Flow versions found to delete.')); + return; + } + + const flowList = this.flowRecords + .map( + (flow) => + `- ${c.bold(flow.DefinitionDevName)} v${c.green(flow.VersionNumber)} (${c.yellow(flow.Status)})${flow.Description ? ` - ${c.gray(flow.Description)}` : ''}` + ) + .join('\n'); + + uxLog( + "action", + this, + c.cyan( + `Found ${this.flowRecords.length} Flow version(s) to delete:\n${flowList}` + ) + ); + } + + private async listFlowVersionsToDelete(manageableConstraint: string) { + uxLog("action", this, c.cyan('Querying Flow versions to delete...')); + let query = `SELECT Id,MasterLabel,VersionNumber,Status,Description,Definition.DeveloperName FROM Flow WHERE ${manageableConstraint} AND Status IN ('${this.statusFilter.join( + "','" + )}')`; + if (this.nameFilter && this.nameFilter != 'all') { + query += ` AND Definition.DeveloperName = '${this.nameFilter}'`; + } + query += ' ORDER BY Definition.DeveloperName,VersionNumber'; + + const flowQueryCommand = + 'sf data query ' + ` --query "${query}"` + ` --target-org ${this.username}` + ' --use-tooling-api'; + const flowQueryRes = await execSfdxJson(flowQueryCommand, this, { + output: false, + debug: this.debugMode, + fail: true, + }); + this.flowRecordsRaw = flowQueryRes?.result?.records || flowQueryRes.records || []; + } + + private async getFlowsScope(flags) { + const manageableConstraint = "ManageableState IN ('deprecatedEditable','installedEditable','unmanaged')"; 
+ if (flags.status) { + // Input parameter used + this.statusFilter = flags.status.split(','); + } else if (isCI || this.promptUser === false) { + // Obsolete by default for CI + this.statusFilter = ['Obsolete']; + } else { + // Query all flows definitions + uxLog("action", this, c.cyan('Querying all Flow definitions to select from...')); + const allFlowQueryCommand = + 'sf data query ' + + ` --query "SELECT Id,DeveloperName,MasterLabel,ManageableState FROM FlowDefinition WHERE ${manageableConstraint} ORDER BY DeveloperName"` + + ` --target-org ${this.username}` + + ' --use-tooling-api'; + const allFlowQueryRes = await execSfdxJson(allFlowQueryCommand, this, { + output: false, + debug: this.debugMode, + fail: true, + }); + const flowRecordsRaw = allFlowQueryRes?.result?.records || allFlowQueryRes.records || []; + const flowNamesUnique = [...new Set(flowRecordsRaw.map((flowRecord) => flowRecord.DeveloperName))]; + const flowNamesChoice = flowNamesUnique.map((flowName) => { + return { title: flowName, value: flowName }; + }); + flowNamesChoice.unshift({ title: 'All flows', value: 'all' }); + + // Manually select status + const selectStatus = await prompts([ + { + type: 'select', + name: 'name', + message: 'Please select the flow you want to clean', + description: 'Choose a specific flow to clean or select all flows', + placeholder: 'Select a flow', + choices: flowNamesChoice, + }, + { + type: 'multiselect', + name: 'status', + message: 'Please select the status(es) you want to delete', + description: 'Choose which flow version statuses should be deleted', + choices: [ + { title: `Draft`, value: 'Draft' }, + { title: `Inactive`, value: 'Inactive' }, + { title: `Obsolete`, value: 'Obsolete' }, + ], + }, + ]); + this.nameFilter = selectStatus.name; + this.statusFilter = selectStatus.status; + } + return manageableConstraint; + } + + private async displayFlowInterviewToDelete(flowVInterviewIds: string[], conn: any) { + const query = + 'SELECT 
Name,InterviewLabel,InterviewStatus,CreatedBy.Username,CreatedDate,LastModifiedDate ' + + `FROM FlowInterview WHERE Id IN ('${flowVInterviewIds.join("','")}')` + + ' ORDER BY Name'; + const flowsInterviewsToDelete = (await bulkQuery(query, conn)).records; + if (flowsInterviewsToDelete.length === 0) { + uxLog("warning", this, c.yellow('No Flow Interviews found to delete.')); + return; + } + // Display Flow Interviews to delete + const flowList = flowsInterviewsToDelete + .map( + (flow) => + `- ${c.bold(flow.Name)} (${c.green(flow.InterviewLabel)}) - ${c.yellow(flow.InterviewStatus)}` + ) + .join('\n'); + uxLog("action", this, c.cyan(`Found ${flowsInterviewsToDelete.length} Flow Interviews to delete:\n${flowList}`)); } } diff --git a/src/commands/hardis/org/refresh/after-refresh.ts b/src/commands/hardis/org/refresh/after-refresh.ts new file mode 100644 index 000000000..592fcdefd --- /dev/null +++ b/src/commands/hardis/org/refresh/after-refresh.ts @@ -0,0 +1,800 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Connection, Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import * as path from 'path'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import { execSfdxJson, uxLog } from '../../../../common/utils/index.js'; +import { parsePackageXmlFile, parseXmlFile, writePackageXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; +import { + deleteConnectedApps, + deployConnectedApps, + toConnectedAppFormat, + validateConnectedApps, + selectConnectedAppsForProcessing, + createConnectedAppSuccessResponse, + handleConnectedAppError +} from '../../../../common/utils/refresh/connectedAppUtils.js'; +import { getConfig } from '../../../../config/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; 
+import { soqlQuery, soqlQueryTooling } from '../../../../common/utils/apiUtils.js'; +import { importData, selectDataWorkspace } from '../../../../common/utils/dataUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +// Interface to track Connected Apps in the project +interface ProjectConnectedApp { + fullName: string; + filePath: string; + type: string; +} + +export default class OrgRefreshAfterRefresh extends SfCommand { + public static title = 'Restore Connected Apps after org refresh'; + + public static description = ` +## Command Behavior + +**Restores all previously backed-up Connected Apps (including Consumer Secrets), certificates, custom settings, records and other metadata to a Salesforce org after a sandbox refresh.** + +This command is the second step in the sandbox refresh process. It scans the backup folder created before the refresh, allows interactive or flag-driven selection of items to restore, and automates cleanup and redeployment to the refreshed org while preserving credentials and configuration. + +Key functionalities: + +- **Choose a backup to restore:** Lets you pick the saved sandbox project that contains the artifacts to restore. +- **Select which items to restore:** Finds Connected App XMLs, certificates, custom settings and other artifacts and lets you pick what to restore (or restore all). +- **Safety checks and validation:** Confirms files exist and prompts before making changes to the target org. +- **Prepare org for restore:** Optionally cleans up existing Connected Apps so saved apps can be re-deployed without conflict. +- **Redeploy saved artifacts:** Restores Connected Apps (with saved secrets), certificates, SAML SSO configs, custom settings and other metadata. +- **Handle SAML configs:** Cleans and updates SAML XML files and helps you choose certificates to wire into restored configs. 
+- **Restore records:** Optionally runs data import from selected SFDMU workspaces to restore record data. +- **Reporting & persistence:** Sends restore reports and can update project config to record what was restored. + +This command is part of [sfdx-hardis Sandbox Refresh](https://sfdx-hardis.cloudity.com/salesforce-sandbox-refresh/) and is intended to be run after a sandbox refresh to re-apply saved metadata, credentials and data. + +
+Technical explanations + +- **Backup Folder Handling:** Reads the immediate subfolders of \`scripts/sandbox-refresh/\` and validates the chosen project contains the expected \`manifest/\` and \`force-app\` layout. +- **Metadata & Deployment APIs:** Uses \`sf project deploy start --manifest\` for package-based deploys, \`sf project deploy start --metadata-dir\` for MDAPI artifacts (certificates), and utility functions for Connected App deployment that preserve consumer secrets. +- **SAML Handling:** Queries active certificates via tooling API, updates SAML XML files, and deploys using \`sf project deploy start -m SamlSsoConfig\`. +- **Records Handling:** Uses interactive selection of SFDMU workspaces and runs data import utilities to restore records. +- **Error Handling & Summary:** Aggregates results, logs success/warnings/errors, and returns a structured result indicating which items were restored and any failures. + +
+`; + + public static examples = [ + `$ sf hardis:org:refresh:after-refresh`, + `$ sf hardis:org:refresh:after-refresh --name "MyConnectedApp" // Process specific app, no selection prompt`, + `$ sf hardis:org:refresh:after-refresh --name "App1,App2,App3" // Process multiple apps, no selection prompt`, + `$ sf hardis:org:refresh:after-refresh --all // Process all apps, no selection prompt`, + `$ sf hardis:org:refresh:after-refresh --target-org myDevOrg`, + ]; + + public static flags = { + "target-org": Flags.requiredOrg(), + name: Flags.string({ + char: 'n', + summary: messages.getMessage('nameFilter'), + description: 'Connected App name(s) to process (bypasses selection prompt). For multiple apps, separate with commas (e.g., "App1,App2")' + }), + all: Flags.boolean({ + char: 'a', + summary: 'Process all Connected Apps without selection prompt', + description: 'If set, all Connected Apps from the local repository will be processed. Takes precedence over --name if both are specified.' + }), + websocket: Flags.string({ + summary: messages.getMessage('websocket'), + description: 'Websocket host:port for VsCode SFDX Hardis UI integration' + }), + skipauth: Flags.boolean({ + default: false, + summary: 'Skip authentication check when a default username is required', + description: 'Skip authentication check when a default username is required' + }) + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + protected refreshSandboxConfig: any = {}; + protected saveProjectPath: string; + protected result: any; + protected orgUsername: string; + protected nameFilter: string | undefined; + protected processAll: boolean; + protected conn: Connection; + protected instanceUrl: any; + protected orgId: string; + + public async run(): Promise { + const { flags } = await this.parse(OrgRefreshAfterRefresh); + this.orgUsername = flags["target-org"].getUsername() as string; + this.conn = 
flags["target-org"].getConnection(); + this.orgId = flags["target-org"].getOrgId() as string; + this.instanceUrl = this.conn.instanceUrl; + /* jscpd:ignore-start */ + this.processAll = flags.all || false; + this.nameFilter = this.processAll ? undefined : flags.name; // If --all is set, ignore --name + const config = await getConfig("user"); + this.refreshSandboxConfig = config?.refreshSandboxConfig || {}; + this.result = {} + /* jscpd:ignore-end */ + uxLog("action", this, c.cyan(`This command will restore information after the refresh of org ${this.instanceUrl} +- Certificates +- Other Metadatas +- SAML SSO Config +- Custom Settings +- Records (using SFDMU projects) +- Connected Apps`)); + // Prompt user to select a save project path + const saveProjectPathRoot = path.join(process.cwd(), 'scripts', 'sandbox-refresh'); + // Only get immediate subfolders of saveProjectPathRoot (not recursive) + const subFolders = fs.readdirSync(saveProjectPathRoot, { withFileTypes: true }) + .filter(dirent => dirent.isDirectory()) + .map(dirent => dirent.name); + + const saveProjectPath = await prompts({ + type: 'select', + name: 'path', + message: 'Select the project path where the sandbox info has been saved', + description: 'This is the path where the metadatas were saved before the org refresh', + choices: subFolders.map(folder => ({ + title: folder, + value: path.join(saveProjectPathRoot, folder) + })), + }); + this.saveProjectPath = saveProjectPath.path; + + + // 1. Restore Certificates + await this.restoreCertificates(); + + // 2. Restore Other Metadata + await this.restoreOtherMetadata(); + + // 3. Restore SamlSsoConfig + await this.restoreSamlSsoConfig(); + + // 4. Restore Custom Settings + await this.restoreCustomSettings(); + + // 5. Restore saved records + await this.restoreRecords(); + + // 6. 
Restore Connected Apps + await this.restoreConnectedApps(); + + return this.result; + } + + private async restoreCertificates(): Promise { + const certsDir = path.join(this.saveProjectPath, 'force-app', 'main', 'default', 'certs'); + const manifestDir = path.join(this.saveProjectPath, 'manifest'); + const certsPackageXml = path.join(manifestDir, 'package-certificates-to-save.xml'); + if (!fs.existsSync(certsDir) || !fs.existsSync(certsPackageXml)) { + uxLog("log", this, c.yellow('No certificates backup found, skipping certificate restore.')); + return; + } + // Copy certs to a temporary folder for deployment + const mdApiCertsRestoreFolder = path.join(this.saveProjectPath, 'mdapi_certs_restore'); + await fs.ensureDir(mdApiCertsRestoreFolder); + await fs.emptyDir(mdApiCertsRestoreFolder); + await fs.copy(certsDir, path.join(mdApiCertsRestoreFolder, "certs"), { overwrite: true }); + // List certificates in the restore folder + const certsFiles = fs.readdirSync(certsDir); + if (certsFiles.length === 0) { + uxLog("log", this, c.yellow('No certificates found in the backup folder, skipping certificate restore.')); + return; + } + // List .crt files and get their name, then check that each cert must have a .crt and a .crt-meta.xml file + const certsToRestoreNames = certsFiles.filter(file => file.endsWith('.crt')).map(file => path.basename(file, '.crt')); + const validCertsToRestoreNames = certsToRestoreNames.filter(name => { + return fs.existsSync(path.join(certsDir, `${name}.crt-meta.xml`)); + }); + if (validCertsToRestoreNames.length === 0) { + uxLog("log", this, c.yellow('No valid certificates found in the backup folder (with .crt + .crt-meta.xml), skipping certificate restore.')); + return; + } + + // Prompt certificates to restore (all by default) + const promptCerts = await prompts({ + type: 'multiselect', + name: 'certs', + message: `Select certificates to restore`, + description: 'Select the certificates you want to restore from the backup. 
You can select multiple certificates.', + choices: validCertsToRestoreNames.map(name => ({ + title: name, + value: name + })), + initial: validCertsToRestoreNames, // Select all by default + }); + const selectedCerts = promptCerts.certs; + if (selectedCerts.length === 0) { + uxLog("log", this, c.yellow('No certificates selected for restore, skipping certificate restore.')); + return; + } + + // Ask user confirmation before restoring certificates + const prompt = await prompts({ + type: 'confirm', + name: 'restore', + message: `Do you confirm you want to restore ${selectedCerts.length} certificate(s) ?`, + description: 'This will deploy all certificate files and definitions saved before the refresh.', + initial: true + }); + if (!prompt.restore) { + return; + } + + // Create manifest/package.xml within mdApiCertsRestoreFolder + const packageXmlCerts = { + "Certificate": selectedCerts + } + await writePackageXmlFile(path.join(mdApiCertsRestoreFolder, 'package.xml'), packageXmlCerts); + + // Deploy using metadata API + uxLog("log", this, c.grey(`Deploying certificates in org ${this.instanceUrl} using Metadata API (Source Api does not support it)...`)); + await execSfdxJson( + `sf project deploy start --metadata-dir ${mdApiCertsRestoreFolder} --target-org ${this.orgUsername}`, + this, + { output: true, fail: true, cwd: this.saveProjectPath } + ); + uxLog("success", this, c.green(`Certificates restored successfully in org ${this.instanceUrl}`)); + } + + private async restoreOtherMetadata(): Promise { + const manifestDir = path.join(this.saveProjectPath, 'manifest'); + const restorePackageXml = path.join(manifestDir, 'package-metadata-to-restore.xml'); + // Check if the restore package.xml exists + if (!fs.existsSync(restorePackageXml)) { + uxLog("log", this, c.yellow('No package-metadata-to-restore.xml found, skipping metadata restore.')); + return; + } + // Warn user about the restore package.xml that needs to be manually checked + 
WebSocketClient.sendReportFileMessage(restorePackageXml, "Restore Metadatas package.xml", "report"); + uxLog("action", this, c.cyan(`Now handling the restore of other metadata from ${restorePackageXml}...`)); + const metadataRestore = await parsePackageXmlFile(restorePackageXml); + const metadataSummary = Object.keys(metadataRestore).map(key => { + return `${key}(${Array.isArray(metadataRestore[key]) ? metadataRestore[key].length : 0})`; + }).join(', '); + uxLog("warning", this, c.yellow(`Look at the package-metadata-to-restore.xml file in ${c.bold(this.saveProjectPath)} to see what will be restored.`)); + uxLog("warning", this, c.yellow(`Confirm it's content, or remove/comment part of it if you don't want some metadata to be restored\n${metadataSummary}`)); + + const prompt = await prompts({ + type: 'confirm', + name: 'restore', + message: `Please double check package-metadata-to-restore.xml. Do you confirm you want to restore all these metadatas ?\n${metadataSummary}`, + description: `WARNING: Check and validate/update file ${restorePackageXml} BEFORE it is deployed !`, + initial: true + }); + if (!prompt.restore) { + uxLog("warning", this, c.yellow('Metadata restore cancelled by user.')); + this.result = Object.assign(this.result, { success: false, message: 'Metadata restore cancelled by user' }); + return; + } + // Deploy the metadata using the package.xml + uxLog("action", this, c.cyan('Deploying other metadatas to org...')); + const deployCmd = `sf project deploy start --manifest ${restorePackageXml} --target-org ${this.orgUsername} --json`; + const deployResult = await execSfdxJson(deployCmd, this, { output: true, fail: true, cwd: this.saveProjectPath }); + if (deployResult.status === 0) { + uxLog("success", this, c.green(`Other metadata restored successfully in org ${this.instanceUrl}`)); + } + else { + uxLog("error", this, c.red(`Failed to restore other metadata in org ${this.instanceUrl}: ${deployResult.error}`)); + this.result = 
Object.assign(this.result, { success: false, message: `Failed to restore other metadata: ${deployResult.error}` }); + throw new Error(`Failed to restore other metadata:\n${JSON.stringify(deployResult, null, 2)}`); + } + } + + private async restoreSamlSsoConfig(): Promise { + // 0. List all samlssoconfigs in the project, prompt user to select which to restore + const samlDir = path.join(this.saveProjectPath, 'force-app', 'main', 'default', 'samlssoconfigs'); + if (!fs.existsSync(samlDir)) { + uxLog("action", this, c.cyan('No SAML SSO Configs found, skipping SAML SSO config restore.')); + return; + } + const allSamlFiles = fs.readdirSync(samlDir).filter(f => f.endsWith('.samlssoconfig-meta.xml')); + if (allSamlFiles.length === 0) { + uxLog("action", this, c.yellow('No SAML SSO Config XML files found., skipping SAML SSO config restore.')); + return; + } + // Prompt user to select which SAML SSO configs to restore + const promptSaml = await prompts({ + type: 'multiselect', + name: 'samlFiles', + message: 'Select SAML SSO Configs to restore', + description: 'Select the SAML SSO Configs you want to restore from the backup. You can select multiple configs.', + choices: allSamlFiles.map(f => ({ title: f.replace('.samlssoconfig-meta.xml', ''), value: f })), + initial: allSamlFiles // select all by default + }); + const selectedSamlFiles: string[] = promptSaml.samlFiles; + if (!selectedSamlFiles || selectedSamlFiles.length === 0) { + uxLog("log", this, c.yellow('No SAML SSO Configs selected for restore, skipping.')); + return; + } + + // 1. 
Clean up XML and prompt for cert + // Query active certificates + const soql = "SELECT Id, MasterLabel FROM Certificate WHERE ExpirationDate > TODAY LIMIT 200"; + let certs: { Id: string, MasterLabel: string }[] = []; + try { + const res = await soqlQueryTooling(soql, this.conn); + certs = res.records as any; + } catch (e) { + uxLog("error", this, c.red(`Failed to query active certificates: ${e}`)); + return; + } + if (!certs.length) { + uxLog("error", this, c.yellow('No active certificates found in org. You\'ll need to update manually field requestSigningCertId with the id of a valid certificate.')); + return; + } + const updated: string[] = []; + const errors: string[] = []; + for (const samlFile of selectedSamlFiles) { + const samlName = samlFile.replace('.samlssoconfig-meta.xml', ''); + // Prompt user to select a certificate + const certPrompt = await prompts({ + type: 'select', + name: 'certId', + message: `Select the certificate to use for SAML SSO config ${samlName}`, + description: `This will update in ${samlFile}.`, + choices: certs.map(cert => ({ + title: cert.MasterLabel, + value: cert.Id.substring(0, 15) + })), + }); + const selectedCertId = certPrompt.certId; + if (!selectedCertId) { + uxLog("warning", this, c.yellow('No certificate selected. Skipping SAML SSO config update.')); + errors.push(`No certificate selected for ${samlName}`); + continue; + } + const filePath = path.join(samlDir, samlFile); + let xml = await fs.readFile(filePath, 'utf8'); + // Remove ... + xml = xml.replace(/.*?<\/oauthTokenEndpoint>\s*/gs, ''); + // Remove ... + xml = xml.replace(/.*?<\/salesforceLoginUrl>\s*/gs, ''); + // Replace ... 
+ if (/.*?<\/requestSigningCertId>/s.test(xml)) { + xml = xml.replace(/.*?<\/requestSigningCertId>/s, `${selectedCertId}`); + } + await fs.writeFile(filePath, xml, 'utf8'); + uxLog("log", this, c.grey(`Updated SAML SSO config ${samlFile} with certificate ${selectedCertId} and removed readonly tags oauthTokenEndpoint & salesforceLoginUrl`)); + // 2. Prompt user to confirm deployment + const promptDeploy = await prompts({ + type: 'confirm', + name: 'deploy', + message: `Do you confirm you want to deploy ${samlFile} SAML SSO Config to the org?`, + description: 'This will deploy the selected SAML SSO Configs to the org using SFDX', + initial: true + }); + if (!promptDeploy.deploy) { + uxLog("warning", this, c.yellow(`SAML SSO Config ${samlFile} deployment cancelled by user.`)); + errors.push(`Deployment cancelled for ${samlFile}`); + continue; + } + const deployCommand = `sf project deploy start -m SamlSsoConfig:${samlName} --target-org ${this.orgUsername}`; + try { + uxLog("action", this, c.cyan(`Deploying SAML SSO Config ${samlName} to org ${this.instanceUrl}...`)); + const deployResult = await execSfdxJson(deployCommand, this, { output: true, fail: true, cwd: this.saveProjectPath }); + if (deployResult.status === 0) { + uxLog("success", this, c.green(`SAML SSO Config ${samlName} deployed successfully in org ${this.instanceUrl}`)); + updated.push(samlName); + } else { + uxLog("error", this, c.red(`Failed to deploy SAML SSO Config ${samlName}: ${deployResult.error}`)); + errors.push(`Failed to deploy ${samlName}: ${deployResult.error}`); + } + } catch (e: any) { + uxLog("error", this, c.red(`Error deploying SAML SSO Config ${samlName}: ${e.message}`)); + errors.push(`Error deploying ${samlName}: ${e.message}`); + } + } + // 3. 
Summary of results + uxLog("action", this, c.cyan(`SAML SSO Config processing completed.`)); + if (updated.length > 0) { + uxLog("success", this, c.green(`Successfully updated and deployed SAML SSO Configs: ${updated.join(', ')}`)); + } + if (errors.length > 0) { + uxLog("error", this, c.red(`Errors occurred during SAML SSO Config processing:\n${errors.join('\n')}`)); + this.result = Object.assign(this.result, { success: false, message: `SAML SSO Config processing errors:\n${errors.join('\n')}` }); + } + } + + private async restoreCustomSettings(): Promise { + // Check there are custom settings to restore + const csDir = path.join(this.saveProjectPath, 'savedCustomSettings'); + if (!fs.existsSync(csDir)) { + uxLog("log", this, c.yellow('No savedCustomSettings folder found, skipping custom settings restore.')); + return; + } + const csFolders = fs.readdirSync(csDir).filter(f => fs.statSync(path.join(csDir, f)).isDirectory()); + if (csFolders.length === 0) { + uxLog("log", this, c.yellow('No custom settings data found, skipping custom settings restore.')); + return; + } + // List custom settings to restore so users can select them. Keep only folders that have a .json file + const csToRestore = csFolders.filter(folder => { + const jsonFile = path.join(csDir, folder, `${folder}.json`); + return fs.existsSync(jsonFile); + }); + if (csToRestore.length === 0) { + uxLog("log", this, c.yellow('No custom settings data found to restore, skipping custom settings restore.')); + return; + } + // Prompt custom settings to restore: All by default + const promptRestore = await prompts({ + type: 'multiselect', + name: 'settings', + message: `Select custom settings to restore`, + description: 'Select the custom settings you want to restore from the backup. 
You can select multiple settings.', + choices: csToRestore.map(folder => ({ + title: folder, + value: folder + })), + initial: csToRestore // Select all by default + }); + const selectedSettings = promptRestore.settings; + if (selectedSettings.length === 0) { + uxLog("log", this, c.yellow('No custom settings selected for restore, skipping custom settings restore.')); + return; + } + + // Ask last confirmation to user + const prompt = await prompts({ + type: 'confirm', + name: 'restore', + message: `Do you confirm you want to restore ${selectedSettings.length} Custom Settings values from backup?`, + description: 'This will import all custom settings data saved before the refresh.', + initial: true + }); + if (!prompt.restore) { + uxLog("warning", this, c.yellow('Custom settings restore cancelled by user.')); + return; + } + uxLog("action", this, c.cyan(`Restoring ${selectedSettings.length} Custom Settings...`)); + const successSettings: string[] = [] + const failedSettings: string[] = [] + for (const folder of selectedSettings) { + const jsonFile = path.join(csDir, folder, `${folder}.json`); + if (!fs.existsSync(jsonFile)) { + uxLog("warning", this, c.yellow(`No data file for custom setting ${folder}`)); + failedSettings.push(folder); + continue; + } + // Remove standard fields from the JSON file and create a new file without them, and replace Org Id with the current org one + const jsonFileForImport = path.join(csDir, folder, `${folder}-without-standard-fields.json`); + const jsonData = await fs.readJson(jsonFile); + const standardFields = ['LastModifiedDate', 'IsDeleted', 'CreatedById', 'CreatedDate', 'LastModifiedById', 'SystemModstamp']; + let deleteExistingCsBefore = false; + jsonData.records = (jsonData?.records || []).map((record: any) => { + const newRecord: any = {}; + for (const key in record) { + // Remove standard fields + if (!standardFields.includes(key)) { + newRecord[key] = record[key]; + } + // Replace Org Id with the current org one + if (key === 
'SetupOwnerId') { + newRecord[key] = this.orgId; // Replace with current org Id + deleteExistingCsBefore = true; // Use upsert if SetupOwnerId is present + } + } + return newRecord; + }); + // Write the new JSON file without standard fields + await fs.writeJson(jsonFileForImport, jsonData, { spaces: 2 }); + + // Delete existing custom settings before import if needed + if (deleteExistingCsBefore) { + uxLog("log", this, c.grey(`Deleting existing custom settings for ${folder} in org ${this.orgUsername} before import...`)); + // Query existing custom settings to delete + const query = `SELECT Id FROM ${folder} WHERE SetupOwnerId = '${this.orgId}'`; + const queryRes = await soqlQuery(query, this.conn); + if (queryRes.records.length > 0) { + const idsToDelete = (queryRes?.records.map(record => record.Id) || []).filter((id): id is string => typeof id === 'string'); + uxLog("log", this, c.grey(`Found ${idsToDelete.length} existing custom settings to delete for ${folder} in org ${this.orgUsername}`)); + const deleteResults = await this.conn.sobject(folder).destroy(idsToDelete, { allOrNone: true }); + const deletedSuccessFullyIds = deleteResults.filter(result => result.success).map(result => "- " + result.id).join('\n'); + uxLog("log", this, c.grey(`Deleted ${deletedSuccessFullyIds.length} existing custom settings for ${folder} in org ${this.orgUsername}\n${deletedSuccessFullyIds}`)); + const deletedErrorIds = deleteResults.filter(result => !result.success).map(result => "- " + result.id).join('\n'); + if (deletedErrorIds.length > 0) { + uxLog("warning", this, c.yellow(`Failed to delete existing custom settings for ${folder} in org ${this.orgUsername}\n${deletedErrorIds}`)); + continue; // Skip to next setting if deletion failed + } + } else { + uxLog("log", this, c.grey(`No existing custom settings found for ${folder} in org ${this.orgUsername}.`)); + } + } + // Import the custom setting using sf data tree import + const importCmd = `sf data tree import --files 
${jsonFileForImport} --target-org ${this.orgUsername} --json`; + try { + const importRes = await execSfdxJson(importCmd, this, { output: true, fail: true, cwd: this.saveProjectPath }); + if (importRes.status === 0) { + uxLog("success", this, c.green(`Custom setting ${folder} restored.`)); + successSettings.push(folder); + } + else { + uxLog("error", this, c.red(`Failed to restore custom setting ${folder}:\n${JSON.stringify(importRes, null, 2)}`)); + failedSettings.push(folder); + } + } catch (e) { + uxLog("error", this, c.red(`Custom setting ${folder} restore failed:\n${JSON.stringify(e)}`)); + failedSettings.push(folder); + continue; + } + } + uxLog("action", this, c.cyan(`Custom settings restore complete (${successSettings.length} successful, ${failedSettings.length} failed)`)); + if (successSettings.length > 0) { + const successSettingsNames = successSettings.map(name => "- " + name).join('\n'); + uxLog("success", this, c.green(`Successfully restored ${successSettings.length} Custom Setting(s):\n ${successSettingsNames}`)); + } + if (failedSettings.length > 0) { + const failedSettingsNames = failedSettings.map(name => "- " + name).join('\n'); + uxLog("error", this, c.red(`Failed to restore ${failedSettings.length} Custom Setting(s): ${failedSettingsNames}`)); + } + } + + private async restoreRecords(): Promise { + const sfdmuWorkspaces = await selectDataWorkspace({ + selectDataLabel: 'Select data workspaces to use to restore records after sandbox refresh', + multiple: true, + initial: "all", + cwd: this.saveProjectPath + }); + if (!(Array.isArray(sfdmuWorkspaces) && sfdmuWorkspaces.length > 0)) { + uxLog("warning", this, c.yellow('No data workspace found, skipping record restore')); + return; + } + + const confirmRestore = await prompts({ + type: 'confirm', + name: 'confirm', + message: `Before launching the data loading, please make sure your user ${this.orgUsername} has the appropriate ByPasses / Activation Settings / Custom Permissions / Whatever you need to 
do before starting the data load.`, + initial: true, + description: 'Once confirmed, the data loading will start' + }); + if (!confirmRestore.confirm) { + uxLog("warning", this, c.yellow('Record restore cancelled by user')); + return; + } + + for (const sfdmuPath of sfdmuWorkspaces) { + await importData(sfdmuPath || '', this, { + targetUsername: this.orgUsername, + cwd: this.saveProjectPath, + }); + } + } + + private async restoreConnectedApps(): Promise { + let restoreConnectedApps = false; + const promptRestoreConnectedApps = await prompts({ + type: 'confirm', + name: 'confirmRestore', + message: `Do you want to restore Connected Apps from the backup in ${c.bold(this.saveProjectPath)}?`, + initial: true, + description: 'This will restore all Connected Apps (including Consumer Secrets) from the backup created before the org refresh.' + }); + if (promptRestoreConnectedApps.confirmRestore) { + restoreConnectedApps = true; + } + + if (restoreConnectedApps) { + + try { + // Step 1: Find Connected Apps in the project + const connectedApps = await this.findConnectedAppsInProject(this.nameFilter, this.processAll); + + if (connectedApps.length === 0) { + uxLog("warning", this, c.yellow('No Connected Apps found in the project')); + this.result = Object.assign(this.result, { success: false, message: 'No Connected Apps found in the project' }); + return; + } + + /* jscpd:ignore-start */ + // Step 2: Select which Connected Apps to process + const selectedApps = await this.selectConnectedApps(connectedApps, this.processAll, this.nameFilter); + + if (selectedApps.length === 0) { + uxLog("warning", this, c.yellow('No Connected Apps selected')); + this.result = Object.assign(this.result, { success: false, message: 'No Connected Apps selected' }); + return; + } + /* jscpd:ignore-end */ + + // Step 3: Delete existing Connected Apps from the org for clean deployment + await this.deleteExistingConnectedApps(this.orgUsername, selectedApps); + + // Step 4: Deploy the Connected Apps to 
the org + await this.deployConnectedApps(this.orgUsername, selectedApps); + + // Return the result + uxLog("action", this, c.cyan(`Summary`)); + const appNames = selectedApps.map(app => `- ${app.fullName}`).join('\n'); + uxLog("success", this, c.green(`Successfully restored ${selectedApps.length} Connected App(s) to ${this.conn.instanceUrl}\n${appNames}`)); + const restoreResult = createConnectedAppSuccessResponse( + `Successfully restored ${selectedApps.length} Connected App(s) to the org`, + selectedApps.map(app => app.fullName) + ); + this.result = Object.assign(this.result, restoreResult); + } catch (error: any) { + const restoreResult = handleConnectedAppError(error, this); + this.result = Object.assign(this.result, restoreResult); + } + } + } + + private async findConnectedAppsInProject( + nameFilter?: string, + processAll?: boolean + ): Promise { + if (processAll) { + uxLog("action", this, c.cyan('Processing all Connected Apps from local repository (selection prompt bypassed)')); + } else if (nameFilter) { + uxLog("action", this, c.cyan(`Processing specified Connected App(s): ${nameFilter} (selection prompt bypassed)`)); + } else { + uxLog("action", this, c.cyan('Scanning project for Connected Apps...')); + } + + try { + // Get all Connected App files in the project once + const connectedAppFilesRaw = await glob('**/*.connectedApp-meta.xml', { + ignore: GLOB_IGNORE_PATTERNS, + cwd: this.saveProjectPath + }) + + const connectedAppFiles = connectedAppFilesRaw.map(file => path.join(this.saveProjectPath, file)); + + if (connectedAppFiles.length === 0) { + uxLog("warning", this, c.yellow('No Connected App files found in the project')); + return []; + } + + // Create ConnectedApp objects from the files + const connectedApps: ProjectConnectedApp[] = []; + const allFoundApps: { fullName: string; filePath: string }[] = []; + + // First, collect all available Connected Apps in the project in one pass + for (const filePath of connectedAppFiles) { + try { + const 
xmlData = await parseXmlFile(filePath); + if (xmlData && xmlData.ConnectedApp) { + const fullName = xmlData.ConnectedApp.fullName?.[0] || path.basename(filePath, '.connectedApp-meta.xml'); + allFoundApps.push({ fullName, filePath }); + } + } catch (error) { + uxLog("warning", this, c.yellow(`Error parsing ${filePath}: ${error}`)); + // Continue with the next file + } + } + + if (allFoundApps.length === 0) { + uxLog("warning", this, c.yellow('No valid Connected Apps found in the project')); + return []; + } + + // If name filter is specified, validate that all requested apps exist + if (nameFilter) { + const appNames = nameFilter.split(',').map(name => name.trim()); + const availableAppNames = allFoundApps.map(app => app.fullName); + + // Case-insensitive matching for app names + validateConnectedApps(appNames, availableAppNames, this, 'project'); + + // Filter apps based on name filter + for (const app of allFoundApps) { + const matchesFilter = appNames.some(name => + name.toLowerCase() === app.fullName.toLowerCase() + ); + + if (matchesFilter) { + connectedApps.push({ + fullName: app.fullName, + filePath: app.filePath, + type: 'ConnectedApp' + }); + } + } + } else { + // No filter - add all apps + for (const app of allFoundApps) { + connectedApps.push({ + fullName: app.fullName, + filePath: app.filePath, + type: 'ConnectedApp' + }); + } + } + + // Display results + if (connectedApps.length > 0) { + const appNamesAndPaths = connectedApps.map(app => `- ${app.fullName} (${app.filePath})`).join('\n'); + uxLog("log", this, c.cyan(`Found ${connectedApps.length} Connected App(s) in project\n${appNamesAndPaths}`)); + } else if (nameFilter) { + uxLog("warning", this, c.yellow(`No Connected Apps matching the filter "${nameFilter}" found in the project`)); + } + + return connectedApps; + } catch (error) { + uxLog("error", this, c.red(`Error searching for Connected App files: ${error}`)); + return []; + } + } + + /* jscpd:ignore-start */ + private async selectConnectedApps( + 
connectedApps: ProjectConnectedApp[], + processAll: boolean, + nameFilter?: string + ): Promise { + const initialSelection: string[] = []; + if (this.refreshSandboxConfig.connectedApps && this.refreshSandboxConfig.connectedApps.length > 0) { + initialSelection.push(...this.refreshSandboxConfig.connectedApps); + } + return selectConnectedAppsForProcessing( + connectedApps, + initialSelection, + processAll, + nameFilter, + 'Select Connected Apps to restore', + this + ); + } + /* jscpd:ignore-end */ + + private async deleteExistingConnectedApps( + orgUsername: string, + connectedApps: ProjectConnectedApp[] + ): Promise { + if (connectedApps.length === 0) return; + + const promptResponse = await prompts({ + type: 'confirm', + name: 'confirmDelete', + message: `Now we need to delete ${connectedApps.length} Connected App(s) from the refreshed sandbox, to be able to reupload them with saved credentials. Proceed ?`, + description: 'This step is necessary to ensure that the Connected Apps can be re-deployed with their saved credentials.', + initial: true + }); + if (!promptResponse.confirmDelete) { + throw new Error('Connected Apps deletion cancelled by user'); + } + + // Convert ProjectConnectedApp to the format required by deleteConnectedApps + const appsToDelete = toConnectedAppFormat(connectedApps); + + // Delete the apps without prompting + await deleteConnectedApps(orgUsername, appsToDelete, this, this.saveProjectPath); + uxLog("success", this, c.green('Connected Apps were successfully deleted from the org.')); + } + + private async deployConnectedApps( + orgUsername: string, + connectedApps: ProjectConnectedApp[] + ): Promise { + if (connectedApps.length === 0) return; + + const promptResponse = await prompts({ + type: 'confirm', + name: 'confirmDeploy', + message: `Now we will deploy ${connectedApps.length} Connected App(s) to the org to restore the original credentials. 
Proceed ?`, + description: 'This step will deploy the Connected Apps with their saved credentials.', + initial: true + }); + + if (!promptResponse.confirmDeploy) { + throw new Error('Connected Apps deployment cancelled by user'); + } + + // Convert ProjectConnectedApp to the format needed by deployConnectedApps + const connectedAppsList = toConnectedAppFormat(connectedApps); + await deployConnectedApps(orgUsername, connectedAppsList, this, this.saveProjectPath); + + uxLog("success", this, c.green(`Deployment of ${connectedApps.length} Connected App(s) completed successfully`)); + } +} diff --git a/src/commands/hardis/org/refresh/before-refresh.ts b/src/commands/hardis/org/refresh/before-refresh.ts new file mode 100644 index 000000000..74439db9e --- /dev/null +++ b/src/commands/hardis/org/refresh/before-refresh.ts @@ -0,0 +1,1029 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Connection, Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import fs from 'fs-extra'; +import c from 'chalk'; +import open from 'open'; +import axios from 'axios'; +import path from 'path'; +import puppeteer, { Browser, Page } from 'puppeteer-core'; +import { execCommand, execSfdxJson, isCI, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { parsePackageXmlFile, parseXmlFile, writePackageXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { getChromeExecutablePath } from '../../../../common/utils/orgConfigUtils.js'; +import { + deleteConnectedApps, + retrieveConnectedApps, + validateConnectedApps, + findConnectedAppFile, + selectConnectedAppsForProcessing, + createConnectedAppSuccessResponse, + handleConnectedAppError +} from '../../../../common/utils/refresh/connectedAppUtils.js'; +import { CONSTANTS, getConfig, setConfig } from '../../../../config/index.js'; +import { soqlQuery } from '../../../../common/utils/apiUtils.js'; +import 
{ WebSocketClient } from '../../../../common/websocketClient.js'; +import { PACKAGE_ROOT_DIR } from '../../../../settings.js'; +import { exportData, hasDataWorkspaces, selectDataWorkspace } from '../../../../common/utils/dataUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +// Define interface for Connected App metadata +interface ConnectedApp { + fullName: string; + fileName: string; + type: string; + consumerKey?: string; + consumerSecret?: string; +} + +// Interface for browser-related operations +interface BrowserContext { + browser: Browser; + instanceUrl: string; + accessToken: string; +} + +export default class OrgRefreshBeforeRefresh extends SfCommand { + public static description = ` +## Command Behavior + +**Backs up all Connected Apps (including Consumer Secrets), certificates, custom settings, records and other metadata from a Salesforce org before a sandbox refresh, enabling full restoration after the refresh.** + +This command prepares a complete backup prior to a sandbox refresh. It creates a dedicated project under \`scripts/sandbox-refresh/\`, retrieves metadata and data, attempts to capture Connected App consumer secrets, and can optionally delete the apps so they can be reuploaded after the refresh. + +Key functionalities: + +- **Create a save project:** Generates a dedicated project folder to store all artifacts for the sandbox backup. +- **Find and select Connected Apps:** Lists Connected Apps in the org and lets you pick specific apps, use a name filter, or process all apps. +- **Save metadata for restore:** Builds a manifest and retrieves the metadata types you choose so they can be restored after the refresh. +- **Capture Consumer Secrets:** Attempts to capture Connected App consumer secrets automatically (opens a browser session when possible) and falls back to a short manual prompt when needed. 
+- **Collect certificates:** Saves certificate files and their definitions so they can be redeployed later. +- **Export custom settings & records:** Lets you pick custom settings to export as JSON and optionally export records using configured data workspaces. +- **Persist choices & report:** Stores your backup choices in project config and sends report files for traceability. +- **Optional cleanup:** Can delete backed-up Connected Apps from the org so they can be re-uploaded cleanly after the refresh. +- **Interactive safety checks:** Prompts you to confirm package contents and other potentially destructive actions; sensible defaults are chosen where appropriate. + +This command is part of [sfdx-hardis Sandbox Refresh](https://sfdx-hardis.cloudity.com/salesforce-sandbox-refresh/) and is intended to be run before a sandbox refresh so that all credentials, certificates, metadata and data can be restored afterwards. + +
+Technical explanations + +- **Salesforce CLI Integration:** Uses \`sf org list metadata\`, \`sf project retrieve start\`, \`sf project generate\`, \`sf project deploy start\`, and \`sf data tree export\`/\`import\` where applicable. +- **Metadata Handling:** Writes and reads package XML files under the generated project (\`manifest/\`), copies MDAPI certificate artifacts into \`force-app/main/default/certs\`, and produces \`package-metadata-to-restore.xml\` for post-refresh deployment. +- **Consumer Secret Handling:** Uses \`puppeteer-core\` with an executable path from \`getChromeExecutablePath()\` (env var \`PUPPETEER_EXECUTABLE_PATH\` may be required). Falls back to manual prompt when browser automation cannot be used. +- **Data & Records:** Exports custom settings to JSON and supports exporting records through SFDMU workspaces chosen interactively. +- **Config & Reporting:** Updates project/user config under \`config/.sfdx-hardis.yml#refreshSandboxConfig\` and reports artifacts to the WebSocket client. +- **Error Handling:** Provides clear error messages and a summary response object indicating success/failure and which secrets were captured. + +
+`; + + + public static examples: string[] = [ + "$ sf hardis:org:refresh:before-refresh", + "$ sf hardis:org:refresh:before-refresh --name \"MyConnectedApp\"", + "$ sf hardis:org:refresh:before-refresh --name \"App1,App2,App3\"", + "$ sf hardis:org:refresh:before-refresh --all", + "$ sf hardis:org:refresh:before-refresh --delete", + ]; + + public static flags = { + "target-org": Flags.requiredOrg(), + delete: Flags.boolean({ + char: 'd', + summary: 'Delete Connected Apps from org after saving', + description: 'By default, Connected Apps are not deleted from the org after saving. Set this flag to force their deletion so they will be able to be reuploaded again after refreshing the org.', + default: false + }), + name: Flags.string({ + char: 'n', + summary: messages.getMessage('nameFilter'), + description: 'Connected App name(s) to process. For multiple apps, separate with commas (e.g., "App1,App2")' + }), + all: Flags.boolean({ + char: 'a', + summary: 'Process all Connected Apps without selection prompt', + description: 'If set, all Connected Apps from the org will be processed. Takes precedence over --name if both are specified.' 
+ }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }) + }; + + public static requiresProject = true; + + protected conn: Connection; + protected saveProjectPath: string = ''; + protected orgUsername: string = ''; + protected instanceUrl: string = ''; + protected refreshSandboxConfig: any = {}; + protected result: any; + protected processAll: boolean; + protected nameFilter: string | undefined; + protected deleteApps: boolean; + + + public async run(): Promise { + const { flags } = await this.parse(OrgRefreshBeforeRefresh); + this.conn = flags["target-org"].getConnection(); + this.orgUsername = flags["target-org"].getUsername() as string; // Cast to string to avoid TypeScript error + this.instanceUrl = this.conn.instanceUrl; + this.deleteApps = flags.delete || false; + const accessToken = this.conn.accessToken; // Ensure accessToken is a string + this.processAll = flags.all || false; + this.nameFilter = this.processAll ? undefined : flags.name; // If --all is set, ignore --name + const config = await getConfig("user"); + this.refreshSandboxConfig = config?.refreshSandboxConfig || {}; + this.result = { success: true, message: 'before-refresh command performed successfully' }; + + uxLog("action", this, c.cyan(`This command will save information that must be restored after org refresh, in the following order: +- Certificates +- Other Metadatas +- Custom Settings +- Records (using SFDMU projects) +- Connected Apps + `)); + + // Check org is connected + if (!accessToken) { + throw new SfError(c.red('Access token is required to retrieve Connected Apps from the org. 
Please authenticate to a default org.')); + } + + this.saveProjectPath = await this.createSaveProject(); + + await this.retrieveCertificates(); + + await this.saveMetadatas(); + + await this.saveCustomSettings(); + + await this.saveRecords(); + + await this.retrieveDeleteConnectedApps(accessToken); + + return this.result; + } + + private async createSaveProject(): Promise { + const folderName = this.conn.instanceUrl.replace(/https?:\/\//, '').replace("my.salesforce.com", "").replace(/\//g, '-').replace(/[^a-zA-Z0-9-]/g, ''); + const sandboxRefreshRootFolder = path.join(process.cwd(), 'scripts', 'sandbox-refresh'); + const projectPath = path.join(sandboxRefreshRootFolder, folderName); + if (fs.existsSync(projectPath)) { + uxLog("log", this, c.cyan(`Project folder ${projectPath} already exists. Reusing it.\n(Delete it and run again this command if you want to start fresh)`)); + return projectPath; + } + await fs.ensureDir(projectPath); + uxLog("action", this, c.cyan(`Creating sfdx-project for sandbox info storage`)); + const createCommand = `sf project generate --name "${folderName}"`; + await execCommand(createCommand, this, { + output: true, + fail: true, + }); + uxLog("log", this, c.grey('Moving sfdx-project to root...')); + await fs.copy(folderName, projectPath, { overwrite: true }); + await fs.remove(folderName); + uxLog("log", this, c.grey(`Save Project created in folder ${projectPath}`)); + return projectPath; + } + + private async retrieveDeleteConnectedApps(accessToken: string): Promise { + // If metadatas folder is not empty, ask if we want to retrieve them again + let retrieveConnectedApps = true; + const connectedAppsFolder = path.join(this.saveProjectPath, 'force-app', 'main', 'default', 'connectedApps'); + if (fs.existsSync(connectedAppsFolder) && fs.readdirSync(connectedAppsFolder).length > 0) { + const confirmRetrieval = await prompts({ + type: 'confirm', + name: 'retrieveAgain', + message: `Connected Apps folder is not empty. 
Do you want to retrieve Connected Apps again?`, + description: `If you do not retrieve them again, the Connected Apps will not be updated with the latest changes from the org.`, + initial: false + }); + + if (!confirmRetrieval.retrieveAgain) { + retrieveConnectedApps = false; + } + } + + if (retrieveConnectedApps) { + try { + // Step 1: Get Connected Apps from org or based on provided name filter + const connectedApps = await this.getConnectedApps(this.orgUsername, this.nameFilter, this.processAll); + + if (connectedApps.length === 0) { + uxLog("warning", this, c.yellow('No Connected Apps found')); + this.result = Object.assign(this.result, { success: false, message: 'No Connected Apps found' }) + return; + } + + // Step 2: Determine which apps to process (all, filtered, or user-selected) + const selectedApps = await this.selectConnectedApps(connectedApps, this.processAll, this.nameFilter); + + if (selectedApps.length === 0) { + uxLog("warning", this, c.yellow('No Connected Apps selected')); + this.result = Object.assign(this.result, { success: false, message: 'No Connected Apps selected' }); + return; + } + this.refreshSandboxConfig.connectedApps = selectedApps.map(app => app.fullName).sort(); + await this.saveConfig(); + + // Step 3: Process the selected Connected Apps + const updatedApps = await this.processConnectedApps(this.orgUsername, selectedApps, this.instanceUrl, accessToken); + + // Step 4: Delete Connected Apps from org if required (default behavior) + + if (!isCI && !this.deleteApps) { + const connectedAppNames = updatedApps.map(app => app.fullName).join(', '); + const deletePrompt = await prompts({ + type: 'confirm', + name: 'delete', + message: `Do you want to delete the Connected Apps from the org after saving? 
${connectedAppNames}`, + description: 'If you do not delete them, they will remain in the org and can be re-uploaded after refreshing the org.', + initial: false + }); + this.deleteApps = deletePrompt.delete; + } + + if (this.deleteApps) { + uxLog("action", this, c.cyan(`Deleting ${updatedApps.length} Connected Apps from ${this.conn.instanceUrl} ...`)); + await deleteConnectedApps(this.orgUsername, updatedApps, this, this.saveProjectPath); + uxLog("success", this, c.green('Connected Apps were successfully deleted from the org.')); + } + + const summaryMessage = this.deleteApps + ? `You are now ready to refresh your sandbox org, as you will be able to re-upload the Connected Apps after the refresh.` + : `Dry-run successful, run again the command with Connected Apps deletion to be able to refresh your org and re-upload the Connected Apps after the refresh.`; + uxLog("action", this, c.cyan(summaryMessage)); + // Add a summary message at the end + if (updatedApps.length > 0) { + uxLog("success", this, c.green(`Successfully saved locally ${updatedApps.length} Connected App(s) with their Consumer Secrets`)); + } + + uxLog("success", this, c.cyan('Saved refresh sandbox configuration in config/.sfdx-hardis.yml')); + WebSocketClient.sendReportFileMessage(path.join(process.cwd(), 'config', '.sfdx-hardis.yml#refreshSandboxConfig'), "Sandbox refresh configuration", 'report'); + + const connectedAppRes = createConnectedAppSuccessResponse( + `Successfully processed ${updatedApps.length} Connected App(s)`, + updatedApps.map(app => app.fullName), + { + consumerSecretsAdded: updatedApps.map(app => app.consumerSecret ? 
app.fullName : null).filter(Boolean) + } + ); + this.result = Object.assign(this.result || {}, connectedAppRes); + + } catch (error: any) { + this.result = Object.assign(this.result || {}, handleConnectedAppError(error, this)); + } + } + } + + private async getConnectedApps( + orgUsername: string, + nameFilter: string | undefined, + processAll: boolean + ): Promise { + // Set appropriate log message based on flags + if (processAll) { + uxLog("action", this, c.cyan('Processing all Connected Apps from org (selection prompt bypassed)')); + } else if (nameFilter) { + uxLog("action", this, c.cyan(`Processing specified Connected App(s): ${nameFilter} (selection prompt bypassed)`)); + } else { + uxLog("action", this, c.cyan(`Listing Connected Apps in org ${this.conn.instanceUrl} ...`)); + } + + const command = `sf org list metadata --metadata-type ConnectedApp --target-org ${orgUsername}`; + const result = await execSfdxJson(command, this, { output: true }); + + const availableApps: ConnectedApp[] = result?.result && Array.isArray(result.result) ? 
result.result : []; + + if (availableApps.length === 0) { + uxLog("warning", this, c.yellow('No Connected Apps were found in the org.')); + return []; + } + availableApps.sort((a, b) => a.fullName.localeCompare(b.fullName)); + + const availableAppNames = availableApps.map(app => app.fullName); + uxLog("log", this, c.grey(`Found ${availableApps.length} Connected App(s) in the org`)); + + // If name filter is provided, validate and filter the requested apps + if (nameFilter) { + const appNames = nameFilter.split(',').map(name => name.trim()); + uxLog("action", this, c.cyan(`Validating specified Connected App(s): ${appNames.join(', ')}`)); + + validateConnectedApps(appNames, availableAppNames, this, 'org'); + + // Filter available apps to only include the ones specified in the name filter (case-insensitive) + const connectedApps = availableApps.filter(app => + appNames.some(name => name.toLowerCase() === app.fullName.toLowerCase()) + ); + + uxLog("success", this, c.green(`Successfully validated ${connectedApps.length} Connected App(s) in the org`)); + return connectedApps; + } + + // If no name filter, return all available apps + return availableApps; + } + + private async selectConnectedApps( + connectedApps: ConnectedApp[], + processAll: boolean, + nameFilter: string | undefined + ): Promise { + const initialSelection: string[] = []; + if (this.refreshSandboxConfig.connectedApps && this.refreshSandboxConfig.connectedApps.length > 0) { + initialSelection.push(...this.refreshSandboxConfig.connectedApps); + } + return selectConnectedAppsForProcessing( + connectedApps, + initialSelection, + processAll, + nameFilter, + 'Select Connected Apps that you will want to restore after org refresh', + this + ); + } + + private async processConnectedApps( + orgUsername: string | undefined, + connectedApps: ConnectedApp[], + instanceUrl: string, + accessToken: string = '' + ): Promise { + if (!orgUsername) { + throw new Error('Organization username is required'); + } + + const 
updatedApps: ConnectedApp[] = []; + let browserContext: BrowserContext | null = null; + + try { + // Step 1: Retrieve the Connected Apps from org + await this.retrieveConnectedAppsFromOrg(orgUsername, connectedApps, this.saveProjectPath); + + // Step 2: Query for applicationIds for all Connected Apps + const connectedAppIdMap = await this.queryConnectedAppIds(orgUsername, connectedApps); + + // Step 3: Initialize browser for automation if access token is available + uxLog("action", this, c.cyan('Initializing browser for automated Connected App Secrets extraction...')); + try { + browserContext = await this.initializeBrowser(instanceUrl, accessToken); + } catch (e: any) { + uxLog("error", this, c.red(`Error initializing browser for automated Consumer Secret extraction: ${e.message}. +You might need to set variable PUPPETEER_EXECUTABLE_PATH with the target of a Chrome/Chromium path. example: /usr/bin/chromium-browser`)); + // Continue without browser automation - will fall back to manual entry + } + + // Step 4: Process each Connected App + for (const app of connectedApps) { + try { + const updatedApp = await this.processIndividualApp( + app, + connectedAppIdMap, + browserContext, + instanceUrl, + this.saveProjectPath + ); + + if (updatedApp) { + updatedApps.push(updatedApp); + } + } catch (error: any) { + uxLog("warning", this, c.yellow(`Error processing ${app.fullName}: ${error.message || error}`)); + } + } + + return updatedApps; + } finally { + // Close browser if it was opened + if (browserContext?.browser) { + uxLog("log", this, c.cyan('Closing browser...')); + await browserContext.browser.close(); + } + } + } + + private async retrieveConnectedAppsFromOrg( + orgUsername: string, + connectedApps: ConnectedApp[], + saveProjectPath: string + ): Promise { + uxLog("action", this, c.cyan(`Retrieving ${connectedApps.length} Connected App(s) from ${orgUsername}`)); + await retrieveConnectedApps(orgUsername, connectedApps, this, saveProjectPath); + 
this.verifyConnectedAppsRetrieval(connectedApps); + } + + private verifyConnectedAppsRetrieval(connectedApps: ConnectedApp[]): void { + if (connectedApps.length === 0) return; + + // Check if the Connected App files exist in the project + const missingApps: string[] = []; + + for (const app of connectedApps) { + // Try to find the app in the standard location + const appPath = path.join(this.saveProjectPath, `force-app/main/default/connectedApps/${app.fullName}.connectedApp-meta.xml`); + + if (!fs.existsSync(appPath)) { + // Also check in alternative locations where it might have been retrieved + const altPaths = [ + path.join(this.saveProjectPath, `force-app/main/default/connectedApps/${app.fileName}.connectedApp-meta.xml`), + path.join(this.saveProjectPath, `force-app/main/default/connectedApps/${app.fullName.replace(/\s/g, '_')}.connectedApp-meta.xml`) + ]; + + const found = altPaths.some(path => fs.existsSync(path)); + if (!found) { + missingApps.push(app.fullName); + } + } + } + + // If any apps are missing, throw an error + if (missingApps.length > 0) { + const errorMsg = `Failed to retrieve the following Connected App(s): ${missingApps.join(', ')}`; + uxLog("error", this, c.red(errorMsg)); + const dtlErrorMsg = "This could be due to:\n" + + " - Temporary Salesforce API issues\n" + + " - Permissions or profile issues in the org\n" + + " - Connected Apps that exist but are not accessible\n" + + "Please exclude the app or check your permissions in the org then try again."; + uxLog("warning", this, c.yellow(dtlErrorMsg)); + throw new Error(errorMsg); + } + } + + private async queryConnectedAppIds( + orgUsername: string, + connectedApps: ConnectedApp[] + ): Promise> { + const connectedAppIdMap: Record = {}; + const appNamesForQuery = connectedApps.map(app => `'${app.fullName}'`).join(','); + + if (appNamesForQuery.length === 0) { + return connectedAppIdMap; + } + + uxLog("action", this, c.cyan('Retrieving applicationIds for all Connected Apps...')); + const 
queryCommand = `SELECT Id, Name FROM ConnectedApplication WHERE Name IN (${appNamesForQuery})`; + + try { + const appQueryRes = await soqlQuery(queryCommand, this.conn); + + if (appQueryRes?.records?.length > 0) { + // Populate the map with applicationIds + let logMsg = `Found ${appQueryRes.records.length} applicationId(s) for Connected Apps:`; + for (const record of appQueryRes.records) { + connectedAppIdMap[record.Name] = record.Id; + logMsg += `\n - ${record.Name}: ${record.Id}`; + } + uxLog("log", this, c.grey(logMsg)); + } else { + uxLog("warning", this, c.yellow('No applicationIds found in the org. Will use the fallback URL.')); + } + } catch (queryError) { + uxLog("error", this, c.yellow(`Error retrieving applicationIds: ${queryError}`)); + } + + return connectedAppIdMap; + } + + private async initializeBrowser( + instanceUrl: string, + accessToken: string + ): Promise { + // Get chrome/chromium executable path using shared utility + const chromeExecutablePath = getChromeExecutablePath(); + uxLog("log", this, c.cyan(`chromeExecutablePath: ${chromeExecutablePath}`)); + + const browser = await puppeteer.launch({ + args: ['--no-sandbox', '--disable-setuid-sandbox'], + headless: false, // Always show the browser window + executablePath: chromeExecutablePath, + timeout: 60000 // Increase timeout for browser launch + }); + + // Log in once for the session + const loginUrl = `${instanceUrl}/secur/frontdoor.jsp?sid=${accessToken}`; + uxLog("log", this, c.cyan(`Log in via browser using frontdoor.jsp...`)); + const page = await browser.newPage(); + await page.goto(loginUrl, { waitUntil: ['domcontentloaded', 'networkidle0'] }); + await page.close(); + + return { browser, instanceUrl, accessToken }; + } + + private async processIndividualApp( + app: ConnectedApp, + connectedAppIdMap: Record, + browserContext: BrowserContext | null, + instanceUrl: string, + saveProjectPath: string + ): Promise { + const connectedAppFile = await findConnectedAppFile(app.fullName, this, 
saveProjectPath); + + if (!connectedAppFile) { + uxLog("warning", this, c.yellow(`Connected App file not found for ${app.fullName}`)); + return undefined; + } + + const connectedAppId = connectedAppIdMap[app.fullName]; + let consumerSecretValue: string | null = null; + let viewLink: string; + + // Try to extract application ID and view link + if (connectedAppId) { + try { + uxLog("action", this, c.cyan(`Extracting info for Connected App ${app.fullName}...`)); + const applicationId = await this.extractApplicationId(instanceUrl, connectedAppId, app.fullName, browserContext?.accessToken ?? ''); + viewLink = `${instanceUrl}/app/mgmt/forceconnectedapps/forceAppDetail.apexp?applicationId=${applicationId}`; + uxLog("success", this, c.green(`Successfully extracted application ID: ${applicationId} (viewLink: ${viewLink})`)); + + // Try automated extraction if browser is available + if (browserContext?.browser) { + uxLog("log", this, c.cyan(`Attempting to automatically extract Consumer Secret for ${app.fullName}...`)); + try { + consumerSecretValue = await this.extractConsumerSecret( + browserContext.browser, + viewLink + ); + } catch (puppeteerError) { + uxLog("warning", this, c.yellow(`Error extracting Consumer Secret with Puppeteer: ${puppeteerError}`)); + consumerSecretValue = null; + } + } + } catch (error) { + uxLog("error", this, c.red(`Could not extract application ID for : ${app.fullName}. Error message : ${error}`)); + viewLink = `${instanceUrl}/lightning/setup/NavigationMenus/home`; + uxLog("action", this, c.cyan(`Opening application list page. 
Please manually find ${app.fullName}.`)); + } + } else { + // Fallback to the connected apps list page if applicationId can't be found + uxLog("warning", this, c.yellow(`No applicationId found for ${app.fullName}, opening application list page instead`)); + viewLink = `${instanceUrl}/lightning/setup/NavigationMenus/home`; + } + + try { + // If consumer secret was automatically extracted + if (consumerSecretValue) { + const xmlData = await parseXmlFile(connectedAppFile); + if (xmlData && xmlData.ConnectedApp) { + const consumerKey = xmlData.ConnectedApp.consumerKey ? xmlData.ConnectedApp.consumerKey[0] : 'unknown'; + return await this.updateConnectedAppWithSecret( + connectedAppFile, + xmlData, + consumerSecretValue, + app, + consumerKey + ); + } + } else { + // Manual entry flow - open browser and prompt for secret + const msg = [ + `Unable to automatically extract Consumer Secret for Connected App ${app.fullName}.`, + `- Open Connected App detail page of ${app.fullName} (Contextual menu -> View)`, + '- Click "Manage Consumer Details" button', + `- Copy the ${c.green('Consumer Secret')} value` + ].join('\n'); + uxLog("action", this, c.cyan(msg)); + await open(viewLink); + + // Prompt for the Consumer Secret (manual entry) + const secretPromptResponse = await prompts({ + type: 'text', + name: 'consumerSecret', + message: `Enter the Consumer Secret for ${app.fullName}:`, + description: 'You can find this in the browser after clicking "Manage Consumer Details"', + validate: (value) => value && value.trim() !== '' ? 
true : 'Consumer Secret is required' + }); + + if (!secretPromptResponse.consumerSecret) { + uxLog("warning", this, c.yellow(`Skipping ${app.fullName} due to missing Consumer Secret`)); + return undefined; + } + + // Parse the Connected App XML file + const xmlData = await parseXmlFile(connectedAppFile); + if (xmlData && xmlData.ConnectedApp) { + // Store the consumer secret + const consumerSecret = secretPromptResponse.consumerSecret; + const consumerKey = xmlData.ConnectedApp.consumerKey ? xmlData.ConnectedApp.consumerKey[0] : 'unknown'; + return await this.updateConnectedAppWithSecret( + connectedAppFile, + xmlData, + consumerSecret, + app, + consumerKey + ); + } else { + uxLog("warning", this, c.yellow(`Could not parse XML for ${app.fullName}`)); + } + } + } catch (error: any) { + uxLog("warning", this, c.yellow(`Error processing ${app.fullName}: ${error.message}`)); + } + + return undefined; + } + + private async extractApplicationId( + instanceUrl: string, + connectedAppId: string, + connectedAppName: string, + accessToken: string + ): Promise { + uxLog("log", this, c.cyan(`Extracting application ID for Connected App with ID: ${connectedAppName}`)); + + const url = `${instanceUrl}/${connectedAppId}`; + const response = await axios.get(url, { + headers: { + Cookie: `sid=${accessToken}` + } + }); + const html = response.data; + const appIdMatch = html.match(/applicationId=([a-zA-Z0-9]+)/i); + + if (!appIdMatch || !appIdMatch[1]) { + throw new Error('Could not extract application ID from HTML'); + } + + return appIdMatch[1]; + } + + private async extractConsumerSecret( + browser: Browser, + appUrl: string + ): Promise { + let page: Page | undefined; + try { + page = await browser.newPage(); + + uxLog("log", this, c.grey(`Navigating to Connected App detail page...`)); + await page.goto(appUrl, { waitUntil: ['domcontentloaded', 'networkidle0'] }); + uxLog("log", this, c.grey(`Attempting to extract Consumer Secret...`)); + + // Click Manage Consumer Details button 
+ const manageBtnId = 'input[id="appsetup:setupForm:details:oauthSettingsSection:manageConsumerKeySecretSection:manageConsumer"]'; + await page.waitForSelector(manageBtnId, { timeout: 60000 }); + await page.click(manageBtnId); + await page.waitForNavigation(); + + // Extract Consumer Secret value + const consumerSecretSpanId = '#appsetup\\:setupForm\\:consumerDetails\\:oauthConsumerSection\\:consumerSecretSection\\:consumerSecret'; + await page.waitForSelector(consumerSecretSpanId, { timeout: 60000 }); + const consumerSecretValue = await page.$eval(consumerSecretSpanId, element => element.textContent); + uxLog("success", this, c.green(`Successfully extracted Consumer Secret`)); + + return consumerSecretValue || null; + } catch (error) { + uxLog("error", this, c.red(`Error extracting Consumer Secret: ${error}`)); + return null; + } finally { + if (page) await page.close(); + } + } + + private async updateConnectedAppWithSecret( + connectedAppFile: string, + xmlData: any, + consumerSecret: string, + app: ConnectedApp, + consumerKey: string + ): Promise { + const xmlString = await fs.readFile(connectedAppFile, 'utf8'); + + if (xmlString.includes('')) { + const updatedXmlString = xmlString.replace( + /.*?<\/consumerSecret>/, + `${consumerSecret}` + ); + await fs.writeFile(connectedAppFile, updatedXmlString); + } else { + // Insert consumerSecret right after consumerKey + const updatedXmlString = xmlString.replace( + /.*?<\/consumerKey>/, + `$&\n ${consumerSecret}` + ); + await fs.writeFile(connectedAppFile, updatedXmlString); + } + + xmlData.ConnectedApp.consumerSecret = [consumerSecret]; + + uxLog("success", this, c.green(`Successfully added Consumer Secret to ${app.fullName} in ${connectedAppFile}`)); + + return { + ...app, + consumerKey: consumerKey, + consumerSecret: consumerSecret + }; + } + + private async saveConfig(): Promise { + const config = await getConfig("project"); + if (!config.refreshSandboxConfig) { + config.refreshSandboxConfig = {}; + } + if 
(JSON.stringify(this.refreshSandboxConfig) !== JSON.stringify(config.refreshSandboxConfig)) { + await setConfig("project", { refreshSandboxConfig: this.refreshSandboxConfig }); + uxLog("log", this, c.cyan('Refresh sandbox configuration has been saved successfully.')); + } + } + + private async saveMetadatas(): Promise { + const metadataToSave = path.join(this.saveProjectPath, "manifest", 'package-metadatas-to-save.xml'); + if (fs.existsSync(metadataToSave)) { + const promptResponse = await prompts({ + type: 'confirm', + name: 'retrieveAgain', + message: `It seems you already have metadatas saved from a previous run.\nDo you want to retrieve certificates and metadata again ?`, + description: 'This will overwrite the existing package-metadatas-to-save.xml file and related certificates and metadatas.', + initial: false + }); + if (!promptResponse.retrieveAgain) { + uxLog("log", this, c.grey(`Skipping metadata retrieval as it already exists at ${this.saveProjectPath}`)); + return; + } + } + + // Metadata package.Xml for backup + uxLog("action", this, c.cyan('Saving metadata files before sandbox refresh...')); + const savePackageXml = await this.createSavePackageXml(); + + // Retrieve metadata from org using the package XML + if (!savePackageXml) { + uxLog("log", this, c.grey(`Skipping metadata retrieval as per user choice`)); + return; + } + + // Retrieve metadatas to save + await this.retrieveMetadatasToSave(savePackageXml); + + // Generate new package.xml from saveProjectPath, and remove ConnectedApps from it + await this.generatePackageXmlToRestore(); + } + + private async createSavePackageXml(): Promise { + uxLog("log", this, c.cyan(`Managing "package-metadatas-to-save.xml" file, that will be used to retrieve the metadatas before refreshing the org.`)); + // Copy default package xml to the save project path + const sourceFile = path.join(PACKAGE_ROOT_DIR, 'defaults/refresh-sandbox', 'package-metadatas-to-save.xml'); + const targetFile = 
path.join(this.saveProjectPath, "manifest", 'package-metadatas-to-save.xml'); + await fs.ensureDir(path.dirname(targetFile)); + if (fs.existsSync(targetFile)) { + const promptResponse = await prompts({ + type: 'confirm', + name: 'overwrite', + message: `The file ${targetFile} already exists. Do you want to overwrite it?`, + description: 'This file is used to save the metadata that will be restored after org refresh.', + initial: false + }); + if (promptResponse.overwrite) { + uxLog("log", this, c.grey(`Overwriting default save package xml to ${targetFile}`)); + await fs.copy(sourceFile, targetFile, { overwrite: true }); + } + } + else { + uxLog("log", this, c.grey(`Copying default package xml to ${targetFile}`)); + await fs.copy(sourceFile, targetFile, { overwrite: true }); + } + uxLog("log", this, c.grey(`Save package XML is located at ${targetFile}`)); + WebSocketClient.sendReportFileMessage(targetFile, "Save package XML", 'report'); + // Prompt user to check packageXml content and update it if necessary + const promptRes = await prompts({ + type: 'confirm', + name: 'checkPackageXml', + message: `Please check package XML file ${targetFile} before retrieving, update it to add metadata if necessary then continue`, + description: 'You can add or remove metadata types to save before proceeding.', + initial: true + }); + if (!promptRes.checkPackageXml) { + uxLog("log", this, c.grey(`Skipping package XML retrieve`)); + return null; + } + return targetFile; + } + + private async retrieveMetadatasToSave(savePackageXml: string) { + uxLog("action", this, c.cyan(`Retrieving metadatas to save...`)); + await execCommand( + `sf project retrieve start --manifest ${savePackageXml} --target-org ${this.orgUsername} --ignore-conflicts --json`, + this, + { output: true, fail: true, cwd: this.saveProjectPath } + ); + } + + private async generatePackageXmlToRestore() { + uxLog("action", this, c.cyan(`Generating new package.xml from saved project path ${this.saveProjectPath}...`)); + 
const restorePackageXmlFileName = 'package-metadata-to-restore.xml'; + const restorePackageXmlFile = path.join(this.saveProjectPath, 'manifest', restorePackageXmlFileName); + await execCommand( + `sf project generate manifest --source-dir force-app --output-dir manifest --name ${restorePackageXmlFileName} --json`, + this, + { output: true, fail: true, cwd: this.saveProjectPath } + ); + uxLog("success", this, c.grey(`Generated package.xml for restore at ${restorePackageXmlFile}`)); + const restorePackage = await parsePackageXmlFile(restorePackageXmlFile); + if (restorePackage?.["ConnectedApp"]) { + delete restorePackage["ConnectedApp"]; + await writePackageXmlFile(restorePackageXmlFile, restorePackage); + uxLog("log", this, c.grey(`Removed ConnectedApps from ${restorePackageXmlFileName} as they will be handled separately`)); + } + if (restorePackage?.["Certificate"]) { + delete restorePackage["Certificate"]; + await writePackageXmlFile(restorePackageXmlFile, restorePackage); + uxLog("log", this, c.grey(`Removed Certificates from ${restorePackageXmlFileName} as they will be handled separately`)); + } + if (restorePackage?.["SamlSsoConfig"]) { + delete restorePackage["SamlSsoConfig"]; + await writePackageXmlFile(restorePackageXmlFile, restorePackage); + uxLog("log", this, c.grey(`Removed SamlSsoConfig from ${restorePackageXmlFileName} as they will be handled separately`)); + } + } + + private async retrieveCertificates() { + const promptCerts = await prompts({ + type: 'confirm', + name: 'retrieveCerts', + message: `Do you want to retrieve Certificates from ${this.instanceUrl} before refreshing it ?`, + description: 'Certificates cannot be retrieved using Source API, so we will use Metadata API for that.', + initial: true + }); + if (!promptCerts.retrieveCerts) { + uxLog("log", this, c.grey(`Skipping Certificates retrieval as per user choice`)); + return; + } + + uxLog("action", this, c.cyan('Retrieving certificates (.crt) from org...')); + // Retrieve certificates 
using metadata api coz with source api it does not work + const certificatesPackageXml = path.join(PACKAGE_ROOT_DIR, 'defaults/refresh-sandbox', 'package-certificates-to-save.xml'); + const packageCertsXml = path.join(this.saveProjectPath, 'manifest', 'package-certificates-to-save.xml'); + uxLog("log", this, c.grey(`Copying default package XML for certificates to ${packageCertsXml}`)); + await fs.copy(certificatesPackageXml, packageCertsXml, { overwrite: true }); + uxLog("log", this, c.grey(`Retrieving certificates from org ${this.instanceUrl} using Metadata API (Source APi does not support it)...`)); + await execSfdxJson( + `sf project retrieve start --manifest ${packageCertsXml} --target-org ${this.orgUsername} --target-metadata-dir ./mdapi_certs --unzip`, + this, + { output: true, fail: true, cwd: this.saveProjectPath } + ); + // Copy the extracted certificates to the main directory + const mdapiCertsDir = path.join(this.saveProjectPath, 'mdapi_certs', 'unpackaged', 'unpackaged', 'certs'); + const certsDir = path.join(this.saveProjectPath, 'force-app', 'main', 'default', 'certs'); + uxLog("log", this, c.grey(`Copying certificates from ${mdapiCertsDir} to ${certsDir}`)); + await fs.ensureDir(certsDir); + await fs.copy(mdapiCertsDir, certsDir, { overwrite: true }); + await fs.remove(path.join(this.saveProjectPath, 'mdapi_certs')); + uxLog("success", this, c.green(`Successfully retrieved certificates from org and saved them to ${certsDir}`)); + uxLog("action", this, c.cyan('Retrieving certificates definitions (.crt-meta.xml) from org...')); + // Retrieve certificates definitions using source api + await execCommand( + `sf project retrieve start -m Certificate --target-org ${this.orgUsername} --ignore-conflicts --json`, + this, + { output: true, fail: true, cwd: this.saveProjectPath } + ); + } + + private async saveCustomSettings(): Promise { + const customSettingsFolder = path.join(this.saveProjectPath, 'savedCustomSettings'); + // If savedCustomSettings is not 
empty, ask if we want to retrieve them again + if (fs.existsSync(customSettingsFolder) && fs.readdirSync(customSettingsFolder).length > 0) { + const confirmRetrieval = await prompts({ + type: 'confirm', + name: 'retrieveAgain', + message: `Custom Settings folder is not empty. Do you want to retrieve Custom Settings again?`, + description: `If you do not retrieve them again, the Custom Settings will not be updated with the latest changes from the org.`, + initial: false + }); + + if (!confirmRetrieval.retrieveAgain) { + uxLog("log", this, c.grey(`Skipping Custom Settings retrieval as it already exists at ${customSettingsFolder}`)); + return; + } + } + // List custom settings in the org + uxLog("action", this, c.cyan(`Listing Custom Settings in the org...`)); + const globalDesc = await this.conn.describeGlobal(); + const customSettings = globalDesc.sobjects.filter(sobject => sobject.customSetting); + if (customSettings.length === 0) { + uxLog("warning", this, c.yellow('No Custom Settings found in the org.')); + return; + } + const customSettingsNames = customSettings.map(cs => `- ${cs.name}`).sort().join('\n'); + uxLog("log", this, c.grey(`Found ${customSettings.length} Custom Setting(s) in the org:\n${customSettingsNames}`)); + // Ask user to select which Custom Settings to retrieve + const initialCs = this.refreshSandboxConfig.customSettings || customSettings.map(cs => cs.name); + const selectedSettings = await prompts({ + type: 'multiselect', + name: 'settings', + message: 'Select Custom Settings to retrieve', + description: 'You can select multiple Custom Settings to retrieve.', + choices: customSettings.map(cs => ({ title: cs.name, value: cs.name })), + initial: initialCs, + }); + if (selectedSettings.settings.length === 0) { + uxLog("warning", this, c.yellow('No Custom Settings selected for retrieval')); + return; + } + this.refreshSandboxConfig.customSettings = selectedSettings.settings.sort(); + await this.saveConfig(); + uxLog("log", this, c.cyan(`Retrieving 
${selectedSettings.settings.length} selected Custom Settings`)); + const successCs: any = []; + const errorCs: any = []; + // Retrieve each selected Custom Setting + for (const settingName of selectedSettings.settings) { + try { + uxLog("action", this, c.cyan(`Retrieving values of Custom Setting: ${settingName}`)); + + // List all fields of the Custom Setting using globalDesc + const customSettingDesc = globalDesc.sobjects.find(sobject => sobject.name === settingName); + if (!customSettingDesc) { + uxLog("error", this, c.red(`Custom Setting ${settingName} not found in the org.`)); + errorCs.push(settingName); + continue; + } + const csDescribe = await this.conn.sobject(settingName).describe(); + const fieldList = csDescribe.fields.map(field => field.name).join(', '); + uxLog("log", this, c.grey(`Fields in Custom Setting ${settingName}: ${fieldList}`)); + + // Use data tree export to retrieve the Custom Setting + uxLog("log", this, c.cyan(`Running tree export for Custom Setting ${settingName}...`)); + const retrieveCommand = `sf data tree export --query "SELECT ${fieldList} FROM ${settingName}" --target-org ${this.orgUsername} --json`; + const csFolder = path.join(customSettingsFolder, settingName); + await fs.ensureDir(csFolder); + const result = await execSfdxJson(retrieveCommand, this, { + output: true, + fail: true, + cwd: csFolder + }); + if (!(result?.status === 0)) { + uxLog("error", this, c.red(`Failed to retrieve Custom Setting ${settingName}: ${JSON.stringify(result)}`)); + continue; + } + const resultFile = path.join(csFolder, `${settingName}.json`); + if (fs.existsSync(resultFile)) { + uxLog("log", this, c.grey(`Custom Setting ${settingName} has been downloaded to ${resultFile}`)); + successCs.push(settingName); + } + else { + uxLog("warning", this, c.red(`Custom Setting ${settingName} was not retrieved correctly, or has no values. 
No file found at ${resultFile}`)); + errorCs.push(settingName); + continue; + } + } catch (error: any) { + errorCs.push(settingName); + uxLog("error", this, c.red(`Error retrieving Custom Setting ${settingName}: ${error.message || error}`)); + } + } + uxLog("action", this, c.cyan(`Custom Settings retrieval completed (${successCs.length} successful, ${errorCs.length} failed)`)); + if (successCs.length > 0) { + const successCsNames = successCs.map(cs => "- " + cs).join('\n'); + uxLog("success", this, c.green(`Successfully retrieved Custom Settings:\n${successCsNames}`)); + } + if (errorCs.length > 0) { + const errorCsNames = errorCs.map(cs => "- " + cs).join('\n'); + uxLog("error", this, c.red(`Failed to retrieve Custom Settings:\n${errorCsNames}`)); + } + } + + private async saveRecords(): Promise { + const hasDataWs = await hasDataWorkspaces(); + if (!hasDataWs) { + uxLog("action", this, c.yellow('No data workspaces found in the project, skipping record saving')); + uxLog("log", this, c.grey(`You can create data workspaces using ${CONSTANTS.DOC_URL_ROOT}/hardis/org/configure/data/`)); + return; + } + + const sfdmuWorkspaces = await selectDataWorkspace({ + selectDataLabel: 'Select data workspaces to use to export records before refreshing sandbox', + multiple: true, + initial: this?.refreshSandboxConfig?.dataWorkspaces || [], + }); + if (!(Array.isArray(sfdmuWorkspaces) && sfdmuWorkspaces.length > 0)) { + uxLog("warning", this, c.yellow('No data workspace selected, skipping record saving')); + return; + } + this.refreshSandboxConfig.dataWorkspaces = sfdmuWorkspaces.sort(); + await this.saveConfig(); + + // Copy data templates in saveProjectPath + for (const sfdmuPath of sfdmuWorkspaces) { + const sourcePath = path.join(process.cwd(), sfdmuPath); + const targetPath = path.join(this.saveProjectPath, sfdmuPath); + await fs.ensureDir(path.dirname(targetPath)); + if (fs.existsSync(targetPath)) { + uxLog("log", this, c.grey(`Overwriting data workspace from ${sourcePath} 
to ${targetPath}`)); + await fs.copy(sourcePath, targetPath, { overwrite: true }); + } else { + uxLog("log", this, c.grey(`Copying data workspace from ${sourcePath} to ${targetPath}`)); + await fs.copy(sourcePath, targetPath, { overwrite: true }); + } + } + + for (const sfdmuPath of sfdmuWorkspaces) { + await exportData(sfdmuPath || '', this, { + sourceUsername: this.orgUsername, + cwd: this.saveProjectPath + }); + } + } +} diff --git a/src/commands/hardis/org/retrieve/packageconfig.ts b/src/commands/hardis/org/retrieve/packageconfig.ts index ec52ea5ad..a77828038 100644 --- a/src/commands/hardis/org/retrieve/packageconfig.ts +++ b/src/commands/hardis/org/retrieve/packageconfig.ts @@ -1,76 +1,104 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { MetadataUtils } from "../../../../common/metadata-utils"; -import { uxLog } from "../../../../common/utils"; -import { managePackageConfig, promptOrg } from "../../../../common/utils/orgUtils"; -import { prompts } from "../../../../common/utils/prompts"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { MetadataUtils } from '../../../../common/metadata-utils/index.js'; +import { uxLog } from '../../../../common/utils/index.js'; +import { managePackageConfig, promptOrg } from '../../../../common/utils/orgUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); 
-// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class RetrievePackageConfig extends SfCommand { + public static title = 'Retrieve package configuration from an org'; -export default class RetrievePackageConfig extends SfdxCommand { - public static title = "Retrieve package configuration from an org"; + public static description = ` +**Retrieves the installed package configuration from a Salesforce org and optionally updates the local project configuration.** - public static description = "Retrieve package configuration from an org"; +This command is useful for maintaining an accurate record of installed packages within your Salesforce project, which is crucial for managing dependencies and ensuring consistent deployments across environments. - public static examples = ["$ sfdx hardis:org:retrieve:packageconfig", "sfdx hardis:org:retrieve:packageconfig -u myOrg"]; +Key functionalities: - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +- **Package Listing:** Connects to a specified Salesforce org (or prompts for one if not provided) and retrieves a list of all installed packages. +- **Configuration Update:** Offers the option to update your local project's configuration with the retrieved list of installed packages. This can be beneficial for automating package installations during environment setup or CI/CD processes. + +
+Technical explanations + +The command's technical implementation involves: + +- **Org Connection:** It establishes a connection to the target Salesforce org using the provided or prompted username. +- **Metadata Retrieval:** It utilizes \`MetadataUtils.listInstalledPackages\` to query the Salesforce org and obtain details about the installed packages. +- **Interactive Prompt:** It uses the \`prompts\` library to ask the user whether they want to update their local project configuration with the retrieved package list. +- **Configuration Management:** If the user confirms, it calls \`managePackageConfig\` to update the project's configuration file (likely \`.sfdx-hardis.yml\`) with the new package information. +- **User Feedback:** Provides clear messages to the user about the success of the package retrieval and configuration update. +
+`; + + public static examples = ['$ sf hardis:org:retrieve:packageconfig', 'sf hardis:org:retrieve:packageconfig -u myOrg']; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static supportsUsername = true; - protected static requiresUsername = false; - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; /* jscpd:ignore-end */ public async run(): Promise { - let targetUsername = this.flags.targetusername || null; + const { flags } = await this.parse(RetrievePackageConfig); + let targetUsername = flags['target-org'].getUsername() || null; // Prompt for organization if not sent if (targetUsername == null) { - const org = await promptOrg(this, { setDefault: false }); + const org = await promptOrg(this, { setDefault: false, defaultOrgUsername: flags['target-org']?.getUsername() }); targetUsername = org.username; } // Retrieve list of installed packages - const installedPackages = await MetadataUtils.listInstalledPackages(targetUsername, this); + uxLog("action", this, c.cyan('Retrieving installed packages from org ' + targetUsername + '...')); + const installedPackages = 
await MetadataUtils.listInstalledPackages(targetUsername || '', this); + + const packageNames = installedPackages + .map((pkg: any) => `- ${pkg.SubscriberPackageName} (${pkg.SubscriberPackageVersionNumber})`) + .sort((a: string, b: string) => a.localeCompare(b)) + .join('\n'); + + if (installedPackages.length === 0) { + uxLog("warning", this, c.yellow(`No installed packages found in org ${targetUsername}.`)); + throw new SfError('No installed packages found in the target org. Maybe an auth issue ?'); + } + uxLog("action", this, c.cyan(`Successfully retrieved ${installedPackages.length} installed packages from org ${targetUsername}.\n${packageNames}`)); // Store list in config const updateConfigRes = await prompts({ - type: "confirm", - name: "value", - message: c.cyanBright("Do you want to update your project configuration with this list of packages ?"), + type: 'confirm', + name: 'value', + message: c.cyanBright('Do you want to update your project configuration with this list of packages ?'), + description: 'Update your local project files with the list of installed packages for deployment automation', }); if (updateConfigRes.value === true) { - await managePackageConfig(installedPackages, installedPackages); + await managePackageConfig(installedPackages, installedPackages, true); } - const message = `[sfdx-hardis] Successfully retrieved package config`; - uxLog(this, c.green(message)); - return { orgId: this.org.getOrgId(), outputString: message }; + WebSocketClient.sendRefreshPipelineMessage(); + const message = `Successfully retrieved installed packages configuration`; + uxLog("success", this, c.green(message)); + return { orgId: flags['target-org'].getOrgId(), outputString: message }; } } diff --git a/src/commands/hardis/org/retrieve/sources/analytics.ts b/src/commands/hardis/org/retrieve/sources/analytics.ts index 5c0be6e65..e099575c6 100644 --- a/src/commands/hardis/org/retrieve/sources/analytics.ts +++ b/src/commands/hardis/org/retrieve/sources/analytics.ts 
@@ -1,76 +1,99 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as path from "path"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as path from 'path'; // import * as path from "path"; -import { uxLog, isCI, createTempDir, execCommand } from "../../../../../common/utils"; +import { uxLog, isCI, createTempDir, execCommand } from '../../../../../common/utils/index.js'; -import { promptOrgUsernameDefault } from "../../../../../common/utils/orgUtils"; -import { buildOrgManifest } from "../../../../../common/utils/deployUtils"; -import { parsePackageXmlFile, writePackageXmlFile } from "../../../../../common/utils/xmlUtils"; +import { promptOrgUsernameDefault } from '../../../../../common/utils/orgUtils.js'; +import { buildOrgManifest } from '../../../../../common/utils/deployUtils.js'; +import { parsePackageXmlFile, writePackageXmlFile } from '../../../../../common/utils/xmlUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class RetrieveAnalytics extends SfCommand { + public static title = 'Retrieve CRM Analytics configuration from an org'; -export default class Retrofit extends SfdxCommand { - public static title = "Retrieve CRM Analytics configuration from an org"; + public static description = ` +## Command Behavior - public static description = `Retrieve all CRM Analytics sources from an org, with workarounds for SFDX bugs`; +**Retrieves all CRM Analytics (formerly Tableau CRM or Einstein Analytics) sources from a Salesforce org, including workarounds for known SFDX bugs.** - public static examples = ["$ sfdx hardis:org:retrieve:sources:analytics"]; +This command is designed to extract the complete configuration of your CRM Analytics assets, such as dashboards, dataflows, lenses, and recipes. It's essential for version controlling your Analytics development, migrating assets between environments, or backing up your Analytics configurations. - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +Key functionalities: + +- **Comprehensive Retrieval:** Fetches all supported CRM Analytics metadata types. +- **SFDX Bug Workarounds:** Incorporates internal logic to handle common issues or limitations encountered when retrieving CRM Analytics metadata using standard Salesforce CLI commands. +- **Target Org Selection:** Allows you to specify the Salesforce org from which to retrieve the Analytics sources. If not provided, it will prompt for selection. + +
+Technical explanations + +The command's technical implementation involves: + +- **Full Org Manifest Generation:** It first generates a complete \`package.xml\` for the target org using \`buildOrgManifest\`. This ensures that all available metadata, including CRM Analytics components, are identified. +- **Analytics Metadata Filtering:** It then filters this comprehensive \`package.xml\` to include only the CRM Analytics-related metadata types (e.g., \`WaveApplication\`, \`WaveDashboard\`, \`WaveDataflow\`, \`WaveLens\`, \`WaveRecipe\`, \`WaveXmd\`). +- **Filtered \`package.xml\` Creation:** A new \`package.xml\` file containing only the filtered CRM Analytics metadata is created temporarily. +- **Salesforce CLI Retrieval:** It executes the \`sf project retrieve start\` command, using the newly created Analytics-specific \`package.xml\` to retrieve the sources to your local project. +- **Temporary File Management:** It uses \`createTempDir\` to manage temporary files and directories created during the process. +- **Interactive Org Selection:** Uses \`promptOrgUsernameDefault\` to guide the user in selecting the target Salesforce org if not provided via flags. +
+`; + + public static examples = ['$ sf hardis:org:retrieve:sources:analytics']; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; protected configInfo: any = {}; protected debugMode = false; /* jscpd:ignore-end */ - protected analyticsMetadataTypes = ["WaveApplication", "WaveDashboard", "WaveDataflow", "WaveDataset", "WaveLens", "WaveRecipe", "WaveXmd"]; + protected analyticsMetadataTypes = [ + 'WaveApplication', + 'WaveDashboard', + 'WaveDataflow', + 'WaveDataset', + 'WaveLens', + 'WaveRecipe', + 'WaveXmd', + ]; // Retrieves locally all items corresponding to CRM Analytics configuration public async run(): Promise { + const { flags } = await this.parse(RetrieveAnalytics); // Manage user selection for org if we are not in CI - let orgUsername = this.org.getUsername(); - if (!isCI && !this.flags.targetusername) { - orgUsername = await promptOrgUsernameDefault(this, 
orgUsername, { devHub: false, setDefault: false }); + let orgUsername = flags['target-org'].getUsername(); + if (!isCI && !flags['target-org']) { + orgUsername = await promptOrgUsernameDefault(this, orgUsername || '', { devHub: false, setDefault: false }); } // List all metadatas of target org const tmpDir = await createTempDir(); - const packageXmlAllFile = path.join(tmpDir, "packageXmlAll.xml"); - await buildOrgManifest(orgUsername, packageXmlAllFile, this.org.getConnection()); - uxLog(this, c.cyan(`Retrieved full package XML from org ${orgUsername}: ${packageXmlAllFile}`)); + const packageXmlAllFile = path.join(tmpDir, 'packageXmlAll.xml'); + await buildOrgManifest(orgUsername, packageXmlAllFile, flags['target-org'].getConnection()); + uxLog("action", this, c.cyan(`Retrieved full package XML from org ${orgUsername}: ${packageXmlAllFile}`)); // Filter to keep only analytics metadatas const parsedPackageXmlAll = await parsePackageXmlFile(packageXmlAllFile); - const packageXmlAnalyticsFile = path.join(tmpDir, "packageXmlAnalytics.xml"); + const packageXmlAnalyticsFile = path.join(tmpDir, 'packageXmlAnalytics.xml'); const analyticsPackageXml = {}; for (const type of Object.keys(parsedPackageXmlAll)) { if (this.analyticsMetadataTypes.includes(type)) { @@ -78,12 +101,16 @@ export default class Retrofit extends SfdxCommand { } } await writePackageXmlFile(packageXmlAnalyticsFile, analyticsPackageXml); - uxLog(this, c.cyan(`Filtered and completed analytics metadatas in analytics package XML: ${packageXmlAnalyticsFile}`)); + uxLog( + "action", + this, + c.cyan(`Filtered and completed analytics metadatas in analytics package XML: ${packageXmlAnalyticsFile}`) + ); // Retrieve locally Analytics sources - const retrieveCommand = `sfdx force:source:retrieve -x "${packageXmlAnalyticsFile}" -u ${orgUsername}`; + const retrieveCommand = `sf project retrieve start -x "${packageXmlAnalyticsFile}" -o ${orgUsername}`; await execCommand(retrieveCommand, this, { fail: true, debug: 
this.debugMode, output: true }); - uxLog(this, c.cyan(`Retrieved all analytics source items using package XML: ${packageXmlAnalyticsFile}`)); + uxLog("action", this, c.cyan(`Retrieved all analytics source items using package XML: ${packageXmlAnalyticsFile}`)); return { outputString: `Retrieved analytics sources from org ${orgUsername}` }; } diff --git a/src/commands/hardis/org/retrieve/sources/dx.ts b/src/commands/hardis/org/retrieve/sources/dx.ts index df8eca0cf..c83a5e0bf 100644 --- a/src/commands/hardis/org/retrieve/sources/dx.ts +++ b/src/commands/hardis/org/retrieve/sources/dx.ts @@ -1,105 +1,129 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as child from "child_process"; -import * as fs from "fs-extra"; -import * as path from "path"; -import * as util from "util"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as child from 'child_process'; +import fs from 'fs-extra'; +import * as path from 'path'; +import * as util from 'util'; const exec = util.promisify(child.exec); -import { MetadataUtils } from "../../../../../common/metadata-utils"; -import { createTempDir, uxLog } from "../../../../../common/utils"; -import { WebSocketClient } from "../../../../../common/websocketClient"; -import { setConfig } from "../../../../../config"; +import { MetadataUtils } from '../../../../../common/metadata-utils/index.js'; +import { uxLog } from '../../../../../common/utils/index.js'; +import { WebSocketClient } from '../../../../../common/websocketClient.js'; +import { setConfig } from '../../../../../config/index.js'; -// Initialize Messages with the current plugin directory 
-Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DxSources extends SfCommand { + public static title = 'Retrieve sfdx sources from org'; -export default class DxSources extends SfdxCommand { - public static title = "Retrieve sfdx sources from org"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("retrieveDx"); +**Retrieves Salesforce metadata from an org and converts it into Salesforce DX (SFDX) source format.** - public static examples = ["$ sfdx hardis:org:retrieve:sources:dx"]; +This command provides a flexible way to pull metadata from any Salesforce org into your local SFDX project. It's particularly useful for: - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: ".", - description: messages.getMessage("folder"), +- **Initial Project Setup:** Populating a new SFDX project with existing org metadata. +- **Environment Synchronization:** Bringing changes from a Salesforce org (e.g., a sandbox) into your local development environment. +- **Selective Retrieval:** Allows you to specify which metadata types to retrieve, or to filter out certain types. +- **Org Shape Creation:** Can optionally create an org shape, which is useful for defining the characteristics of scratch orgs. + +Key functionalities: + +- **Metadata Retrieval:** Connects to a target Salesforce org and retrieves metadata based on specified filters. +- **MDAPI to SFDX Conversion:** Converts the retrieved metadata from Metadata API format to SFDX source format. 
+- **Org Shape Generation (Optional):** If the \`--shape\` flag is used, it also captures the org's shape and stores installed package information. +- **Temporary File Management:** Uses temporary folders for intermediate steps, ensuring a clean working directory. + +
+Technical explanations + +The command's technical implementation involves: + +- **Temporary Directory Management:** It creates and manages temporary directories (\`./tmp\`, \`mdapipkg\`, \`sfdx-project\`) to stage the retrieved metadata and the converted SFDX sources. +- **\`MetadataUtils.retrieveMetadatas\`:** This utility is used to connect to the Salesforce org and retrieve metadata in Metadata API format. It supports filtering by metadata types and excluding certain items. +- **SFDX Project Creation:** It executes \`sf project generate\` to create a new SFDX project structure within a temporary directory. +- **MDAPI to SFDX Conversion:** It then uses \`sf project convert mdapi\` to convert the retrieved metadata from the MDAPI format to the SFDX source format. +- **File System Operations:** It uses \`fs-extra\` to copy the converted SFDX sources to the main project folder, while preserving important project files like \`.gitignore\` and \`sfdx-project.json\`. +- **Org Shape Handling:** If \`--shape\` is enabled, it copies the generated \`package.xml\` and stores information about installed packages using \`setConfig\`. +- **Error Handling:** Includes robust error handling for Salesforce CLI commands and file system operations. +- **WebSocket Communication:** Uses \`WebSocketClient.sendRefreshCommandsMessage\` to notify connected VS Code clients about changes to the project. +
+`; + + public static examples = ['$ sf hardis:org:retrieve:sources:dx']; + + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: '.', + description: messages.getMessage('folder'), }), - tempfolder: flags.string({ - char: "t", - default: "./tmp", - description: messages.getMessage("tempFolder"), + tempfolder: Flags.string({ + char: 't', + default: './tmp', + description: messages.getMessage('tempFolder'), }), - keepmetadatatypes: flags.string({ - char: "k", - description: "Comma separated list of metadatas types that will be the only ones to be retrieved", + keepmetadatatypes: Flags.string({ + char: 'k', + description: 'Comma separated list of metadatas types that will be the only ones to be retrieved', }), - filteredmetadatas: flags.string({ - char: "m", - description: messages.getMessage("filteredMetadatas"), + filteredmetadatas: Flags.string({ + char: 'm', + description: messages.getMessage('filteredMetadatas'), }), - shape: flags.boolean({ - char: "o", + shape: Flags.boolean({ + char: 's', default: false, - description: messages.getMessage("createOrgShape"), + description: messages.getMessage('createOrgShape'), }), - instanceurl: flags.string({ - char: "r", - description: messages.getMessage("instanceUrl"), + instanceurl: Flags.string({ + char: 'r', + description: messages.getMessage('instanceUrl'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this 
out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; - - // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; + public static requiresProject = false; /* jscpd:ignore-end */ public async run(): Promise { - const folder = path.resolve(this.flags.folder || "."); - const tempFolder = path.resolve(this.flags.tempfolder || "./tmp"); - const keepMetadataTypes = this.flags.keepmetadatatypes ? this.flags.keepmetadatatypes.split(",") : []; - const filteredMetadatas = this.flags.filteredmetadatas ? this.flags.filteredmetadatas.split(",") : MetadataUtils.listMetadatasNotManagedBySfdx(); - const shapeFlag = this.flags.shape || false; - const debug = this.flags.debug || false; + const { flags } = await this.parse(DxSources); + const folder = path.resolve(flags.folder || '.'); + const tempFolder = path.resolve(flags.tempfolder || './tmp'); + const keepMetadataTypes = flags.keepmetadatatypes ? flags.keepmetadatatypes.split(',') : []; + const filteredMetadatas = flags.filteredmetadatas + ? 
flags.filteredmetadatas.split(',') + : MetadataUtils.listMetadatasNotManagedBySfdx(); + const shapeFlag = flags.shape || false; + const debug = flags.debug || false; // Create working temp folders and define it as cwd const prevCwd = process.cwd(); await fs.ensureDir(tempFolder); await fs.emptyDir(tempFolder); process.chdir(tempFolder); - const metadataFolder = path.join(tempFolder, "mdapipkg"); + const metadataFolder = path.join(tempFolder, 'mdapipkg'); await fs.ensureDir(metadataFolder); await fs.emptyDir(metadataFolder); - const sfdxFolder = path.join(tempFolder, "sfdx-project"); + const sfdxFolder = path.join(tempFolder, 'sfdx-project'); await fs.ensureDir(sfdxFolder); await fs.emptyDir(sfdxFolder); @@ -108,43 +132,56 @@ export default class DxSources extends SfdxCommand { if (keepMetadataTypes) { retrieveOptions.keepMetadataTypes = keepMetadataTypes; } - const packageXml = path.resolve(path.join(tempFolder, "package.xml")); - await MetadataUtils.retrieveMetadatas(packageXml, metadataFolder, true, filteredMetadatas, retrieveOptions, this, debug); + const packageXml = path.resolve(path.join(tempFolder, 'package.xml')); + await MetadataUtils.retrieveMetadatas( + packageXml, + metadataFolder, + true, + filteredMetadatas, + retrieveOptions, + this, + flags['target-org'].getUsername(), + debug + ); // Create sfdx project if (fs.readdirSync(sfdxFolder).length === 0) { - uxLog(this, c.cyan("Creating SFDX project...")); - const projectCreateCommand = 'sfdx force:project:create --projectname "sfdx-project"'; - uxLog(this, `[command] ${c.bold(c.grey(projectCreateCommand))}`); + uxLog("action", this, c.cyan('Creating SFDX project...')); + const projectCreateCommand = 'sf project generate --name "sfdx-project"'; + uxLog("other", this, `[command] ${c.bold(c.grey(projectCreateCommand))}`); const createProjectRes = await exec(projectCreateCommand, { maxBuffer: 1024 * 2000 }); if (debug) { - uxLog(this, createProjectRes.stdout + createProjectRes.stderr); + uxLog("other", this, 
createProjectRes.stdout + createProjectRes.stderr); } } // Converting metadatas to sfdx - uxLog(this, c.cyan(`Converting metadatas into SFDX sources in ${c.green(sfdxFolder)}...`)); + uxLog("action", this, c.cyan(`Converting metadatas into SFDX sources in ${c.green(sfdxFolder)}...`)); process.chdir(sfdxFolder); - const mdapiConvertCommand = `sfdx force:mdapi:convert --rootdir ${path.join(metadataFolder, "unpackaged")} ${debug ? "--verbose" : ""}`; - uxLog(this, `[command] ${c.bold(c.grey(mdapiConvertCommand))}`); + const mdapiConvertCommand = `sf project convert mdapi --root-dir ${path.join(metadataFolder, 'unpackaged')} ${debug ? '--verbose' : '' + }`; + uxLog("other", this, `[command] ${c.bold(c.grey(mdapiConvertCommand))}`); try { const convertRes = await exec(mdapiConvertCommand, { maxBuffer: 10000 * 10000, }); if (debug) { - uxLog(this, convertRes.stdout + convertRes.stderr); + uxLog("other", this, convertRes.stdout + convertRes.stderr); } } catch (e) { - throw new SfdxError(JSON.stringify(e, null, 2)); + throw new SfError(JSON.stringify(e, null, 2)); } // Move sfdx sources in main folder - uxLog(this, `[sfdx-hardis] Moving temp files to main folder ${c.green(path.resolve(folder))}...`); + uxLog("other", this, `[sfdx-hardis] Moving temp files to main folder ${c.green(path.resolve(folder))}...`); process.chdir(prevCwd); // Do not replace files if already defined - const filesToNotReplace = [".gitignore", ".forceignore", "sfdx-project.json", "README.md"]; + const filesToNotReplace = ['.gitignore', '.forceignore', 'sfdx-project.json', 'README.md']; for (const fileToNotReplace of filesToNotReplace) { - if (fs.existsSync(path.join(path.resolve(folder), fileToNotReplace)) && fs.existsSync(path.join(sfdxFolder, fileToNotReplace))) { + if ( + fs.existsSync(path.join(path.resolve(folder), fileToNotReplace)) && + fs.existsSync(path.join(sfdxFolder, fileToNotReplace)) + ) { await fs.remove(path.join(sfdxFolder, fileToNotReplace)); } } @@ -154,55 +191,34 @@ export default 
class DxSources extends SfdxCommand { // Manage org shape if requested if (shapeFlag === true) { // Copy package.xml - const packageXmlInConfig = path.resolve(folder) + "/manifest/package.xml"; // '/config/package.xml'; + const packageXmlInConfig = path.resolve(folder) + '/manifest/package.xml'; // '/config/package.xml'; if (!fs.existsSync(packageXmlInConfig)) { await fs.ensureDir(path.dirname(packageXmlInConfig)); - uxLog(this, `[sfdx-hardis] Copying package.xml manifest ${c.green(packageXmlInConfig)}...`); + uxLog("other", this, `[sfdx-hardis] Copying package.xml manifest ${c.green(packageXmlInConfig)}...`); await fs.copy(packageXml, packageXmlInConfig); } // Store list of installed packages const installedPackages = await MetadataUtils.listInstalledPackages(null, this); - await setConfig("project", { + await setConfig('project', { installedPackages, }); - // Try to get org shape - const projectScratchDefFile = "./config/project-scratch-def.json"; - uxLog(this, `[sfdx-hardis] Getting org shape in ${c.green(path.resolve(projectScratchDefFile))}...`); - const shapeFile = path.join(await createTempDir(), "project-scratch-def.json"); - try { - await exec(`sfdx force:org:shape:create -f "${shapeFile} -u `); - const orgShape = await fs.readFile(shapeFile, "utf-8"); - const projectScratchDef = await fs.readFile(projectScratchDefFile, "utf-8"); - const newShape = Object.assign(projectScratchDef, orgShape); - await fs.writeFile(projectScratchDefFile, JSON.stringify(newShape, null, 2)); - // eslint-disable-next-line @typescript-eslint/no-unused-vars - } catch (e) { - uxLog(this, c.yellow("[sfdx-hardis][ERROR] Unable to create org shape")); - uxLog(this, c.yellow("[sfdx-hardis] You need to manually update config/project-scratch-def.json")); - uxLog( - this, - c.yellow( - "[sfdx-hardis] See documentation at https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_scratch_orgs_def_file.htm", - ), - ); - } } // Remove temporary files - uxLog(this, 
`Remove temporary folder ${tempFolder} ...`); + uxLog("other", this, `Remove temporary folder ${tempFolder} ...`); try { await fs.rm(tempFolder, { recursive: true }); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { - uxLog(this, c.yellow(`Unable to remove folder ${tempFolder}, please delete it manually`)); + uxLog("warning", this, c.yellow(`Unable to remove folder ${tempFolder}, please delete it manually`)); } // Trigger commands refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshCommands" }); + WebSocketClient.sendRefreshCommandsMessage(); // Set bac initial cwd const message = `[sfdx-hardis] Successfully retrieved sfdx project in ${folder}`; - uxLog(this, c.green(message)); - return { orgId: this.org.getOrgId(), outputString: message }; + uxLog("success", this, c.green(message)); + return { orgId: flags['target-org'].getOrgId(), outputString: message }; } } diff --git a/src/commands/hardis/org/retrieve/sources/dx2.ts b/src/commands/hardis/org/retrieve/sources/dx2.ts index 5b62b67c0..27e2bb1a5 100644 --- a/src/commands/hardis/org/retrieve/sources/dx2.ts +++ b/src/commands/hardis/org/retrieve/sources/dx2.ts @@ -1,71 +1,87 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; -import { execCommand, uxLog } from "../../../../../common/utils"; -import { promptOrg } from "../../../../../common/utils/orgUtils"; -import { prompts } from "../../../../../common/utils/prompts"; -import { PACKAGE_ROOT_DIR } 
from "../../../../../settings"; +import { execCommand, uxLog } from '../../../../../common/utils/index.js'; +import { promptOrg } from '../../../../../common/utils/orgUtils.js'; +import { prompts } from '../../../../../common/utils/prompts.js'; +import { PACKAGE_ROOT_DIR } from '../../../../../settings.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DxSources2 extends SfCommand { + public static title = 'Retrieve sfdx sources from org (2)'; -export default class DxSources2 extends SfdxCommand { - public static title = "Retrieve sfdx sources from org (2)"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("retrieveDx"); +**Retrieves Salesforce metadata from an org into SFDX source format, offering flexible input options for specifying metadata to retrieve.** - public static examples = ["$ sfdx hardis:org:retrieve:sources:dx2"]; +This command provides an alternative and enhanced way to pull metadata from any Salesforce org into your local SFDX project. It's particularly useful when you need fine-grained control over which metadata components are retrieved, either by providing a custom \`package.xml\` or by using predefined templates. - protected static flagsConfig = { - packagexml: flags.string({ - char: "x", - description: "Path to package.xml file", +Key functionalities: + +- **\`package.xml\` Input:** You can specify the path to a \`package.xml\` file using the \`--packagexml\` flag, which defines the exact metadata components to retrieve. 
+- **Template-Based Retrieval:** Use the \`--template\` flag to leverage predefined \`package.xml\` templates provided by sfdx-hardis (e.g., \`wave\` for CRM Analytics metadata), simplifying common retrieval scenarios. +- **Interactive Input:** If neither \`--packagexml\` nor \`--template\` is provided, the command will interactively prompt you to select a \`package.xml\` file or a template. +- **Target Org Selection:** Allows you to specify the Salesforce org from which to retrieve the sources. If not provided, it will prompt for selection. + +
+Technical explanations + +The command's technical implementation involves: + +- **Org Selection:** It uses \`promptOrg\` to guide the user in selecting the target Salesforce org if not provided via flags. +- **\`package.xml\` Resolution:** It determines the \`package.xml\` to use based on the provided flags (\`--packagexml\` or \`--template\`). If a template is used, it resolves the path to the corresponding template file within the sfdx-hardis installation. +- **File System Operations:** It checks if the specified \`package.xml\` file exists. If the file is outside the current project directory, it copies it to a temporary location within the project to ensure proper handling by the Salesforce CLI. +- **Salesforce CLI Retrieval:** It executes the \`sf project retrieve start\` command, passing the resolved \`package.xml\` path and the target username to retrieve the sources. +- **User Feedback:** Provides clear messages to the user about the retrieval process and its success. +
+`; + + public static examples = ['$ sf hardis:org:retrieve:sources:dx2']; + + public static flags: any = { + packagexml: Flags.string({ + char: 'x', + description: 'Path to package.xml file', }), - template: flags.string({ - char: "t", - description: "sfdx-hardis package.xml Template name. ex: wave", + template: Flags.string({ + char: 't', + description: 'sfdx-hardis package.xml Template name. ex: wave', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static supportsUsername = true; - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - let packageXml = this.flags.packagexml || null; - let targetUsername = this.flags.targetusername || null; - const template = this.flags.template || null; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(DxSources2); + let packageXml = flags.packagexml || null; + let targetUsername = flags['target-org']?.getUsername() || null; + const template = flags.template || 
null; + this.debugMode = flags.debug || false; // Prompt for organization if not sent if (targetUsername == null) { @@ -81,33 +97,37 @@ export default class DxSources2 extends SfdxCommand { // Prompt for package.xml if not sent if (packageXml === null) { const packageXmlRes = await prompts({ - message: c.cyanBright("Please input the path to the package.xml file to use force sfdx force:source:retrieve"), - type: "text", - name: "value", + message: c.cyanBright('Please input the path to the package.xml file'), + description: 'Specify the package.xml file that defines which metadata to retrieve from the org', + placeholder: 'Ex: manifest/package.xml', + type: 'text', + name: 'value', }); packageXml = packageXmlRes.value; } // Check package.xml file exists - if (!fs.existsSync(packageXml)) { - throw new SfdxError(c.red("Package.xml file not found at " + packageXml)); + if (!fs.existsSync(packageXml || '')) { + throw new SfError(c.red('Package.xml file not found at ' + packageXml)); } // Copy package.xml in /tmp if provided value is not within project - if (!path.resolve(packageXml).includes(path.resolve(process.cwd()))) { - const packageXmlTmp = path.join(process.cwd(), "tmp", "retrievePackage.xml"); + if (!path.resolve(packageXml || '').includes(path.resolve(process.cwd()))) { + const packageXmlTmp = path.join(process.cwd(), 'tmp', 'retrievePackage.xml'); await fs.ensureDir(path.dirname(packageXmlTmp)); - await fs.copy(packageXml, packageXmlTmp); - uxLog(this, c.grey(`Copied ${packageXml} to ${packageXmlTmp}`)); + await fs.copy(packageXml || '', packageXmlTmp); + uxLog("log", this, c.grey(`Copied ${packageXml} to ${packageXmlTmp}`)); packageXml = path.relative(process.cwd(), packageXmlTmp); } // Retrieve sources - const retrieveCommand = "sfdx force:source:retrieve" + ` -x "${packageXml}"` + ` --targetusername ${targetUsername}`; + const retrieveCommand = 'sf project retrieve start' + ` -x "${packageXml}"` + ` -o ${targetUsername}`; await execCommand(retrieveCommand, 
this, { fail: false, debug: this.debugMode, output: true }); // Set bac initial cwd - const message = `[sfdx-hardis] Successfully retrieved sfdx sources from ${c.bold(targetUsername)} using ${c.bold(packageXml)}`; - uxLog(this, c.green(message)); + const message = `[sfdx-hardis] Successfully retrieved sfdx sources from ${c.bold(targetUsername)} using ${c.bold( + packageXml + )}`; + uxLog("success", this, c.green(message)); return { outputString: message }; } } diff --git a/src/commands/hardis/org/retrieve/sources/metadata.ts b/src/commands/hardis/org/retrieve/sources/metadata.ts index bf4f7f190..ca3cb501f 100644 --- a/src/commands/hardis/org/retrieve/sources/metadata.ts +++ b/src/commands/hardis/org/retrieve/sources/metadata.ts @@ -1,78 +1,96 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as child from "child_process"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { MetadataUtils } from "../../../../../common/metadata-utils"; -import { ensureGitRepository, execCommand, isMonitoringJob, uxLog } from "../../../../../common/utils"; -import LegacyApi from "../../diagnose/legacyapi"; -import OrgTestApex from "../../test/apex"; -import * as util from "util"; -import { PACKAGE_ROOT_DIR } from "../../../../../settings"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as child from 'child_process'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { MetadataUtils } from '../../../../../common/metadata-utils/index.js'; +import { ensureGitRepository, execCommand, isMonitoringJob, uxLog } from '../../../../../common/utils/index.js'; +import LegacyApi from '../../diagnose/legacyapi.js'; 
+import OrgTestApex from '../../test/apex.js'; +import * as util from 'util'; +import { PACKAGE_ROOT_DIR } from '../../../../../settings.js'; +import { CONSTANTS } from '../../../../../config/index.js'; const exec = util.promisify(child.exec); -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class DxSources extends SfCommand { + public static title = 'Retrieve sfdx sources from org'; -export default class DxSources extends SfdxCommand { - public static title = "Retrieve sfdx sources from org"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("retrieveDx"); +**Retrieves Salesforce metadata from an org into a local directory, primarily for backup and monitoring purposes.** + +This command is designed to pull metadata from any Salesforce org, providing a snapshot of its configuration. It's particularly useful in monitoring contexts where you need to track changes in an org's metadata over time. + +Key functionalities: + +- **Metadata Retrieval:** Connects to a target Salesforce org and retrieves metadata based on a specified \`package.xml\`. +- **Managed Package Filtering:** By default, it filters out metadata from managed packages to reduce the volume of retrieved data. This can be overridden with the \`--includemanaged\` flag. +- **Monitoring Integration:** Designed to be used within a monitoring CI/CD job, it performs additional post-retrieval actions like running Apex tests and checking for legacy API usage. + +
+Technical explanations + +The command's technical implementation involves: + +- **Git Repository Check:** Ensures the current directory is a Git repository and initializes it if necessary. +- **\`MetadataUtils.retrieveMetadatas\`:** This utility is the core of the retrieval process. It connects to the Salesforce org, retrieves metadata based on the provided \`package.xml\` and filtering options (e.g., \`filterManagedItems\`), and places the retrieved files in a specified folder. +- **File System Operations:** Uses \`fs-extra\` to manage directories and copy retrieved files to the target folder. +- **Post-Retrieval Actions (for Monitoring Jobs):** If the command detects it's running within a monitoring CI/CD job (\`isMonitoringJob()\`): + - It updates the \`.gitlab-ci.yml\` file if \`AUTO_UPDATE_GITLAB_CI_YML\` is set. + - It converts the retrieved metadata into SFDX format using \`sf project convert mdapi\`. + - It executes \`sf hardis:org:test:apex\` to run Apex tests. + - It executes \`sf hardis:org:diagnose:legacyapi\` to check for legacy API usage. + - It logs warnings if post-actions fail or if the monitoring version is deprecated. +- **Error Handling:** Includes robust error handling for retrieval failures and post-action execution. +
+`; public static examples = [ - "$ sfdx hardis:org:retrieve:sources:metadata", - "$ SFDX_RETRIEVE_WAIT_MINUTES=200 sfdx hardis:org:retrieve:sources:metadata", + '$ sf hardis:org:retrieve:sources:metadata', + '$ SFDX_RETRIEVE_WAIT_MINUTES=200 sf hardis:org:retrieve:sources:metadata', ]; - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: ".", - description: messages.getMessage("folder"), + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: '.', + description: messages.getMessage('folder'), }), - packagexml: flags.string({ - char: "p", - description: messages.getMessage("packageXml"), + packagexml: Flags.string({ + char: 'p', + description: messages.getMessage('packageXml'), }), - includemanaged: flags.boolean({ + includemanaged: Flags.boolean({ default: false, - description: "Include items from managed packages", + description: 'Include items from managed packages', }), - instanceurl: flags.string({ - char: "r", - description: messages.getMessage("instanceUrl"), + instanceurl: Flags.string({ + char: 'r', + description: messages.getMessage('instanceUrl'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = 
true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; - - // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; + public static requiresProject = false; // Trigger notification(s) to MsTeams channel protected static triggerNotification = true; @@ -82,74 +100,88 @@ export default class DxSources extends SfdxCommand { /* jscpd:ignore-end */ public async run(): Promise { - const folder = path.resolve(this.flags.folder || "."); - const packageXml = path.resolve(this.flags.packagexml || "package.xml"); - const includeManaged = this.flags.includemanaged || false; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(DxSources); + const folder = path.resolve(flags.folder || '.'); + const packageXml = path.resolve(flags.packagexml || 'package.xml'); + const includeManaged = flags.includemanaged || false; + this.debugMode = flags.debug || false; // Check required pre-requisites await ensureGitRepository({ init: true }); const isMonitoring = await isMonitoringJob(); // Retrieve metadatas - let message = ""; + let message = ''; try { const filterManagedItems = includeManaged === false; - await MetadataUtils.retrieveMetadatas(packageXml, folder, false, [], { filterManagedItems: filterManagedItems }, this, this.debugMode); + await MetadataUtils.retrieveMetadatas( + packageXml, + folder, + false, + [], + { filterManagedItems: filterManagedItems }, + this, + flags['target-org'].getUsername(), + this.debugMode + ); // Copy to destination - await fs.copy(path.join(folder, "unpackaged"), path.resolve(folder)); + await fs.copy(path.join(folder, 'unpackaged'), path.resolve(folder)); // Remove temporary files - await fs.rm(path.join(folder, "unpackaged"), { recursive: true }); + await fs.rm(path.join(folder, 'unpackaged'), { recursive: true }); message = 
`[sfdx-hardis] Successfully retrieved metadatas in ${folder}`; - uxLog(this, message); + uxLog("other", this, message); } catch (e) { if (!isMonitoring) { throw e; } - message = "[sfdx-hardis] Error retrieving metadatas"; + message = '[sfdx-hardis] Error retrieving metadatas'; } // Post actions for monitoring CI job if (isMonitoring) { try { - return await this.processPostActions(message); + return await this.processPostActions(message, flags); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { - uxLog(this, c.yellow("Post actions have failed !")); + uxLog("warning", this, c.yellow('Post actions have failed !')); } - uxLog(this, c.yellow(c.bold("This version of sfdx-hardis monitoring is deprecated and will not be maintained anymore"))); - uxLog(this, c.yellow(c.bold("Switch to new sfdx-hardis monitoring that is enhanced !"))); - uxLog(this, c.yellow(c.bold("Info: https://sfdx-hardis.cloudity.com/salesforce-monitoring-home/"))); + uxLog( + "warning", + this, + c.yellow(c.bold('This version of sfdx-hardis monitoring is deprecated and will not be maintained anymore')) + ); + uxLog("warning", this, c.yellow(c.bold('Switch to new sfdx-hardis monitoring that is enhanced !'))); + uxLog("warning", this, c.yellow(c.bold(`Info: ${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-home/`))); } - return { orgId: this.org.getOrgId(), outputString: message }; + return { orgId: flags['target-org'].getOrgId(), outputString: message }; } - private async processPostActions(message) { - uxLog(this, c.cyan("Monitoring repo detected")); + private async processPostActions(message, flags) { + uxLog("action", this, c.cyan('Monitoring repo detected')); // Update default .gitlab-ci.yml within the monitoring repo - const localGitlabCiFile = path.join(process.cwd(), ".gitlab-ci.yml"); + const localGitlabCiFile = path.join(process.cwd(), '.gitlab-ci.yml'); if (fs.existsSync(localGitlabCiFile) && process.env?.AUTO_UPDATE_GITLAB_CI_YML) { - const localGitlabCiContent = await 
fs.readFile(localGitlabCiFile, "utf8"); - const latestGitlabCiFile = path.join(PACKAGE_ROOT_DIR, "defaults/monitoring/.gitlab-ci.yml"); - const latestGitlabCiContent = await fs.readFile(latestGitlabCiFile, "utf8"); + const localGitlabCiContent = await fs.readFile(localGitlabCiFile, 'utf8'); + const latestGitlabCiFile = path.join(PACKAGE_ROOT_DIR, 'defaults/monitoring/.gitlab-ci.yml'); + const latestGitlabCiContent = await fs.readFile(latestGitlabCiFile, 'utf8'); if (localGitlabCiContent !== latestGitlabCiContent) { await fs.writeFile(localGitlabCiFile, latestGitlabCiContent); - uxLog(this, c.cyan("Updated .gitlab-ci.yml file")); + uxLog("action", this, c.cyan('Updated .gitlab-ci.yml file')); } } // Also trace updates with sfdx sources, for better readability - uxLog(this, c.cyan("Convert into sfdx format...")); - if (fs.existsSync("metadatas")) { + uxLog("action", this, c.cyan('Convert into sfdx format...')); + if (fs.existsSync('metadatas')) { // Create sfdx project if not existing yet - if (!fs.existsSync("sfdx-project")) { - const createCommand = "sfdx force:project:create" + ` --projectname "sfdx-project"`; - uxLog(this, c.cyan("Creating sfdx-project...")); + if (!fs.existsSync('sfdx-project')) { + const createCommand = 'sf project generate' + ` --name "sfdx-project"`; + uxLog("action", this, c.cyan('Creating sfdx-project...')); await execCommand(createCommand, this, { output: true, fail: true, @@ -157,20 +189,18 @@ export default class DxSources extends SfdxCommand { }); } // Convert metadatas into sfdx sources - const mdapiConvertCommand = `sfdx force:mdapi:convert --rootdir "../metadatas"`; - uxLog(this, c.cyan("Converting metadata to source formation into sfdx-project...")); - uxLog(this, `[command] ${c.bold(c.grey(mdapiConvertCommand))}`); + const mdapiConvertCommand = `sf project convert mdapi --root-dir "../metadatas"`; + uxLog("action", this, c.cyan('Converting metadata to source formation into sfdx-project...')); + uxLog("other", this, `[command] 
${c.bold(c.grey(mdapiConvertCommand))}`); const prevCwd = process.cwd(); - process.chdir(path.join(process.cwd(), "./sfdx-project")); + process.chdir(path.join(process.cwd(), './sfdx-project')); try { const convertRes = await exec(mdapiConvertCommand, { maxBuffer: 10000 * 10000, }); - if (this.debug) { - uxLog(this, convertRes.stdout + convertRes.stderr); - } + uxLog("other", this, convertRes.stdout + convertRes.stderr); } catch (e) { - uxLog(this, c.yellow("Error while converting metadatas to sources:\n" + e.message)); + uxLog("warning", this, c.yellow('Error while converting metadatas to sources:\n' + (e as Error).message)); } process.chdir(prevCwd); } @@ -180,21 +210,21 @@ export default class DxSources extends SfdxCommand { const prevExitCode = process.exitCode || 0; try { // Run test classes - uxLog(this, c.cyan("Running Apex tests...")); + uxLog("action", this, c.cyan('Running Apex tests...')); orgTestRes = await new OrgTestApex([], this.config)._run(); // Check usage of Legacy API versions - uxLog(this, c.cyan("Running Legacy API Use checks...")); + uxLog("action", this, c.cyan('Running Legacy API Use checks...')); legacyApiRes = await new LegacyApi([], this.config)._run(); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { - uxLog(this, c.yellow("Issues found when running Apex tests or Legacy API, please check messages")); + uxLog("warning", this, c.yellow('Issues found when running Apex tests or Legacy API, please check messages')); } process.exitCode = prevExitCode; // Delete report files //const reportFiles = await glob("**/hardis-report/**", { cwd: process.cwd() }); //reportFiles.map(async (file) => await fs.remove(file)); - return { orgId: this.org.getOrgId(), outputString: message, orgTestRes, legacyApiRes }; + return { orgId: flags['target-org'].getOrgId(), outputString: message, orgTestRes, legacyApiRes }; } } diff --git a/src/commands/hardis/org/retrieve/sources/retrofit.ts 
b/src/commands/hardis/org/retrieve/sources/retrofit.ts index 63af3dfc4..0cdc99659 100644 --- a/src/commands/hardis/org/retrieve/sources/retrofit.ts +++ b/src/commands/hardis/org/retrieve/sources/retrofit.ts @@ -1,49 +1,52 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { getConfig } from "../../../../../config"; -import * as c from "chalk"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { getConfig } from '../../../../../config/index.js'; +import c from 'chalk'; // import * as path from "path"; -import { ensureGitRepository, gitHasLocalUpdates, execCommand, git, uxLog, isCI } from "../../../../../common/utils"; -import { CleanOptions } from "simple-git"; -import CleanReferences from "../../../project/clean/references"; -import SaveTask from "../../../work/save"; -import CleanXml from "../../../project/clean/xml"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class Retrofit extends SfdxCommand { +import { + ensureGitRepository, + gitHasLocalUpdates, + execCommand, + git, + uxLog, + isCI, +} from '../../../../../common/utils/index.js'; +import { CleanOptions } from 'simple-git'; +import CleanReferences from '../../../project/clean/references.js'; +import SaveTask from '../../../work/save.js'; +import CleanXml from '../../../project/clean/xml.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class Retrofit extends SfCommand { public static DEFAULT_SOURCES_TO_RETROFIT = [ - "CompactLayout", - "CustomApplication", - "CustomField", - "CustomLabel", - "CustomLabels", - "CustomMetadata", - "CustomObject", - "CustomObjectTranslation", - "CustomTab", - "DuplicateRule", - "EmailTemplate", - "FlexiPage", - "GlobalValueSet", - "Layout", - "ListView", - "MatchingRules", - "PermissionSet", - "RecordType", - "StandardValueSet", - "Translations", - "ValidationRule", + 'CompactLayout', + 'CustomApplication', + 'CustomField', + 'CustomLabel', + 'CustomLabels', + 'CustomMetadata', + 'CustomObject', + 'CustomObjectTranslation', + 'CustomTab', + 'DuplicateRule', + 'EmailTemplate', + 'FlexiPage', + 'GlobalValueSet', + 'Layout', + 'ListView', + 'MatchingRules', + 'PermissionSet', + 'RecordType', + 'StandardValueSet', + 'Translations', + 'ValidationRule', ]; - public static title = "Retrofit changes from an org"; + public static title = 'Retrofit changes from an org'; public static description = `Retrieve changes from org link to a ref branch not present in sources @@ -58,7 +61,7 @@ export default class Retrofit extends SfdxCommand { - \`CI_SOURCES_TO_RETROFIT\`: env variable (can be defined in CI context) - \`sourcesToRetrofit\` property in \`.sfdx-hardis.yml\` - - Default list:\n\n - ${Retrofit.DEFAULT_SOURCES_TO_RETROFIT.join("\n - ")} + - Default list:\n\n - 
${Retrofit.DEFAULT_SOURCES_TO_RETROFIT.join('\n - ')} You can also ignore some files even if they have been updated in production. To do that, define property **retrofitIgnoredFiles** in .sfdx-hardis.yml @@ -75,200 +78,201 @@ export default class Retrofit extends SfdxCommand { `; public static examples = [ - "$ sfdx hardis:org:retrieve:sources:retrofit", - "sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --commit --commitmode updated", - "sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --retrofitbranch preprod --commit --commitmode updated --push --pushmode mergerequest", + '$ sf hardis:org:retrieve:sources:retrofit', + 'sf hardis:org:retrieve:sources:retrofit --productionbranch master --commit --commitmode updated', + 'sf hardis:org:retrieve:sources:retrofit --productionbranch master --retrofitbranch preprod --commit --commitmode updated --push --pushmode mergerequest', ]; - protected static flagsConfig = { - commit: flags.boolean({ + public static flags: any = { + commit: Flags.boolean({ default: false, - description: "If true, a commit will be performed after the retrofit", + description: 'If true, a commit will be performed after the retrofit', }), - commitmode: flags.enum({ - default: "updated", - options: ["updated", "all"], - description: "Defines if we commit all retrieved updates, or all updates including creations", + commitmode: Flags.string({ + default: 'updated', + options: ['updated', 'all'], + description: 'Defines if we commit all retrieved updates, or all updates including creations', }), - push: flags.boolean({ + push: Flags.boolean({ default: false, - description: "If true, a push will be performed after the retrofit", + description: 'If true, a push will be performed after the retrofit', }), - pushmode: flags.enum({ - default: "default", - options: ["default", "mergerequest"], - description: "Defines if we send merge request options to git push arguments", + pushmode: Flags.string({ + default: 'default', + 
options: ['default', 'mergerequest'], + description: 'Defines if we send merge request options to git push arguments', }), - productionbranch: flags.string({ + productionbranch: Flags.string({ description: - "Name of the git branch corresponding to the org we want to perform the retrofit on.\nCan be defined in productionBranch property in .sfdx-hardis.yml", + 'Name of the git branch corresponding to the org we want to perform the retrofit on.\nCan be defined in productionBranch property in .sfdx-hardis.yml', }), - retrofittargetbranch: flags.string({ - description: "Name of branch the merge request will have as target\nCan be defined in retrofitBranch property in .sfdx-hardis.yml", + retrofittargetbranch: Flags.string({ + description: + 'Name of branch the merge request will have as target\nCan be defined in retrofitBranch property in .sfdx-hardis.yml', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 
+ public static requiresProject = true; protected configInfo: any = {}; protected debugMode = false; protected commit = false; - protected commitMode = "updated"; + protected commitMode: string | boolean = 'updated'; protected push = false; - protected pushMode = "default"; - protected productionBranch: string; - protected retrofitTargetBranch: string; + protected pushMode = 'default'; + protected productionBranch: string | null; + protected retrofitTargetBranch: string | null; /* jscpd:ignore-end */ public async run(): Promise { - this.commit = this.flags.commit || false; - this.commitMode = this.flags.commitmode || false; - this.push = this.flags.push || false; - this.pushMode = this.flags.pushmode || "default"; - this.productionBranch = this.flags.productionbranch || null; - this.retrofitTargetBranch = this.flags.retrofittargetbranch || null; - this.debugMode = this.flags.debug || false; - this.configInfo = await getConfig("branch"); + const { flags } = await this.parse(Retrofit); + this.commit = flags.commit || false; + this.commitMode = flags.commitmode || false; + this.push = flags.push || false; + this.pushMode = flags.pushmode || 'default'; + this.productionBranch = flags.productionbranch || null; + this.retrofitTargetBranch = flags.retrofittargetbranch || null; + this.debugMode = flags.debug || false; + this.configInfo = await getConfig('branch'); // check git repo before processing await ensureGitRepository(); // set commit & merge request author await this.setDefaultGitConfig(); // checkout to retrofit branch, retrieve changes & push them if any - await this.processRetrofit(); + await this.processRetrofit(flags); - return { outputString: "Merge request created/updated" }; + return { outputString: 'Merge request created/updated' }; } - async processRetrofit() { - const config = await getConfig("branch"); - this.productionBranch = this.productionBranch || config.productionBranch || process.env.CI_COMMIT_REF_NAME || "master"; + async processRetrofit(flags) 
{ + const config = await getConfig('branch'); + this.productionBranch = + this.productionBranch || config.productionBranch || process.env.CI_COMMIT_REF_NAME || 'master'; const retrofitWorkBranch = `retrofit/${this.productionBranch}`; - this.retrofitTargetBranch = this.retrofitTargetBranch || config.retrofitBranch || "retrofitTargetBranch MUST BE SET"; + this.retrofitTargetBranch = + this.retrofitTargetBranch || config.retrofitBranch || 'retrofitTargetBranch MUST BE SET'; - await git().fetch(["--prune"]); + await git().fetch(['--prune']); const branches = await git().branch(); if (branches.all.find((branch) => branch.includes(retrofitWorkBranch))) { // If manual command (not CI), force user to remove previous retrofit branches if (!isCI) { - throw new SfdxError(`You must delete local and remote branch ${c.yellow(retrofitWorkBranch)} before running this command`); + throw new SfError( + `You must delete local and remote branch ${c.yellow(retrofitWorkBranch)} before running this command` + ); } - uxLog(this, c.cyan(`Checkout to existing branch ${retrofitWorkBranch}`)); - await git().checkout(retrofitWorkBranch, ["--force"]); + uxLog("action", this, c.cyan(`Checkout to existing branch ${retrofitWorkBranch}`)); + await git().checkout(retrofitWorkBranch, ['--force']); } else { - uxLog(this, c.cyan(`Create a new branch ${retrofitWorkBranch} from ${this.productionBranch}`)); + uxLog("action", this, c.cyan(`Create a new branch ${retrofitWorkBranch} from ${this.productionBranch}`)); await git().checkoutBranch(retrofitWorkBranch, `origin/${this.productionBranch}`); } - const currentHash = await git().revparse(["HEAD"]); - uxLog(this, c.grey(`HEAD currently at ${currentHash}`)); + const currentHash = await git().revparse(['HEAD']); + uxLog("log", this, c.grey(`HEAD currently at ${currentHash}`)); // Retrieve sources from target org - const hasChangedSources = await this.retrieveSources(); + const hasChangedSources = await this.retrieveSources(flags); if (hasChangedSources) { 
// Commit and push if requested if (this.commit) { - await this.commitChanges(); + await this.commitChanges(flags); // Update package.xml files and clean if necessary - await SaveTask.run(["--targetbranch", this.retrofitTargetBranch, "--auto"]); + await SaveTask.run(['--targetbranch', this.retrofitTargetBranch || '', '--auto']); if (this.push) { await this.pushChanges(retrofitWorkBranch); } } } else { - uxLog(this, c.yellow("No changes to commit")); + uxLog("warning", this, c.yellow('No changes to commit')); // Delete locally created branch if we are within CI process if (isCI) { - uxLog(this, c.yellow("Deleting local retrofit branch...")); + uxLog("warning", this, c.yellow('Deleting local retrofit branch...')); await git().branch([`-D ${retrofitWorkBranch}`]); } } } // Commit all changes or only updated files - async commitChanges() { - if (this.commitMode === "updated") { - uxLog(this, c.cyan("Stage and commit only updated files... ")); - await git().add(["--update"]); - await this.doCommit(); - uxLog(this, c.cyan("Removing created files... ")); - await git().reset(["--hard"]); + async commitChanges(flags) { + if (this.commitMode === 'updated') { + uxLog("action", this, c.cyan('Stage and commit only updated files... ')); + await git().add(['--update']); + await this.doCommit(flags); + uxLog("action", this, c.cyan('Removing created files... ')); + await git().reset(['--hard']); await git().clean([CleanOptions.FORCE, CleanOptions.RECURSIVE]); } else { - uxLog(this, c.cyan("Stage and commit all files... ")); - await git().add(["--all"]); - await this.doCommit(); + uxLog("action", this, c.cyan('Stage and commit all files... 
')); + await git().add(['--all']); + await this.doCommit(flags); } } - async doCommit() { - await git().commit(`[sfdx-hardis] Changes retrofited from ${this.org.getUsername()}`); + async doCommit(flags) { + await git().commit(`[sfdx-hardis] Changes retrofited from ${flags['target-org'].getUsername()}`); } // Push changes and add merge request options if requested async pushChanges(retrofitWorkBranch: string) { const origin = `https://root:${process.env.CI_TOKEN}@${process.env.CI_SERVER_HOST}/${process.env.CI_PROJECT_PATH}.git`; - const pushOptions = []; - if (this.pushMode === "mergerequest") { + const pushOptions: any[] = []; + if (this.pushMode === 'mergerequest') { const mrOptions = [ - "-o merge_request.create", + '-o merge_request.create', `-o merge_request.target ${this.retrofitTargetBranch}`, `-o merge_request.title='[sfdx-hardis][RETROFIT] Created by pipeline #${process.env.CI_PIPELINE_ID}'`, - "-o merge_request.merge_when_pipeline_succeeds", - "-o merge_request.remove_source_branch", + '-o merge_request.merge_when_pipeline_succeeds', + '-o merge_request.remove_source_branch', ]; pushOptions.push(...mrOptions); } - const pushResult = await execCommand(`git push ${origin} ${retrofitWorkBranch} ${pushOptions.join(" ")}`, this, { + const pushResult = await execCommand(`git push ${origin} ${retrofitWorkBranch} ${pushOptions.join(' ')}`, this, { fail: true, debug: this.debugMode, output: true, }); - uxLog(this, c.yellow(JSON.stringify(pushResult))); + uxLog("warning", this, c.yellow(JSON.stringify(pushResult))); } async setDefaultGitConfig() { // Just do that in CI, because this config should already exist in local if (isCI) { // either use values from variables from CI or use predefined variables from gitlab - const USERNAME = process.env.CI_USER_NAME || process.env.GITLAB_USER_NAME; - const EMAIL = process.env.CI_USER_EMAIL || process.env.GITLAB_USER_EMAIL; - await git().addConfig("user.name", USERNAME, false, "local"); - await git().addConfig("user.email", 
EMAIL, false, "local"); + const USERNAME = process.env.CI_USER_NAME || process.env.GITLAB_USER_NAME || ''; + const EMAIL = process.env.CI_USER_EMAIL || process.env.GITLAB_USER_EMAIL || ''; + await git().addConfig('user.name', USERNAME, false, 'local'); + await git().addConfig('user.email', EMAIL, false, 'local'); } } - async retrieveSources() { - uxLog(this, c.cyan(`Retrieving sources from ${c.green(this.org.getUsername())} ...`)); + async retrieveSources(flags) { + uxLog("action", this, c.cyan(`Retrieving sources from ${c.green(flags['target-org'].getUsername())} ...`)); const RETROFIT_MDT: Array = process.env.CI_SOURCES_TO_RETROFIT || this.configInfo.sourcesToRetrofit || Retrofit.DEFAULT_SOURCES_TO_RETROFIT; - const retrieveCommand = `sfdx force:source:retrieve -m "${RETROFIT_MDT.join(",")}" -u ${this.org.getUsername()}`; + const retrieveCommand = `sf project retrieve start -m "${RETROFIT_MDT.join(',')}" -o ${flags[ + 'target-org' + ].getUsername()}`; await execCommand(retrieveCommand, this, { fail: true, debug: this.debugMode, output: true }); // Discard ignored changes await this.discardIgnoredChanges(); // Clean sources - await CleanReferences.run(["--type", "all"]); + await CleanReferences.run(['--type', 'all']); await CleanXml.run([]); // display current changes to commit @@ -277,13 +281,17 @@ export default class Retrofit extends SfdxCommand { // Discard ignored changes from retrofitIgnoredFiles async discardIgnoredChanges() { - const config = await getConfig("branch"); + const config = await getConfig('branch'); const ignoredFiles = config.retrofitIgnoredFiles || []; if (ignoredFiles.length > 0) { - uxLog(this, c.cyan(`Discarding ignored changes from .sfdx-hardis.yml ${c.bold("retrofitIgnoredFiles")} property...`)); + uxLog( + "action", + this, + c.cyan(`Discarding ignored changes from .sfdx-hardis.yml ${c.bold('retrofitIgnoredFiles')} property...`) + ); for (const ignoredFile of ignoredFiles) { // Reset file state - await git().checkout(["--", 
ignoredFile]); + await git().checkout(['--', ignoredFile]); } } } diff --git a/src/commands/hardis/org/select.ts b/src/commands/hardis/org/select.ts index 58f9ef0b6..73d78bb44 100644 --- a/src/commands/hardis/org/select.ts +++ b/src/commands/hardis/org/select.ts @@ -1,70 +1,140 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { promptOrg } from "../../../common/utils/orgUtils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from "chalk"; +import { makeSureOrgIsConnected, promptOrg } from '../../../common/utils/orgUtils.js'; +import { execSfdxJson, uxLog } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class OrgSelect extends SfCommand { + public static title = 'Select org'; -export default class OrgSelect extends SfdxCommand { - public static title = "Select org"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("selectOrg"); +**Allows you to select a Salesforce org and set it as your default, optionally filtering by Dev Hub or scratch orgs.** - public static examples = ["$ sfdx hardis:org:select"]; +This command simplifies switching between different Salesforce environments. It presents an interactive list of your authenticated orgs, enabling you to quickly set a new default org for subsequent Salesforce CLI commands. + +Key functionalities: + +- **Interactive Org Selection:** Displays a list of your authenticated Salesforce orgs, allowing you to choose one. +- **Default Org Setting:** Sets the selected org as the default for your Salesforce CLI environment. +- **Dev Hub Filtering:** The \`--devhub\` flag filters the list to show only Dev Hub orgs. +- **Scratch Org Filtering:** The \`--scratch\` flag filters the list to show only scratch orgs related to your default Dev Hub. +- **Connection Verification:** Ensures that the selected org is connected and prompts for re-authentication if necessary. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Org Prompt:** Uses the \`promptOrg\` utility to display a list of available Salesforce orgs and allows the user to select one. It passes the \`devHub\` and \`scratch\` flags to \`promptOrg\` to filter the displayed list. +- **Default Org Configuration:** The \`promptOrg\` utility (internally) handles setting the selected org as the default using Salesforce CLI's configuration mechanisms. +- **Connection Check:** It calls \`makeSureOrgIsConnected\` to verify the connection status of the selected org and guides the user to re-authenticate if the org is not connected. +- **Salesforce CLI Integration:** It leverages Salesforce CLI's underlying commands for org listing and authentication. +
+`; + + public static examples = ['$ sf hardis:org:select']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - devhub: flags.boolean({ - char: "h", + public static flags: any = { + devhub: Flags.boolean({ + char: 'h', + default: false, + description: messages.getMessage('withDevHub'), + }), + scratch: Flags.boolean({ + char: 's', default: false, - description: messages.getMessage("withDevHub"), + description: 'Select scratch org related to default DevHub', }), - scratch: flags.boolean({ - char: "s", + username: Flags.string({ + char: 't', + description: "Username of the org you want to authenticate (overrides the interactive prompt)", + },), + "prompt-default": Flags.boolean({ + char: 'e', default: false, - description: "Select scratch org related to default DevHub", + description: 'Prompt to set the selected org as default', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - const devHub = this.flags.devhub || 
false; - const scratch = this.flags.scratch; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(OrgSelect); + const devHub = flags.devhub || false; + const scratch = flags.scratch; + const promptDefault = flags["prompt-default"] || false; + const username = flags.username; + this.debugMode = flags.debug || false; - const org = await promptOrg(this, { devHub: devHub, setDefault: true, scratch: scratch }); + let setDefault = true; + if (promptDefault) { + const promptDefaultRes = await prompts({ + type: 'confirm', + name: 'setDefault', + message: 'Do you want to set the selected org as your default org?', + description: "If you choose 'No', the org will be connected but not set as default.", + default: true, + }); + if (!promptDefaultRes) { + setDefault = false; + } + } - // Return an object to be displayed with --json - return { outputString: `Selected org ${org.username}` }; + let org: any = {}; + if (username) { + uxLog("action", this, c.cyan(`Getting info about ${username} ...`)); + const displayOrgCommand = `sf org display --target-org ${username}`; + const displayResult = await execSfdxJson(displayOrgCommand, this, { + fail: false, + output: false, + }); + org = displayResult?.result; + } + else { + // Prompt user to select an org + org = await promptOrg(this, { devHub: devHub, setDefault: setDefault, scratch: scratch, useCache: false }); + } + // If the org is not connected, ask the user to authenticate again + uxLog("action", this, c.cyan(`Checking that user ${org.username} is connected to org ${org.instanceUrl} ...`)); + await makeSureOrgIsConnected(org.username); + if (setDefault) { + const setDefaultCommand = `sf config set target-org ${org.username}`; + await execSfdxJson(setDefaultCommand, this, { output: false }); + uxLog("action", this, c.cyan(`Your default org is now ${org.instanceUrl} (${org.username})`)); + WebSocketClient.sendRefreshStatusMessage(); + return { outputString: `Selected org ${org.username}` }; + } + 
else { + uxLog("action", this, c.cyan(`Org ${org.instanceUrl} (${org.username}) connected`)); + WebSocketClient.sendRefreshStatusMessage(); + return { outputString: `Connected org ${org.username}` }; + } } } diff --git a/src/commands/hardis/org/test/apex.ts b/src/commands/hardis/org/test/apex.ts index fa1a2dc85..105e7e271 100644 --- a/src/commands/hardis/org/test/apex.ts +++ b/src/commands/hardis/org/test/apex.ts @@ -1,23 +1,21 @@ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { execCommand, extractRegexMatchesMultipleGroups, uxLog } from "../../../../common/utils"; -import { getNotificationButtons, getOrgMarkdown } from "../../../../common/utils/notifUtils"; -import { getConfig, getReportDirectory } from "../../../../config"; -import { NotifProvider, NotifSeverity } from "../../../../common/notifProvider"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { execCommand, extractRegexMatchesMultipleGroups, uxLog } from '../../../../common/utils/index.js'; +import { getNotificationButtons, getOrgMarkdown } from '../../../../common/utils/notifUtils.js'; +import { CONSTANTS, getConfig, getReportDirectory } from '../../../../config/index.js'; +import { NotifProvider, NotifSeverity } from '../../../../common/notifProvider/index.js'; +import { generateApexCoverageOutputFile } from '../../../../common/utils/deployUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); 
+Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class OrgTestApex extends SfdxCommand { - public static title = "Run apex tests"; +export default class OrgTestApex extends SfCommand { + public static title = 'Run apex tests'; public static description = `Run apex tests in Salesforce org @@ -28,37 +26,32 @@ If following configuration is defined, it will fail if apex coverage target is n You can override env var SFDX_TEST_WAIT_MINUTES to wait more than 60 minutes. -This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.com/salesforce-monitoring-apex-tests/) and can output Grafana, Slack and MsTeams Notifications. +This command is part of [sfdx-hardis Monitoring](${CONSTANTS.DOC_URL_ROOT}/salesforce-monitoring-apex-tests/) and can output Grafana, Slack and MsTeams Notifications. 
`; - public static examples = ["$ sfdx hardis:org:test:apex"]; + public static examples = ['$ sf hardis:org:test:apex']; - protected static flagsConfig = { - testlevel: flags.enum({ - char: "l", - default: "RunLocalTests", - options: ["NoTestRun", "RunSpecifiedTests", "RunLocalTests", "RunAllTestsInOrg"], - description: messages.getMessage("testLevel"), + public static flags: any = { + testlevel: Flags.string({ + char: 'l', + default: 'RunLocalTests', + options: ['NoTestRun', 'RunSpecifiedTests', 'RunLocalTests', 'RunAllTestsInOrg'], + description: messages.getMessage('testLevel'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default // protected static requiresProject = true; @@ -68,44 +61,51 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co protected statusMessage: string; protected coverageTarget = 75.0; protected coverageValue = 0.0; - protected failingTestClasses = []; - private notifSeverity: NotifSeverity = "log"; + protected failingTestClasses: any[] = []; + private notifSeverity: NotifSeverity = 'log'; private 
notifText: string; - private notifAttachments = []; - private notifAttachedFiles = []; - private orgMarkdown = ""; - private notifButtons = []; + private notifAttachments: any = []; + private notifAttachedFiles: any = []; + private orgMarkdown = ''; + private notifButtons: any[] = []; /* jscpd:ignore-start */ public async run(): Promise { - const check = this.flags.check || false; - const testlevel = this.flags.testlevel || "RunLocalTests"; - const debugMode = this.flags.debug || false; + const { flags } = await this.parse(OrgTestApex); + const testlevel = flags.testlevel || 'RunLocalTests'; + const debugMode = flags.debug || false; - this.configInfo = await getConfig("branch"); - this.orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl); + this.configInfo = await getConfig('branch'); + const orgInstanceUrl = flags['target-org']?.getConnection()?.instanceUrl || ''; + this.orgMarkdown = await getOrgMarkdown(orgInstanceUrl); this.notifButtons = await getNotificationButtons(); /* jscpd:ignore-end */ - await this.runApexTests(testlevel, check, debugMode); + uxLog("action", this, c.cyan(`Running Apex tests in org ${orgInstanceUrl} with test level: ${testlevel}`)); + await this.runApexTests(testlevel, debugMode, flags['target-org']?.getUsername()); // No Apex - if (this.testRunOutcome === "NoApex") { - this.notifSeverity = "log"; - this.statusMessage = "No Apex found in the org"; + if (this.testRunOutcome === 'NoApex') { + this.notifSeverity = 'log'; + this.statusMessage = 'No Apex found in the org'; this.notifText = `No Apex found in org ${this.orgMarkdown}`; + uxLog("log", this, c.grey(this.statusMessage)); } // Failed tests - else if (this.testRunOutcome === "Failed") { + else if (this.testRunOutcome === 'Failed') { await this.processApexTestsFailure(); } + else if (this.testRunOutcome === 'Passed') { + uxLog("success", this, c.green(`Apex tests passed (${this.testRunOutcome})`)); + } // Get test coverage (and fail if not reached) await 
this.checkOrgWideCoverage(); await this.checkTestRunCoverage(); - uxLog(this, `Apex coverage: ${this.coverageValue}% (target: ${this.coverageTarget}%)`); + // uxLog("log", this, c.grey(this.statusMessage)); + uxLog("other", this, `Apex coverage: ${this.coverageValue}% (target: ${this.coverageTarget}%)`); - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "APEX_TESTS", + await setConnectionVariables(flags['target-org']?.getConnection());// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'APEX_TESTS', text: this.notifText, attachments: this.notifAttachments, buttons: this.notifButtons, @@ -124,28 +124,28 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co }); // Handle output message & exit code - if (this.notifSeverity === "error") { + if (this.notifSeverity === 'error') { process.exitCode = 1; - uxLog(this, c.red(this.statusMessage)); + uxLog("error", this, c.red(this.statusMessage)); } else { - uxLog(this, c.green(this.statusMessage)); + uxLog("success", this, c.green(this.statusMessage)); } - return { orgId: this.org.getOrgId(), outputString: this.statusMessage, statusCode: process.exitCode }; + return { orgId: flags['target-org'].getOrgId(), outputString: this.statusMessage, statusCode: process.exitCode }; } - private async runApexTests(testlevel: any, check: any, debugMode: any) { + private async runApexTests(testlevel: any, debugMode: any, orgUsername: string | null) { // Run tests with SFDX commands const reportDir = await getReportDirectory(); const testCommand = - "sfdx force:apex:test:run" + - " --codecoverage" + - " --resultformat human" + - ` --outputdir ${reportDir}` + - ` --wait ${process.env.SFDX_TEST_WAIT_MINUTES || "60"}` + - ` --testlevel ${testlevel}` + - (check ? " --checkonly" : "") + - (debugMode ? 
" --verbose" : ""); + 'sf apex run test' + + ' --code-coverage' + + ' --result-format human' + + ` --output-dir ${reportDir}` + + ` --wait ${process.env.SFDX_TEST_WAIT_MINUTES || '60'}` + + ` --test-level ${testlevel}` + + (orgUsername ? ` --target-org ${orgUsername}` : '') + + (debugMode ? ' --verbose' : ''); try { const execCommandRes = await execCommand(testCommand, this, { output: true, @@ -153,37 +153,36 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co fail: true, }); // Parse outcome value from logs with Regex - this.testRunOutcome = /Outcome *(.*) */.exec(execCommandRes.stdout + execCommandRes.stderr)[1].trim(); + this.testRunOutcome = (/Outcome *(.*) */.exec(execCommandRes.stdout + execCommandRes.stderr) || '')[1].trim(); this.testRunOutputString = execCommandRes.stdout + execCommandRes.stderr; + await generateApexCoverageOutputFile(); } catch (e) { // No Apex in the org if ( - e.message.includes("Toujours fournir une propriété classes, suites, tests ou testLevel") || - e.message.includes("Always provide a classes, suites, tests, or testLevel property") + (e as Error).message.includes('Toujours fournir une propriété classes, suites, tests ou testLevel') || + (e as Error).message.includes('Always provide a classes, suites, tests, or testLevel property') ) { - this.testRunOutcome = "NoApex"; + this.testRunOutcome = 'NoApex'; } else { // Failing Apex tests - this.testRunOutputString = e.message; - this.testRunOutcome = "Failed"; + this.testRunOutputString = (e as Error).message; + this.testRunOutcome = 'Failed'; + await generateApexCoverageOutputFile(); } } } private async processApexTestsFailure() { - this.notifSeverity = "error"; - this.statusMessage = `Org apex tests failure (Outcome: ${this.testRunOutcome})`; - this.notifText = `Org apex tests failure in org ${this.orgMarkdown} (Outcome: ${this.testRunOutcome})`; + this.notifSeverity = 'error'; const reportDir = await getReportDirectory(); // Parse log from external file 
- const sfReportFile = path.join(reportDir, "/test-result.txt"); + const sfReportFile = path.join(reportDir, '/test-result.txt'); if (fs.existsSync(sfReportFile)) { this.notifAttachedFiles = [sfReportFile]; } // Parse failing test classes const failuresRegex = /(.*) Fail (.*)/gm; const regexMatches = await extractRegexMatchesMultipleGroups(failuresRegex, this.testRunOutputString); - uxLog(this, c.yellow("Failing tests:")); for (const match of regexMatches) { this.failingTestClasses.push({ name: match[1].trim(), error: match[2].trim() }); } @@ -191,81 +190,92 @@ This command is part of [sfdx-hardis Monitoring](https://sfdx-hardis.cloudity.co { text: this.failingTestClasses .map((failingTestClass) => { - return "• " + failingTestClass.name + " / " + failingTestClass.error; + return '• *' + failingTestClass.name + '*: ' + failingTestClass.error; }) - .join("\n"), + .join('\n'), }, ]; - console.table(this.failingTestClasses); + this.statusMessage = `Apex tests failed (${this.failingTestClasses.length}). 
(Outcome: ${this.testRunOutcome})`; + this.notifText = `Apex tests failed (${this.failingTestClasses.length}) in org ${this.orgMarkdown} (Outcome: ${this.testRunOutcome})`; + const failedTestsString = this.failingTestClasses + .map((failingTestClass) => { + return `- ${failingTestClass.name}: ${failingTestClass.error}`; + }) + .join('\n'); + uxLog("warning", this, c.yellow("Failing Apex tests:\n" + failedTestsString)); } private async checkOrgWideCoverage() { - const coverageOrgWide = parseFloat(/Org Wide Coverage *(.*)/.exec(this.testRunOutputString)[1].replace("%", "")); + const coverageOrgWide = parseFloat( + (/Org Wide Coverage *(.*)/.exec(this.testRunOutputString) || '')[1].replace('%', '') + ); const minCoverageOrgWide = parseFloat( process.env.APEX_TESTS_MIN_COVERAGE_ORG_WIDE || - process.env.APEX_TESTS_MIN_COVERAGE || - this.configInfo.apexTestsMinCoverageOrgWide || - this.configInfo.apexTestsMinCoverage || - 75.0, + process.env.APEX_TESTS_MIN_COVERAGE || + this.configInfo.apexTestsMinCoverageOrgWide || + this.configInfo.apexTestsMinCoverage || + 75.0 ); this.coverageTarget = minCoverageOrgWide; this.coverageValue = coverageOrgWide; // Do not test if tests failed - if (this.testRunOutcome !== "Passed") { + if (this.testRunOutcome !== 'Passed') { return; } // Developer tried to cheat in config ^^ if (minCoverageOrgWide < 75.0) { - this.notifSeverity = "error"; + this.notifSeverity = 'error'; this.statusMessage = `Don't try to cheat with configuration: Minimum org wide coverage must be 75% ;)`; this.notifText = this.statusMessage; } // Min coverage not reached else if (coverageOrgWide < minCoverageOrgWide) { - this.notifSeverity = "error"; - this.statusMessage = `Test run coverage (org wide) ${coverageOrgWide}% should be > to ${minCoverageOrgWide}%`; + this.notifSeverity = 'error'; + this.statusMessage = `Test run coverage (org wide) *${coverageOrgWide}%* should be > to ${minCoverageOrgWide}%`; this.notifText = `${this.statusMessage} in ${this.orgMarkdown}`; 
} // We are good ! else { - this.notifSeverity = "log"; - this.statusMessage = `Test run coverage (org wide) ${coverageOrgWide}% is > to ${minCoverageOrgWide}%`; + this.notifSeverity = 'log'; + this.statusMessage = `Test run coverage (org wide) *${coverageOrgWide}%* is > to ${minCoverageOrgWide}%`; this.notifText = `${this.statusMessage} in ${this.orgMarkdown}`; } } private async checkTestRunCoverage() { - if (this.testRunOutputString.includes("Test Run Coverage")) { + if (this.testRunOutputString.includes('Test Run Coverage')) { // const coverageTestRun = parseFloat(testRes.result.summary.testRunCoverage.replace('%', '')); - const coverageTestRun = parseFloat(/Test Run Coverage *(.*)/.exec(this.testRunOutputString)[1].replace("%", "")); + const coverageTestRun = parseFloat( + (/Test Run Coverage *(.*)/.exec(this.testRunOutputString) || '')[1].replace('%', '') + ); const minCoverageTestRun = parseFloat( process.env.APEX_TESTS_MIN_COVERAGE_TEST_RUN || - process.env.APEX_TESTS_MIN_COVERAGE || - this.configInfo.apexTestsMinCoverage || - this.coverageTarget, + process.env.APEX_TESTS_MIN_COVERAGE || + this.configInfo.apexTestsMinCoverage || + this.coverageTarget ); this.coverageTarget = minCoverageTestRun; this.coverageValue = coverageTestRun; // Do not test if tests failed - if (this.testRunOutcome !== "Passed") { + if (this.testRunOutcome !== 'Passed') { return; } // Developer tried to cheat in config ^^ if (minCoverageTestRun < 75.0) { - this.notifSeverity = "error"; + this.notifSeverity = 'error'; this.statusMessage = `Don't try to cheat with configuration: Minimum test run coverage must be 75% ;)`; this.notifText = this.statusMessage; } // Min coverage not reached else if (coverageTestRun < minCoverageTestRun) { - this.notifSeverity = "error"; - this.statusMessage = `Test run coverage ${coverageTestRun}% should be > to ${minCoverageTestRun}%`; + this.notifSeverity = 'error'; + this.statusMessage = `Test run coverage *${coverageTestRun}%* should be > to 
${minCoverageTestRun}%`; this.notifText = `${this.statusMessage} in ${this.orgMarkdown}`; } // We are good ! else { - this.notifSeverity = "log"; - this.statusMessage = `Test run coverage ${coverageTestRun}% is > to ${minCoverageTestRun}%`; + this.notifSeverity = 'log'; + this.statusMessage = `Test run coverage *${coverageTestRun}%* is > to ${minCoverageTestRun}%`; this.notifText = `${this.statusMessage} in ${this.orgMarkdown}`; } } diff --git a/src/commands/hardis/org/user/activateinvalid.ts b/src/commands/hardis/org/user/activateinvalid.ts index 76e680b88..f2171ffc7 100644 --- a/src/commands/hardis/org/user/activateinvalid.ts +++ b/src/commands/hardis/org/user/activateinvalid.ts @@ -1,24 +1,19 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as columnify from "columnify"; -import * as sortArray from "sort-array"; -import { isCI, uxLog } from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; -import { bulkQuery, bulkUpdate, soqlQuery } from "../../../../common/utils/apiUtils"; -import { promptProfiles } from "../../../../common/utils/orgUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class OrgUserActiveInvalid extends SfdxCommand { - public static title = "Reactivate sandbox invalid users"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import sortArray from 'sort-array'; +import { isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { bulkQuery, bulkUpdate, soqlQuery } from '../../../../common/utils/apiUtils.js'; +import { promptProfiles } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class OrgUserActiveInvalid extends SfCommand { + public static title = 'Reactivate sandbox invalid users'; public static description = `Update sandbox users so their email is valid @@ -30,55 +25,52 @@ See article below `; public static examples = [ - `$ sfdx hardis:org:user:activateinvalid`, - `$ sfdx hardis:org:user:activateinvalid --targetusername myuser@myorg.com`, - `$ sfdx hardis:org:user:activateinvalid --profiles 'System Administrator,MyCustomProfile' --targetusername myuser@myorg.com`, + `$ sf hardis:org:user:activateinvalid`, + `$ sf hardis:org:user:activateinvalid --target-org my-user@myorg.com`, + `$ sf hardis:org:user:activateinvalid --profiles 'System Administrator,MyCustomProfile' --target-org my-user@myorg.com`, ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - profiles: flags.string({ - char: "p", - description: "Comma-separated list of profiles names that you want to reactive users assigned to and with a .invalid email", + public static flags: any = { + profiles: Flags.string({ + char: 'p', + description: + 'Comma-separated list of profiles 
names that you want to reactive users assigned to and with a .invalid email', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; - protected profiles = []; + protected profiles: any[] = []; protected maxUsersDisplay = 100; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - this.profiles = this.flags.profiles ? this.flags.profiles.split(",") : null; - const hasProfileConstraint = this.profiles !== null; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(OrgUserActiveInvalid); + this.profiles = flags.profiles ? 
flags.profiles.split(',') : []; + const hasProfileConstraint = this.profiles !== null && this.profiles.length > 0; + this.debugMode = flags.debug || false; - const conn = this.org.getConnection(); + const conn = flags['target-org'].getConnection(); // Query users that we want to freeze - uxLog(this, c.cyan(`Querying User records with email ending with .invalid...`)); + uxLog("action", this, c.cyan(`Querying User records with email ending with .invalid...`)); let userQuery = `SELECT Id,Name,Username,Email,ProfileId FROM User WHERE Email LIKE '%.invalid' and IsActive=true`; if (hasProfileConstraint) { const profilesQuery = `SELECT Id FROM Profile WHERE Name IN ('${this.profiles.join("','")}')`; @@ -92,7 +84,7 @@ See article below // Check empty result if (usersToActivate.length === 0) { const outputString = `No matching user records found with email ending with .invalid`; - uxLog(this, c.yellow(outputString)); + uxLog("warning", this, c.yellow(outputString)); return { outputString }; } @@ -100,72 +92,90 @@ See article below // Request confirmation or selection from user if (!isCI && !hasProfileConstraint) { const confirmSelect = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', initial: true, message: c.cyanBright( - `Do you want to replace invalid mails by valid mails for all ${c.bold(usersToActivate.length)} found users in org ${c.green( - this.org.getUsername(), - )} ?`, + `Do you want to replace invalid mails by valid mails for all ${c.bold( + usersToActivate.length + )} found users in org ${c.green(flags['target-org'].getUsername())} ?` ), + description: 'Choose whether to update email addresses for all found users or select specific ones', + placeholder: 'Select an option', choices: [ - { title: `Yes, all ${c.bold(usersToActivate.length)} users`, value: "all" }, - { title: "No, i want to manually select by profile(s)", value: "selectProfiles" }, - { title: "No, i want to manually select user(s)", value: "select" }, + { title: 
`Yes, all ${c.bold(usersToActivate.length)} users`, value: 'all' }, + { title: 'No, i want to manually select by profile(s)', value: 'selectProfiles' }, + { title: 'No, i want to manually select user(s)', value: 'select' }, ], }); // Let users select profiles to reactivate users - if (confirmSelect.value === "selectProfiles") { - const selectedProfileIds = await promptProfiles(this.org.getConnection(), { + if (confirmSelect.value === 'selectProfiles') { + const selectedProfileIds = await promptProfiles(flags['target-org'].getConnection(), { multiselect: true, - returnField: "Id", - message: "Please select profiles that you want to reactivate users with .invalid emails", + returnField: 'Id', + message: 'Please select profiles that you want to reactivate users with .invalid emails', }); usersToActivateFinal = usersToActivateFinal.filter((user) => selectedProfileIds.includes(user.ProfileId)); } // Let users select users to reactivate - else if (confirmSelect.value === "select") { + else if (confirmSelect.value === 'select') { const usersSorted = sortArray(usersToActivate, { - by: ["Name", "Email"], - order: ["asc"], + by: ['Name', 'Email'], + order: ['asc'], }); const selectUsers = await prompts({ - type: "multiselect", - name: "value", - message: "Please select users that you want to remove the .invalid from emails", - choices: usersSorted.map((user) => { + type: 'multiselect', + name: 'value', + message: 'Please select users that you want to remove the .invalid from emails', + description: 'Choose specific users to reactivate by removing .invalid suffix from their email addresses', + choices: usersSorted.map((user: any) => { return { title: `${user.Name} - ${user.Email}`, value: user }; }), }); usersToActivateFinal = selectUsers.value; - } else if (confirmSelect.value !== "all") { - const outputString = "Script cancelled by user"; - uxLog(this, c.yellow(outputString)); + } else if (confirmSelect.value !== 'all') { + const outputString = 'Script cancelled by user'; + 
uxLog("warning", this, c.yellow(outputString)); return { outputString }; } } // Process invalid users reactivation const userToActivateUpdated = usersToActivateFinal.map((user) => { - const emailReplaced = user.Email.replace(".invalid", ""); + const emailReplaced = user.Email.replace('.invalid', ''); return { Id: user.Id, Email: emailReplaced }; }); - const bulkUpdateRes = await bulkUpdate("User", "update", userToActivateUpdated, conn); - - uxLog(this, "\n" + c.white(columnify(this.debugMode ? userToActivateUpdated : userToActivateUpdated.slice(0, this.maxUsersDisplay)))); + const bulkUpdateRes = await bulkUpdate('User', 'update', userToActivateUpdated, conn); + + uxLog("action", this, c.cyan(`Results of the reactivation of ${userToActivateUpdated.length} users by removing the .invalid from their email`)); + uxLogTable( + this, + this.debugMode ? userToActivateUpdated : userToActivateUpdated.slice(0, this.maxUsersDisplay) + ); + if (!this.debugMode && userToActivateUpdated.length > this.maxUsersDisplay) { + uxLog("warning", this, c.yellow(c.italic(`(list truncated to the first ${this.maxUsersDisplay} users)`))); + } - const activateSuccessNb = bulkUpdateRes.successRecordsNb; - const activateErrorNb = bulkUpdateRes.errorRecordsNb; + const activateSuccessNb = bulkUpdateRes.successfulResults.length; + const activateErrorNb = bulkUpdateRes.failedResults.length; if (activateErrorNb > 0) { - uxLog(this, c.yellow(`Warning: ${c.red(c.bold(activateErrorNb))} users has not been reactivated (bulk API errors)`)); + uxLog( + "warning", + this, + c.yellow(`Warning: ${c.red(c.bold(activateErrorNb))} users has not been reactivated (bulk API errors)`) + ); } // Build results summary - uxLog(this, c.green(`${c.bold(activateSuccessNb)} users has been be reactivated by removing the .invalid of their email`)); + uxLog( + "success", + this, + c.green(`${c.bold(activateSuccessNb)} users has been be reactivated by removing the .invalid of their email`) + ); // Return an object to be 
displayed with --json return { - orgId: this.org.getOrgId(), + orgId: flags['target-org'].getOrgId(), activateSuccessNb: activateSuccessNb, activateErrorNb: activateErrorNb, outputString: `${activateSuccessNb} sandbox users has been be reactivated by removing the .invalid of their email`, diff --git a/src/commands/hardis/org/user/freeze.ts b/src/commands/hardis/org/user/freeze.ts index 1f60512ee..f9f9a82b8 100644 --- a/src/commands/hardis/org/user/freeze.ts +++ b/src/commands/hardis/org/user/freeze.ts @@ -1,76 +1,94 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as columnify from "columnify"; -import { generateReports, isCI, uxLog } from "../../../../common/utils"; -import { promptProfiles } from "../../../../common/utils/orgUtils"; -//import { executeApex } from "../../../../common/utils/deployUtils"; -import { prompts } from "../../../../common/utils/prompts"; -import { soqlQuery, bulkQuery, bulkUpdate } from "../../../../common/utils/apiUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { generateReports, isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { promptProfiles } from '../../../../common/utils/orgUtils.js'; +//import { executeApex } from "../../../../common/utils/deployUtils.js"; +import { prompts } from '../../../../common/utils/prompts.js'; +import { soqlQuery, bulkQuery, bulkUpdate } from '../../../../common/utils/apiUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific 
messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class OrgFreezeUser extends SfCommand { + public static title = 'Freeze user logins'; -export default class OrgFreezeUser extends SfdxCommand { - public static title = "Freeze user logins"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("orgfreezeUser"); +**Freezes Salesforce user logins, temporarily revoking access for selected users.** + +This command allows administrators to freeze Salesforce user logins. It provides a controlled way to temporarily revoke user access without deactivating the user record itself. This is useful for managing user access during leaves, security incidents, or when a user's access needs to be temporarily suspended. + +Key functionalities: + +- **User Selection:** You can select users to freeze based on their assigned profiles. + - \`--includeprofiles\`: Freeze users belonging to a comma-separated list of specified profiles. + - \`--excludeprofiles\`: Freeze users belonging to all profiles *except* those specified in a comma-separated list. + - If no profile flags are provided, an interactive menu will allow you to select profiles. +- **Interactive Confirmation:** In non-CI environments, it prompts for confirmation before freezing the selected users. +- **Bulk Freezing:** Efficiently freezes multiple user logins using Salesforce's Bulk API. +- **Reporting:** Generates CSV and XLSX reports of the users that are about to be frozen. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Bulk API):** It executes SOQL queries against the \`User\` and \`Profile\` objects to identify active users based on the provided profile filters. It then queries the \`UserLogin\` object to find active login sessions for these users. +- **Interactive Prompts:** Uses the \`prompts\` library to guide the user through profile selection and to confirm the freezing operation. +- **Bulk Update:** It constructs an array of \`UserLogin\` records with their \`Id\` and \`IsFrozen\` set to \`true\`, then uses \`bulkUpdate\` to perform the mass update operation on the Salesforce org. +- **Reporting:** It uses \`generateReports\` to create CSV and XLSX files containing details of the users to be frozen. +- **Logging:** Provides clear messages about the number of users found and the success of the freezing process. +
+`; public static examples = [ - `$ sfdx hardis:org:user:freeze`, - `$ sfdx hardis:org:user:freeze --targetusername myuser@myorg.com`, - `$ sfdx hardis:org:user:freeze --includeprofiles 'Standard'`, - `$ sfdx hardis:org:user:freeze --excludeprofiles 'System Administrator,Some Other Profile'`, + `$ sf hardis:org:user:freeze`, + `$ sf hardis:org:user:freeze --target-org my-user@myorg.com`, + `$ sf hardis:org:user:freeze --includeprofiles 'Standard'`, + `$ sf hardis:org:user:freeze --excludeprofiles 'System Administrator,Some Other Profile'`, ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { + public static flags: any = { // flag with a value (-n, --name=VALUE) - name: flags.string({ - char: "n", - description: messages.getMessage("nameFilter"), + name: Flags.string({ + char: 'n', + description: messages.getMessage('nameFilter'), }), - includeprofiles: flags.string({ - char: "p", - description: "List of profiles that you want to freeze, separated by commas", + includeprofiles: Flags.string({ + char: 'p', + description: 'List of profiles that you want to freeze, separated by commas', }), - excludeprofiles: flags.string({ - char: "e", - description: "List of profiles that you want to NOT freeze, separated by commas", + excludeprofiles: Flags.string({ + char: 'e', + description: 'List of profiles that you want to NOT freeze, separated by commas', }), - maxuserdisplay: flags.number({ - char: "m", + maxuserdisplay: Flags.integer({ + char: 'm', default: 100, - description: "Maximum users to display in logs", + description: 'Maximum users to display in logs', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - 
description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected maxUsersDisplay = 100; protected debugMode = false; @@ -78,26 +96,28 @@ export default class OrgFreezeUser extends SfdxCommand { /* jscpd:ignore-end */ public async run(): Promise { - const includeProfileNames = this.flags.includeprofiles ? this.flags.includeprofiles.split(",") : []; - const excludeProfileNames = this.flags.excludeprofiles ? this.flags.excludeprofiles.split(",") : []; - this.maxUsersDisplay = this.flags.maxuserdisplay || 100; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(OrgFreezeUser); + const includeProfileNames = flags.includeprofiles ? flags.includeprofiles.split(',') : []; + const excludeProfileNames = flags.excludeprofiles ? 
flags.excludeprofiles.split(',') : []; + this.maxUsersDisplay = flags.maxuserdisplay || 100; + this.debugMode = flags.debug || false; - const conn = this.org.getConnection(); + const conn = flags['target-org'].getConnection(); // Select profiles that we want users to be frozen - let profileIds = []; - let profileNames = []; + let profileIds: any[] = []; + let profileNames: any[] = []; if (includeProfileNames.length === 0 && excludeProfileNames.length === 0) { // Manual user selection const profilesRes = await promptProfiles(conn, { multiselect: true, - message: "Please select profiles that you do you want to freeze users that are assigned to them ?", - returnField: "record", + message: 'Please select profiles that you do you want to freeze users that are assigned to them ?', + returnField: 'record', allowSelectMine: false, allowSelectMineErrorMessage: "If you freeze your own profile, you'll be unable to unfreeze it later :)", allowSelectAll: false, - allowSelectAllErrorMessage: "You can not select all profiles, keep at least one (usually System Administrator) so you can unfreeze later !", + allowSelectAllErrorMessage: + 'You can not select all profiles, keep at least one (usually System Administrator) so you can unfreeze later !', }); profileIds = profilesRes.map((profile) => profile.Id); profileNames = profilesRes.map((profile) => { @@ -105,11 +125,11 @@ export default class OrgFreezeUser extends SfdxCommand { }); } else if (includeProfileNames.length > 0) { // Use includeprofiles argument - const profilesConstraintIn = includeProfileNames.map((profileName) => `'${profileName}'`).join(","); + const profilesConstraintIn = includeProfileNames.map((profileName) => `'${profileName}'`).join(','); const profilesQuery = `SELECT Id,Name FROM Profile WHERE Name IN (${profilesConstraintIn})`; const profilesQueryRes = await soqlQuery(profilesQuery, conn); if (this.debugMode) { - uxLog(this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); + 
uxLog("log", this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); } profileIds = profilesQueryRes.records.map((profile) => profile.Id); profileNames = profilesQueryRes.records.map((profile) => { @@ -117,11 +137,11 @@ export default class OrgFreezeUser extends SfdxCommand { }); } else if (excludeProfileNames.length > 0) { // Use excludeprofiles argument - const profilesConstraintIn = excludeProfileNames.map((profileName) => `'${profileName}'`).join(","); + const profilesConstraintIn = excludeProfileNames.map((profileName) => `'${profileName}'`).join(','); const profilesQuery = `SELECT Id,Name FROM Profile WHERE Name NOT IN (${profilesConstraintIn})`; const profilesQueryRes = await soqlQuery(profilesQuery, conn); if (this.debugMode) { - uxLog(this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); + uxLog("log", this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); } profileIds = profilesQueryRes.records.map((profile) => profile.Id); profileNames = profilesQueryRes.records.map((profile) => { @@ -130,24 +150,24 @@ export default class OrgFreezeUser extends SfdxCommand { } // List profiles that must be frozen - const profileIdsStr = profileIds.map((profileId) => `'${profileId}'`).join(","); + const profileIdsStr = profileIds.map((profileId) => `'${profileId}'`).join(','); // Query users that we want to freeze - uxLog(this, c.cyan(`Querying User records matching ${c.bold(profileIds.length)} profiles...`)); + uxLog("action", this, c.cyan(`Querying User records matching ${c.bold(profileIds.length)} profiles...`)); const userQuery = `SELECT Id,Name,Username,ProfileId FROM User WHERE ProfileId IN (${profileIdsStr}) and IsActive=true`; const userQueryRes = await bulkQuery(userQuery, conn); const usersToFreeze = userQueryRes.records; - const userIdsStr = usersToFreeze.map((user) => `'${user.Id}'`).join(","); + const userIdsStr = usersToFreeze.map((user) => `'${user.Id}'`).join(','); // Check empty result 
if (usersToFreeze.length === 0) { const outputString = `No matching user records found with defined profile constraints`; - uxLog(this, c.yellow(outputString)); + uxLog("warning", this, c.yellow(outputString)); return { outputString }; } // Query related UserLogin records - uxLog(this, c.cyan(`Querying UserLogin records matching ${c.bold(usersToFreeze.length)} users...`)); + uxLog("action", this, c.cyan(`Querying UserLogin records matching ${c.bold(usersToFreeze.length)} users...`)); const userLoginQuery = `SELECT Id,UserId,IsFrozen FROM UserLogin WHERE UserId IN (${userIdsStr}) and IsFrozen=false`; const userLoginQueryRes = await bulkQuery(userLoginQuery, conn); const userLoginsToFreeze = userLoginQueryRes.records; @@ -161,30 +181,37 @@ export default class OrgFreezeUser extends SfdxCommand { Profile: profileNames.filter((profile) => profile[0] === matchingUser.ProfileId)[1], }; }); - uxLog(this, "\n" + c.white(columnify(this.debugMode ? usersToFreezeDisplay : usersToFreezeDisplay.slice(0, this.maxUsersDisplay)))); - if (!this.debugMode === false && usersToFreezeDisplay.length > this.maxUsersDisplay) { - uxLog(this, c.yellow(c.italic(`(list truncated to the first ${this.maxUsersDisplay} users)`))); + uxLog("action", this, c.cyan(`List of ${userLoginsToFreeze.length} users that will be frozen:`)); + uxLogTable( + this, + this.debugMode ? 
usersToFreezeDisplay : usersToFreezeDisplay.slice(0, this.maxUsersDisplay) + ); + if (!this.debugMode && usersToFreezeDisplay.length > this.maxUsersDisplay) { + uxLog("warning", this, c.yellow(c.italic(`(list truncated to the first ${this.maxUsersDisplay} users)`))); } - uxLog(this, c.cyan(`${c.bold(userLoginsToFreeze.length)} users can be frozen.`)); + // Generate csv + xls of users about to be frozen - await generateReports(usersToFreezeDisplay, ["Username", "Name", "Profile"], this, { - logFileName: "users-to-freeze", - logLabel: "Extract of users to freeze", + await generateReports(usersToFreezeDisplay, ['Username', 'Name', 'Profile'], this, { + logFileName: 'users-to-freeze', + logLabel: 'Extract of users to freeze', }); // Request configuration from user if (!isCI) { const confirmfreeze = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', initial: true, message: c.cyanBright( - `Are you sure you want to freeze these ${c.bold(userLoginsToFreeze.length)} users in org ${c.green(this.org.getUsername())} (y/n)?`, + `Are you sure you want to freeze these ${c.bold(userLoginsToFreeze.length)} users in org ${c.green( + flags['target-org'].getUsername() + )} ?` ), + description: 'Confirm freezing selected users, which will deactivate their accounts in the Salesforce org', }); if (confirmfreeze.value !== true) { - const outputString = "Script cancelled by user"; - uxLog(this, c.yellow(outputString)); + const outputString = 'Script cancelled by user'; + uxLog("warning", this, c.yellow(outputString)); return { outputString }; } } @@ -193,20 +220,20 @@ export default class OrgFreezeUser extends SfdxCommand { const userLoginsFrozen = userLoginsToFreeze.map((userLogin) => { return { Id: userLogin.Id, IsFrozen: true }; }); - const bulkUpdateRes = await bulkUpdate("UserLogin", "update", userLoginsFrozen, conn); + const bulkUpdateRes = await bulkUpdate('UserLogin', 'update', userLoginsFrozen, conn); - const freezeSuccessNb = 
bulkUpdateRes.successRecordsNb; - const freezeErrorsNb = bulkUpdateRes.errorRecordsNb; + const freezeSuccessNb = bulkUpdateRes.successfulResults.length; + const freezeErrorsNb = bulkUpdateRes.failedResults.length; if (freezeErrorsNb > 0) { - uxLog(this, c.yellow(`Warning: ${c.red(c.bold(freezeErrorsNb))} users has not been frozen (bulk API errors)`)); + uxLog("warning", this, c.yellow(`Warning: ${c.red(c.bold(freezeErrorsNb))} users has not been frozen (bulk API errors)`)); } // Build results summary - uxLog(this, c.green(`${c.bold(freezeSuccessNb)} users has been be frozen.`)); + uxLog("success", this, c.green(`${c.bold(freezeSuccessNb)} users has been be frozen.`)); // Return an object to be displayed with --json return { - orgId: this.org.getOrgId(), + orgId: flags['target-org'].getOrgId(), freezeSuccess: freezeSuccessNb, freezeErrors: freezeErrorsNb, outputString: `${freezeSuccessNb} users has been be frozen`, diff --git a/src/commands/hardis/org/user/unfreeze.ts b/src/commands/hardis/org/user/unfreeze.ts index 170c941f3..4ac725646 100644 --- a/src/commands/hardis/org/user/unfreeze.ts +++ b/src/commands/hardis/org/user/unfreeze.ts @@ -1,76 +1,94 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as columnify from "columnify"; -import { generateReports, isCI, uxLog } from "../../../../common/utils"; -import { promptProfiles } from "../../../../common/utils/orgUtils"; -//import { executeApex } from "../../../../common/utils/deployUtils"; -import { prompts } from "../../../../common/utils/prompts"; -import { soqlQuery, bulkQuery, bulkUpdate } from "../../../../common/utils/apiUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; 
+import { generateReports, isCI, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { promptProfiles } from '../../../../common/utils/orgUtils.js'; +//import { executeApex } from "../../../../common/utils/deployUtils.js"; +import { prompts } from '../../../../common/utils/prompts.js'; +import { soqlQuery, bulkQuery, bulkUpdate } from '../../../../common/utils/apiUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class OrgUnfreezeUser extends SfCommand { + public static title = 'Unfreeze user logins'; -export default class OrgUnfreezeUser extends SfdxCommand { - public static title = "Unfreeze user logins"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("orgUnfreezeUser"); +**Unfreezes Salesforce user logins, restoring access for selected users.** + +This command allows administrators to unfreeze Salesforce user logins, reactivating their access to the Salesforce org. This is the counterpart to the \`freeze\` command and is used to restore access after a temporary suspension. + +Key functionalities: + +- **User Selection:** You can select users to unfreeze based on their assigned profiles. + - \`--includeprofiles\`: Unfreeze users belonging to a comma-separated list of specified profiles. + - \`--excludeprofiles\`: Unfreeze users belonging to all profiles *except* those specified in a comma-separated list. + - If no profile flags are provided, an interactive menu will allow you to select profiles. 
+- **Interactive Confirmation:** In non-CI environments, it prompts for confirmation before unfreezing the selected users. +- **Bulk Unfreezing:** Efficiently unfreezes multiple user logins using Salesforce's Bulk API. +- **Reporting:** Generates CSV and XLSX reports of the users that are about to be unfrozen. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Bulk API):** It executes SOQL queries against the \`User\` and \`Profile\` objects to identify active users based on the provided profile filters. It then queries the \`UserLogin\` object to find frozen login sessions for these users. +- **Interactive Prompts:** Uses the \`prompts\` library to guide the user through profile selection and to confirm the unfreezing operation. +- **Bulk Update:** It constructs an array of \`UserLogin\` records with their \`Id\` and \`IsFrozen\` set to \`false\`, then uses \`bulkUpdate\` to perform the mass update operation on the Salesforce org. +- **Reporting:** It uses \`generateReports\` to create CSV and XLSX files containing details of the users to be unfrozen. +- **Logging:** Provides clear messages about the number of users found and the success of the unfreezing process. +
+`; public static examples = [ - `$ sfdx hardis:org:user:unfreeze`, - `$ sfdx hardis:org:user:unfreeze --targetusername myuser@myorg.com`, - `$ sfdx hardis:org:user:unfreeze --includeprofiles 'Standard'`, - `$ sfdx hardis:org:user:unfreeze --excludeprofiles 'System Administrator,Some Other Profile'`, + `$ sf hardis:org:user:unfreeze`, + `$ sf hardis:org:user:unfreeze --target-org my-user@myorg.com`, + `$ sf hardis:org:user:unfreeze --includeprofiles 'Standard'`, + `$ sf hardis:org:user:unfreeze --excludeprofiles 'System Administrator,Some Other Profile'`, ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { + public static flags: any = { // flag with a value (-n, --name=VALUE) - name: flags.string({ - char: "n", - description: messages.getMessage("nameFilter"), + name: Flags.string({ + char: 'n', + description: messages.getMessage('nameFilter'), }), - includeprofiles: flags.string({ - char: "p", - description: "List of profiles that you want to unfreeze, separated by commas", + includeprofiles: Flags.string({ + char: 'p', + description: 'List of profiles that you want to unfreeze, separated by commas', }), - excludeprofiles: flags.string({ - char: "e", - description: "List of profiles that you want to NOT unfreeze, separated by commas", + excludeprofiles: Flags.string({ + char: 'e', + description: 'List of profiles that you want to NOT unfreeze, separated by commas', }), - maxuserdisplay: flags.number({ - char: "m", + maxuserdisplay: Flags.integer({ + char: 'm', default: 100, - description: "Maximum users to display in logs", + description: 'Maximum users to display in logs', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: 
flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected maxUsersDisplay = 100; protected debugMode = false; @@ -78,22 +96,23 @@ export default class OrgUnfreezeUser extends SfdxCommand { /* jscpd:ignore-end */ public async run(): Promise { - const includeProfileNames = this.flags.includeprofiles ? this.flags.includeprofiles.split(",") : []; - const excludeProfileNames = this.flags.excludeprofiles ? this.flags.excludeprofiles.split(",") : []; - this.maxUsersDisplay = this.flags.maxuserdisplay || 100; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(OrgUnfreezeUser); + const includeProfileNames = flags.includeprofiles ? flags.includeprofiles.split(',') : []; + const excludeProfileNames = flags.excludeprofiles ? 
flags.excludeprofiles.split(',') : []; + this.maxUsersDisplay = flags.maxuserdisplay || 100; + this.debugMode = flags.debug || false; - const conn = this.org.getConnection(); + const conn = flags['target-org'].getConnection(); // Select profiles that we want users to be unfrozen - let profileIds = []; - let profileNames = []; + let profileIds: any[] = []; + let profileNames: any[] = []; if (includeProfileNames.length === 0 && excludeProfileNames.length === 0) { // Manual user selection const profilesRes = await promptProfiles(conn, { multiselect: true, - message: "Please select profiles that you do you want to unfreeze users that are assigned to them ?", - returnField: "record", + message: 'Please select profiles that you do you want to unfreeze users that are assigned to them ?', + returnField: 'record', }); profileIds = profilesRes.map((profile) => profile.Id); profileNames = profilesRes.map((profile) => { @@ -101,11 +120,11 @@ export default class OrgUnfreezeUser extends SfdxCommand { }); } else if (includeProfileNames.length > 0) { // Use includeprofiles argument - const profilesConstraintIn = includeProfileNames.map((profileName) => `'${profileName}'`).join(","); + const profilesConstraintIn = includeProfileNames.map((profileName) => `'${profileName}'`).join(','); const profilesQuery = `SELECT Id,Name FROM Profile WHERE Name IN (${profilesConstraintIn})`; const profilesQueryRes = await soqlQuery(profilesQuery, conn); if (this.debugMode) { - uxLog(this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); + uxLog("log", this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); } profileIds = profilesQueryRes.records.map((profile) => profile.Id); profileNames = profilesQueryRes.records.map((profile) => { @@ -113,11 +132,11 @@ export default class OrgUnfreezeUser extends SfdxCommand { }); } else if (excludeProfileNames.length > 0) { // Use excludeprofiles argument - const profilesConstraintIn = 
excludeProfileNames.map((profileName) => `'${profileName}'`).join(","); + const profilesConstraintIn = excludeProfileNames.map((profileName) => `'${profileName}'`).join(','); const profilesQuery = `SELECT Id,Name FROM Profile WHERE Name NOT IN (${profilesConstraintIn})`; const profilesQueryRes = await soqlQuery(profilesQuery, conn); if (this.debugMode) { - uxLog(this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); + uxLog("log", this, c.grey(`Query result:\n${JSON.stringify(profilesQueryRes, null, 2)}`)); } profileIds = profilesQueryRes.records.map((profile) => profile.Id); profileNames = profilesQueryRes.records.map((profile) => { @@ -126,24 +145,24 @@ export default class OrgUnfreezeUser extends SfdxCommand { } // List profiles that must be unfrozen - const profileIdsStr = profileIds.map((profileId) => `'${profileId}'`).join(","); + const profileIdsStr = profileIds.map((profileId) => `'${profileId}'`).join(','); // Query users that we want to unfreeze - uxLog(this, c.cyan(`Querying User records matching ${c.bold(profileIds.length)} profiles...`)); + uxLog("action", this, c.cyan(`Querying User records matching ${c.bold(profileIds.length)} profiles...`)); const userQuery = `SELECT Id,Name,Username,ProfileId FROM User WHERE ProfileId IN (${profileIdsStr}) and IsActive=true`; const userQueryRes = await bulkQuery(userQuery, conn); const usersToUnfreeze = userQueryRes.records; - const userIdsStr = usersToUnfreeze.map((user) => `'${user.Id}'`).join(","); + const userIdsStr = usersToUnfreeze.map((user) => `'${user.Id}'`).join(','); // Check empty result if (usersToUnfreeze.length === 0) { const outputString = `No matching user records found with defined profile constraints`; - uxLog(this, c.yellow(outputString)); + uxLog("warning", this, c.yellow(outputString)); return { outputString }; } // Query related UserLogin records - uxLog(this, c.cyan(`Querying UserLogin records matching ${c.bold(usersToUnfreeze.length)} users...`)); + uxLog("action", 
this, c.cyan(`Querying UserLogin records matching ${c.bold(usersToUnfreeze.length)} users...`)); const userLoginQuery = `SELECT Id,UserId,IsFrozen FROM UserLogin WHERE UserId IN (${userIdsStr}) and IsFrozen=true`; const userLoginQueryRes = await bulkQuery(userLoginQuery, conn); const userLoginsToUnfreeze = userLoginQueryRes.records; @@ -157,30 +176,37 @@ export default class OrgUnfreezeUser extends SfdxCommand { Profile: profileNames.filter((profile) => profile[0] === matchingUser.ProfileId)[1], }; }); - uxLog(this, "\n" + c.white(columnify(this.debugMode ? usersToUnfreezeDisplay : usersToUnfreezeDisplay.slice(0, this.maxUsersDisplay)))); - if (!this.debugMode === false && usersToUnfreezeDisplay.length > this.maxUsersDisplay) { - uxLog(this, c.yellow(c.italic(`(list truncated to the first ${this.maxUsersDisplay} users)`))); + uxLog("action", this, c.cyan(`List of users to unfreeze (${userLoginsToUnfreeze.length}):`)); + uxLogTable( + this, + this.debugMode ? usersToUnfreezeDisplay : usersToUnfreezeDisplay.slice(0, this.maxUsersDisplay) + ); + if (!this.debugMode && usersToUnfreezeDisplay.length > this.maxUsersDisplay) { + uxLog("warning", this, c.yellow(c.italic(`(list truncated to the first ${this.maxUsersDisplay} users)`))); } - uxLog(this, c.cyan(`${c.bold(userLoginsToUnfreeze.length)} users can be unfrozen.`)); + // Generate csv + xls of users about to be unfrozen - await generateReports(usersToUnfreezeDisplay, ["Username", "Name", "Profile"], this, { - logFileName: "users-to-unfreeze", - logLabel: "Extract of users to unfreeze", + await generateReports(usersToUnfreezeDisplay, ['Username', 'Name', 'Profile'], this, { + logFileName: 'users-to-unfreeze', + logLabel: 'Extract of users to unfreeze', }); // Request configuration from user if (!isCI) { const confirmunfreeze = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', initial: true, message: c.cyanBright( - `Are you sure you want to unfreeze these 
${c.bold(userLoginsToUnfreeze.length)} users in org ${c.green(this.org.getUsername())} (y/n)?`, + `Are you sure you want to unfreeze these ${c.bold(userLoginsToUnfreeze.length)} users in org ${c.green( + flags['target-org'].getUsername() + )} ?` ), + description: 'Confirm unfreezing selected users, which will reactivate their accounts in the Salesforce org', }); if (confirmunfreeze.value !== true) { - const outputString = "Script cancelled by user"; - uxLog(this, c.yellow(outputString)); + const outputString = 'Script cancelled by user'; + uxLog("warning", this, c.yellow(outputString)); return { outputString }; } } @@ -189,20 +215,24 @@ export default class OrgUnfreezeUser extends SfdxCommand { const userLoginsFrozen = userLoginsToUnfreeze.map((userLogin) => { return { Id: userLogin.Id, IsFrozen: false }; }); - const bulkUpdateRes = await bulkUpdate("UserLogin", "update", userLoginsFrozen, conn); + const bulkUpdateRes = await bulkUpdate('UserLogin', 'update', userLoginsFrozen, conn); - const unfreezeSuccessNb = bulkUpdateRes.successRecordsNb; - const unfreezeErrorsNb = bulkUpdateRes.errorRecordsNb; + const unfreezeSuccessNb = bulkUpdateRes.successfulResults.length; + const unfreezeErrorsNb = bulkUpdateRes.failedResults.length; if (unfreezeErrorsNb > 0) { - uxLog(this, c.yellow(`Warning: ${c.red(c.bold(unfreezeErrorsNb))} users has not been unfrozen (bulk API errors)`)); + uxLog( + "warning", + this, + c.yellow(`Warning: ${c.red(c.bold(unfreezeErrorsNb))} users has not been unfrozen (bulk API errors)`) + ); } // Build results summary - uxLog(this, c.green(`${c.bold(unfreezeSuccessNb)} users has been be unfrozen.`)); + uxLog("success", this, c.green(`${c.bold(unfreezeSuccessNb)} users has been be unfrozen.`)); // Return an object to be displayed with --json return { - orgId: this.org.getOrgId(), + orgId: flags['target-org'].getOrgId(), unfreezeSuccess: unfreezeSuccessNb, unfreezeErrors: unfreezeErrorsNb, outputString: `${unfreezeSuccessNb} users has been be 
unfrozen`, diff --git a/src/commands/hardis/package/create.ts b/src/commands/hardis/package/create.ts index 44daf1fc7..a5a16afe4 100644 --- a/src/commands/hardis/package/create.ts +++ b/src/commands/hardis/package/create.ts @@ -1,82 +1,106 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execSfdxJson, uxLog } from "../../../common/utils"; -import { prompts } from "../../../common/utils/prompts"; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execSfdxJson, uxLog } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class PackageCreate extends SfCommand { + public static title = 'Create a new package'; -export default class PackageCreate extends SfdxCommand { - public static title = "Create a new package"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("packageCreate"); +**Creates a new Salesforce package (either Managed or Unlocked) in your Dev Hub.** - public static examples = ["$ sfdx hardis:package:create"]; +This command streamlines the process of setting up a new Salesforce package, which is a fundamental step for modularizing your Salesforce metadata and enabling continuous integration and delivery practices. It guides you through defining the package's essential properties. + +Key functionalities: + +- **Interactive Package Definition:** Prompts you for the package name, the path to its source code, and the package type (Managed or Unlocked). +- **Package Type Selection:** + - **Managed Packages:** Ideal for AppExchange solutions, where code is hidden in subscriber orgs. + - **Unlocked Packages:** Suitable for client projects or shared tooling, where code is readable and modifiable in subscriber orgs. +- **Package Creation:** Executes the Salesforce CLI command to create the package in your connected Dev Hub. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Prompts:** Uses the \`prompts\` library to gather necessary information from the user, such as \`packageName\`, \`packagePath\`, and \`packageType\`. +- **Salesforce CLI Integration:** It constructs and executes the \`sf package create\` command, passing the user-provided details as arguments. +- **\`execSfdxJson\`:** This utility is used to execute the Salesforce CLI command and capture its JSON output, which includes the newly created package's ID. +- **User Feedback:** Provides clear messages to the user about the successful creation of the package, including its ID and the associated Dev Hub. +
+`; + + public static examples = ['$ sf hardis:package:create']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { - const debugMode = this.flags.debug || false; + const { flags } = await this.parse(PackageCreate); + const debugMode = flags.debug || false; // Request questions to user const packageResponse = await prompts([ { - type: "text", - name: "packageName", + type: 'text', + name: 'packageName', message: c.cyanBright(`Please input the name of the package (ex: MyPackage)`), + description: 'Enter a clear name for your new Salesforce package', + placeholder: 'Ex: MyPackage', }, { - type: "text", - name: "packagePath", + type: 'text', + name: 'packagePath', message: c.cyanBright(`Please input the path of the package (ex: sfdx-source/apex-mocks)`), + description: 'Specify the directory path where the package source 
code is located', + placeholder: 'Ex: sfdx-source/apex-mocks', }, { - type: "select", - name: "packageType", + type: 'select', + name: 'packageType', message: c.cyanBright(`Please select the type of the package`), + description: 'Choose whether this is an unlocked package or managed package', + placeholder: 'Select package type', choices: [ { - title: "Managed", - value: "Managed", - description: "Managed packages code is hidden in orgs where it is installed. Suited for AppExchanges packages", + title: 'Managed', + value: 'Managed', + description: + 'Managed packages code is hidden in orgs where it is installed. Suited for AppExchanges packages', }, { - title: "Unlocked", - value: "Unlocked", + title: 'Unlocked', + value: 'Unlocked', description: - "Unlocked packages code is readable and modifiable in orgs where it is installed. Use it for client project or shared tooling", + 'Unlocked packages code is readable and modifiable in orgs where it is installed. Use it for client project or shared tooling', }, ], }, @@ -84,16 +108,24 @@ export default class PackageCreate extends SfdxCommand { // Create package const packageCreateCommand = - "sfdx force:package:create" + + 'sf package create' + ` --name "${packageResponse.packageName}"` + - ` --packagetype ${packageResponse.packageType}` + + ` --package-type ${packageResponse.packageType}` + ` --path "${packageResponse.packagePath}"`; const packageCreateResult = await execSfdxJson(packageCreateCommand, this, { output: true, fail: true, debug: debugMode, }); - uxLog(this, c.cyan(`Created package Id: ${c.green(packageCreateResult.result.Id)} associated to DevHub ${c.green(this.hubOrg.getUsername())}`)); + uxLog( + "action", + this, + c.cyan( + `Created package Id: ${c.green(packageCreateResult.result.Id)} associated to DevHub ${c.green( + flags['target-dev-hub'].getUsername() + )}` + ) + ); // Return an object to be displayed with --json return { diff --git a/src/commands/hardis/package/install.ts 
b/src/commands/hardis/package/install.ts index 1d8fbdc0e..4d7426ba2 100644 --- a/src/commands/hardis/package/install.ts +++ b/src/commands/hardis/package/install.ts @@ -1,109 +1,107 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as axios1 from "axios"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import axios from 'axios'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; // import * as packages from '../../../../defaults/packages.json' -import { MetadataUtils } from "../../../common/metadata-utils"; -import { isCI, uxLog } from "../../../common/utils"; -import { managePackageConfig } from "../../../common/utils/orgUtils"; -import { prompts } from "../../../common/utils/prompts"; -import { PACKAGE_ROOT_DIR } from "../../../settings"; +import { MetadataUtils } from '../../../common/metadata-utils/index.js'; +import { isCI, uxLog } from '../../../common/utils/index.js'; +import { managePackageConfig } from '../../../common/utils/orgUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { PACKAGE_ROOT_DIR } from '../../../settings.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -const axios = axios1.default; - -// Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class PackageVersionInstall extends SfdxCommand { - public static title = "Install packages in an org"; +export default class PackageVersionInstall extends SfCommand { + public static title = 'Install packages in an org'; public static description = `Install a package in an org using its id (starting with **04t**) Assisted menu to propose to update \`installedPackages\` property in \`.sfdx-hardis.yml\` `; - public static examples = ["$ sfdx hardis:package:install"]; + public static examples = ['$ sf hardis:package:install']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - package: flags.string({ - char: "p", - description: "Package Version Id to install (04t...)", + public static flags: any = { + package: Flags.string({ + char: 'p', + description: 'Package Version Id to install (04t...)', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - installationkey: flags.string({ - char: "k", - default: null, - description: messages.getMessage("packageInstallationKey"), + installationkey: Flags.string({ + char: 'k', + default: '', + description: messages.getMessage('packageInstallationKey'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org 
username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; - /* jscpd:ignore-end */ - protected allPackagesFileName = path.join(PACKAGE_ROOT_DIR, "defaults/packages.json"); - protected sfdxProjectJsonFileName = path.join(process.cwd(), "sfdx-project.json"); + protected allPackagesFileName = path.join(PACKAGE_ROOT_DIR, 'defaults/packages.json'); + protected sfdxProjectJsonFileName = path.join(process.cwd(), 'sfdx-project.json'); public async run(): Promise { - const packagesRaw = await fs.readFile(this.allPackagesFileName, "utf8"); + const { flags } = await this.parse(PackageVersionInstall); + const packagesRaw = await fs.readFile(this.allPackagesFileName, 'utf8'); const packages = JSON.parse(packagesRaw); - const packageId = this.flags.package || null; - const packagesToInstall = []; + const packageId = flags.package || null; + const packagesToInstall: any[] = []; // If no package Id is sent, ask user what package he/she wants to install - if (!isCI && (packageId == null || !packageId.startsWith("04t"))) { + if (!isCI && (packageId == null || !packageId.startsWith('04t'))) { const allPackages = packages.map((pack) => ({ - title: `${c.yellow(pack.name)} - ${pack.repoUrl || "Bundle"}`, + title: `${c.yellow(pack.name)} - ${pack.repoUrl || 'Bundle'}`, value: pack, })); - allPackages.push({ title: "Other", value: "other" }); + allPackages.push({ title: 'Other', value: 'other' }); const packageResponse = await prompts({ - type: "select", - name: "value", - message: c.cyanBright(`Please select the package you want to install on org ${c.green(this.org.getUsername())}`), + type: 'select', + name: 'value', + message: c.cyanBright( + `Please select the package you want to install on org 
${c.green(flags['target-org'].getUsername())}` + ), + description: 'Choose which package to install from the available list', + placeholder: 'Select a package', choices: allPackages, initial: 0, }); - if (packageResponse.value === "other") { + if (packageResponse.value === 'other') { const packageDtlResponse = await prompts([ { - type: "text", - name: "value", + type: 'text', + name: 'value', message: c.cyanBright( - "What is the id of the Package Version to install ? (starting with 04t)\nYou can find it using tooling api request " + - c.bold("Select Id,SubscriberPackage.Name,SubscriberPackageVersionId from InstalledSubscriberPackage"), + 'What is the id of the Package Version to install ? (starting with 04t)\nYou can find it using tooling api request ' + + c.bold('Select Id,SubscriberPackage.Name,SubscriberPackageVersionId from InstalledSubscriberPackage') ), + description: 'Enter the package version ID for the package you want to install', + placeholder: 'Ex: 04t2p000000XXXXXX', }, { - type: "text", - name: "installationkey", - message: c.cyanBright("Enter the password for this package (leave empty if package is not protected by a password)"), + type: 'text', + name: 'installationkey', + message: c.cyanBright( + 'Enter the password for this package (leave empty if package is not protected by a password)' + ), + description: 'Provide the installation password if the package is protected', + placeholder: 'Ex: mypassword123', }, ]); const pckg: { SubscriberPackageVersionId?: string; installationkey?: string } = { @@ -125,10 +123,10 @@ Assisted menu to propose to update \`installedPackages\` property in \`.sfdx-har } } else { const pckg: { SubscriberPackageVersionId: string; installationkey?: string } = { - SubscriberPackageVersionId: packageId, + SubscriberPackageVersionId: packageId || '', }; - if (this.flags.installationkey) { - pckg.installationkey = this.flags.installationkey; + if (flags.installationkey) { + pckg.installationkey = flags.installationkey; } 
packagesToInstall.push(pckg); } @@ -140,7 +138,7 @@ Assisted menu to propose to update \`installedPackages\` property in \`.sfdx-har const configResp = await axios.get(pckg.configUrl); const packageAliases = configResp.data.packageAliases || []; pckg.SubscriberPackageName = pckg.package; - if (pckg.package.includes("@")) { + if (pckg.package.includes('@')) { pckg.SubscriberPackageVersionId = packageAliases[pckg.package]; } else { // use last occurrence of package alias @@ -153,76 +151,19 @@ Assisted menu to propose to update \`installedPackages\` property in \`.sfdx-har } } return pckg; - }), + }) ); // Install packages - await MetadataUtils.installPackagesOnOrg(packagesToInstallCompleted, null, this, "install"); + await MetadataUtils.installPackagesOnOrg(packagesToInstallCompleted, null, this, 'install'); const installedPackages = await MetadataUtils.listInstalledPackages(null, this); - uxLog(this, c.italic(c.grey("New package list on org:\n" + JSON.stringify(installedPackages, null, 2)))); + uxLog("other", this, c.italic(c.grey('New package list on org:\n' + JSON.stringify(installedPackages, null, 2)))); if (!isCI) { // Manage package install config storage await managePackageConfig(installedPackages, packagesToInstallCompleted); } - - /* disabled until sfdx multiple package deployment is working >_< - // Post install actions - if (!isCI && fs.existsSync(this.sfdxProjectJsonFileName)) { - - const postInstallResponse = await prompts([ - { - type: 'confirm', - name: 'retrieve', - message: c.cyanBright('Do you want to retrieve installed package sources in your local branch ?'), - initial: true - }, - { - type: 'confirm', - name: 'sfdxProject', - message: c.cyanBright('Do you want to update your sfdx-project.json ? 
(advice: yes)'), - initial: true - } - ]); - // Retrieve package sources if requested - if (postInstallResponse.retrieve === true) { - for (const pckg of packagesToInstallCompleted) { - const retrieveCommand = 'sfdx force:source:retrieve' + - ` -n ${pckg.key}` + - // ` -p ./force-app/main/default` + // let's try without it - ' -w 60'; - try { - await execCommand(retrieveCommand, this, { output: true, fail: true, debug: debugMode }); - } catch (e) { - // Ugly workaround but it's a sfdx bug... - uxLog(this, c.yellow(`Error while retrieving ${c.bold(pckg.key)} but it may have worked anyway`)); - if (fs.existsSync(path.join('.', pckg.key))) { - await fs.remove(path.join('.', pckg.key)); - } - } - } - } - // Update sfdx-project.json with new unlocked packages folder references, so it is taken in account with force:source:push and force:source:pull - if (postInstallResponse.sfdxProject === true) { - const sfdxProjectRaw = await fs.readFile(this.sfdxProjectJsonFileName, 'utf8'); - const sfdxProject = JSON.parse(sfdxProjectRaw); - let updated = false; - for (const installedPackage of installedPackages) { - const matchInstalled = packagesToInstallCompleted.filter(pckg => pckg.key === installedPackage.SubscriberPackageName); - const matchLocal = sfdxProject.packageDirectories.filter(packageDirectory => installedPackage.SubscriberPackageName === packageDirectory.path); - if (matchInstalled.length > 0 && matchLocal.length === 0) { - sfdxProject.packageDirectories.push({path: installedPackage.SubscriberPackageName}); - updated = true; - } - } - if (updated) { - await fs.writeFile(this.sfdxProjectJsonFileName, JSON.stringify(sfdxProject, null, 2)); - uxLog(this, c.cyan('[config] Updated sfdx-project.json to add new package folders')); - } - } - } - */ - + WebSocketClient.sendRefreshPipelineMessage(); // Return an object to be displayed with --json - return { outputString: "Installed package(s)" }; + return { outputString: 'Installed package(s)' }; } } diff --git 
a/src/commands/hardis/package/mergexml.ts b/src/commands/hardis/package/mergexml.ts index 7bedbb140..8f352b397 100644 --- a/src/commands/hardis/package/mergexml.ts +++ b/src/commands/hardis/package/mergexml.ts @@ -1,96 +1,118 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import { execCommand, uxLog } from "../../../common/utils"; -import { prompts } from "../../../common/utils/prompts"; -import { WebSocketClient } from "../../../common/websocketClient"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class MergePackageXml extends SfdxCommand { - public static title = "Merge package.xml files"; - - public static description = "Select and merge package.xml files"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { appendPackageXmlFilesContent } from '../../../common/utils/xmlUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class MergePackageXml extends SfCommand { + public static title = 'Merge package.xml files'; + + public static description = ` +## Command Behavior + +**Merges multiple Salesforce \`package.xml\` files into a single, consolidated \`package.xml\` file.** + +This command is useful for combining metadata definitions from various sources (e.g., different feature branches, separate development efforts) into one comprehensive package.xml, which can then be used for deployments or retrievals. + +Key functionalities: + +- **Flexible Input:** You can specify the \`package.xml\` files to merge either by: + - Providing a comma-separated list of file paths using the \`--packagexmls\` flag. + - Specifying a folder and a glob pattern using \`--folder\` and \`--pattern\` to automatically discover \`package.xml\` files. + - If no input is provided, an interactive menu will prompt you to select files from the \`manifest\` folder. 
+- **Customizable Output:** You can define the name and path of the resulting merged \`package.xml\` file using the \`--result\` flag. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** It uses \`glob\` to find \`package.xml\` files based on the provided folder and pattern, or it directly uses the list of files from the \`--packagexmls\` flag. +- **Interactive Prompts:** If no \`package.xml\` files are specified, it uses the \`prompts\` library to allow the user to interactively select files to merge. +- **\`appendPackageXmlFilesContent\` Utility:** The core merging logic is handled by the \`appendPackageXmlFilesContent\` utility function. This function reads the content of each input \`package.xml\` file, combines their metadata types and members, and writes the consolidated content to the specified result file. +- **XML Manipulation:** Internally, \`appendPackageXmlFilesContent\` parses the XML of each \`package.xml\`, merges the \`\` and \`\` elements, and then rebuilds the XML structure for the output file. +- **File System Operations:** It uses \`fs-extra\` to ensure the output directory exists and to write the merged \`package.xml\` file. +- **WebSocket Communication:** It uses \`WebSocketClient.requestOpenFile\` to open the generated merged \`package.xml\` file in VS Code for immediate review. +
+`; public static examples = [ - "$ sfdx hardis:package:mergexml", - "$ sfdx hardis:package:mergexml --folder packages --pattern /**/*.xml --result myMergedPackage.xml", - '$ sfdx hardis:package:mergexml --packagexmls "config/mypackage1.xml,config/mypackage2.xml,config/mypackage3.xml" --result myMergedPackage.xml', + '$ sf hardis:package:mergexml', + '$ sf hardis:package:mergexml --folder packages --pattern /**/*.xml --result myMergedPackage.xml', + '$ sf hardis:package:mergexml --packagexmls "config/mypackage1.xml,config/mypackage2.xml,config/mypackage3.xml" --result myMergedPackage.xml', ]; - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "manifest", - description: "Root folder", + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'manifest', + description: 'Root folder', }), - packagexmls: flags.string({ - char: "p", - description: "Comma separated list of package.xml files to merge. Will be prompted to user if not provided", + packagexmls: Flags.string({ + char: 'p', + description: 'Comma separated list of package.xml files to merge. 
Will be prompted to user if not provided', }), - pattern: flags.string({ - char: "x", - default: "/**/*package*.xml", - description: "Name criteria to list package.xml files", + pattern: Flags.string({ + char: 'x', + default: '/**/*package*.xml', + description: 'Name criteria to list package.xml files', }), - result: flags.string({ - char: "r", - description: "Result package.xml file name", + result: Flags.string({ + char: 'r', + description: 'Result package.xml file name', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + debug: Flags.boolean({ + default: false, + description: 'debug', }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; - - // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; + public static requiresProject = false; protected folder: string; protected pattern: string; - protected packageXmlFiles = []; + protected packageXmlFiles: any[] = []; protected resultFileName: string; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./manifest"; - this.pattern = this.flags.pattern || "/**/*package*.xml"; - this.packageXmlFiles = this.flags.packagexmls ? 
this.flags.packagexmls.split(",") : []; - this.resultFileName = this.flags.result || path.join(this.folder, "package-merge.xml"); + const { flags } = await this.parse(MergePackageXml); + this.folder = flags.folder || './manifest'; + this.pattern = flags.pattern || '/**/*package*.xml'; + this.packageXmlFiles = flags.packagexmls ? flags.packagexmls.split(',') : []; + this.resultFileName = flags.result || path.join(this.folder, 'package-merge.xml'); await fs.ensureDir(path.dirname(this.resultFileName)); - this.debugMode = this.flags.debug || false; + this.debugMode = flags.debug || false; /* jscpd:ignore-end */ // If packagexmls are not provided, prompt user if (this.packageXmlFiles.length === 0) { const rootFolder = path.resolve(this.folder); const findPackageXmlPattern = rootFolder + this.pattern; - const matchingFiles = await glob(findPackageXmlPattern, { cwd: process.cwd() }); + const matchingFiles = await glob(findPackageXmlPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); const filesSelectRes = await prompts({ - type: "multiselect", - name: "files", - message: "Please select the package.xml files you want to merge", + type: 'multiselect', + name: 'files', + message: 'Please select the package.xml files you want to merge', + description: 'Choose which package.xml files to combine into a single merged file', choices: matchingFiles.map((file) => { const relativeFile = path.relative(process.cwd(), file); return { title: relativeFile, value: relativeFile }; @@ -100,16 +122,11 @@ export default class MergePackageXml extends SfdxCommand { } // Process merge of package.xml files - const appendPackageXmlCommand = - "sfdx essentials:packagexml:append" + ` --packagexmls "${this.packageXmlFiles.join(",")}"` + ` --outputfile "${this.resultFileName}"`; - await execCommand(appendPackageXmlCommand, this, { - fail: true, - debug: this.debugMode, - }); + await appendPackageXmlFilesContent(this.packageXmlFiles, this.resultFileName); // Summary const msg = `Merged 
${c.green(c.bold(this.packageXmlFiles.length))} files into ${c.green(this.resultFileName)}`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Trigger command to open files config file in VsCode extension WebSocketClient.requestOpenFile(this.resultFileName); diff --git a/src/commands/hardis/package/version/create.ts b/src/commands/hardis/package/version/create.ts index 03a7c13f7..3f562c01a 100644 --- a/src/commands/hardis/package/version/create.ts +++ b/src/commands/hardis/package/version/create.ts @@ -1,111 +1,140 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { MetadataUtils } from "../../../../common/metadata-utils"; -import { execSfdxJson, isCI, uxLog } from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; -import { getConfig, setConfig } from "../../../../config"; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { MetadataUtils } from '../../../../common/metadata-utils/index.js'; +import { execSfdxJson, isCI, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { getConfig, setConfig } from '../../../../config/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class PackageVersionCreate extends SfCommand { + public static title = 'Create a new version of a package'; -export default class PackageVersionCreate extends SfdxCommand { - public static title = "Create a new version of a package"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("packageVersionCreate"); +**Creates a new version of a Salesforce package (2GP or Unlocked) in your Dev Hub.** - public static examples = ["$ sfdx hardis:package:version:create"]; +This command is a crucial step in the package development lifecycle, allowing you to iterate on your Salesforce functionalities and prepare them for deployment or distribution. It automates the process of creating a new, immutable package version. + +Key functionalities: + +- **Package Selection:** Prompts you to select an existing package from your \`sfdx-project.json\` file if not specified via the \`--package\` flag. +- **Installation Key:** Allows you to set an installation key (password) for the package version, protecting it from unauthorized installations. This can be provided via the \`--installkey\` flag or interactively. +- **Code Coverage:** Automatically includes code coverage checks during package version creation. +- **Post-Creation Actions:** + - **Delete After Creation (\`--deleteafter\`):** Deletes the newly created package version immediately after its creation. This is useful for testing the package creation process without accumulating unnecessary versions. + - **Install After Creation (\`--install\`):** Installs the newly created package version on your default Salesforce org. This is convenient for immediate testing or validation. + +
+Technical explanations + +The command's technical implementation involves: + +- **Package Directory Identification:** It identifies the package directory from your \`sfdx-project.json\` based on the selected package name. +- **Interactive Prompts:** Uses the \`prompts\` library to guide the user through package selection and installation key input if not provided as command-line arguments. +- **Configuration Persistence:** Stores the \`defaultPackageInstallationKey\` in your project's configuration (\`.sfdx-hardis.yml\`) for future use. +- **Salesforce CLI Integration:** It constructs and executes the \`sf package version create\` command, passing the package ID, installation key, and other flags. +- **\`execSfdxJson\`:** This utility is used to execute the Salesforce CLI command and capture its JSON output, which includes the \`SubscriberPackageVersionId\` of the newly created version. +- **Post-Creation Command Execution:** If \`--deleteafter\` or \`--install\` flags are set, it executes \`sf package version delete\` or delegates to \`MetadataUtils.installPackagesOnOrg\` respectively. +- **Error Handling:** Includes checks for missing package arguments and handles errors during package version creation or post-creation actions. +
+`; + + public static examples = ['$ sf hardis:package:version:create']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - package: flags.string({ - char: "p", - default: null, - description: "Package identifier that you want to use to generate a new package version", + package: Flags.string({ + char: 'p', + default: '', + description: 'Package identifier that you want to use to generate a new package version', }), - installkey: flags.string({ - char: "k", - default: null, - description: "Package installation key", + installkey: Flags.string({ + char: 'k', + default: '', + description: 'Package installation key', }), - deleteafter: flags.boolean({ + deleteafter: Flags.boolean({ default: false, - description: "Delete package version after creating it", + description: 'Delete package version after creating it', }), - install: flags.boolean({ - char: "i", + install: Flags.boolean({ + char: 'i', default: false, - description: "Install package version on default org after generation", + description: 'Install package version on default org after generation', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = 
true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; - protected package: string; + protected package: string | null; protected deleteAfter = false; protected install = false; - protected installKey = null; + protected installKey: string | null = null; protected promote = false; /* jscpd:ignore-end */ public async run(): Promise { - this.package = this.flags.package || null; - this.install = this.flags.install || false; - this.installKey = this.flags.installkey || null; - this.deleteAfter = this.flags.deleteafter || false; - this.promote = this.flags.promote || false; - const debugMode = this.flags.debug || false; - const config = await getConfig("project"); + const { flags } = await this.parse(PackageVersionCreate); + this.package = flags.package || null; + this.install = flags.install || false; + this.installKey = flags.installkey || null; + this.deleteAfter = flags.deleteafter || false; + //this.promote = flags.promote || false; + const debugMode = flags.debug || false; + const config = await getConfig('project'); // List project packages - const packageDirectories = this.project.getUniquePackageDirectories(); + const packageDirectories: any[] = this.project?.getUniquePackageDirectories() || []; // Ask user to select package and input install key if not sent as command arguments if (this.package == null) { if (isCI) { - throw new SfdxError("You need to send argument 'package'"); + throw new SfError("You need to send argument 'package'"); } const packageResponse = await prompts([ { - type: "select", - name: "packageSelected", - message: c.cyanBright(`Please select a package (this is not a drill, it will create an official new version !)`), + type: 'select', + name: 'packageSelected', + message: c.cyanBright( + `Please select a package (this is not a drill, it will create an official new version !)` + ), + description: 
'Choose which package to create a new version for - this action creates a permanent version', + placeholder: 'Select a package', choices: packageDirectories.map((packageDirectory) => { return { - title: packageDirectory.package || packageDirectory.path, + title: packageDirectory?.package || packageDirectory?.path || packageDirectory?.fullPath || packageDirectory?.name, value: packageDirectory.name, }; }), }, { - type: "text", - name: "packageInstallationKey", - message: c.cyanBright(`Please input an installation password (or let empty)`), - initial: config.defaultPackageInstallationKey || "", + type: 'text', + name: 'packageInstallationKey', + message: c.cyanBright( + 'Do you want to password protect your package ? (blank means no)' + ), + description: 'Optionally set a password to protect the package installation', + placeholder: 'Ex: mySecretPassword123', + initial: config.defaultPackageInstallationKey || '', }, ]); this.package = packageResponse.packageSelected; @@ -113,21 +142,21 @@ export default class PackageVersionCreate extends SfdxCommand { } // Identify package directory const pckgDirectory = packageDirectories.filter( - (pckgDirectory) => pckgDirectory.name === this.package || pckgDirectory.package === this.package, + (pckgDirectory) => pckgDirectory.name === this.package || pckgDirectory.package === this.package )[0]; if (config.defaultPackageInstallationKey !== this.installKey && this.installKey != null) { - await setConfig("project", { + await setConfig('project', { defaultPackageInstallationKey: this.installKey, }); } // Create package version - uxLog(this, c.cyan(`Generating new package version for ${c.green(pckgDirectory.package)}...`)); + uxLog("action", this, c.cyan(`Generating new package version for ${c.green(pckgDirectory.package)}...`)); const createCommand = - "sfdx force:package:version:create" + + 'sf package version create' + ` --package "${pckgDirectory.package}"` + - (this.installKey ? 
` --installationkey "${this.installKey}"` : " --installationkeybypass") + - " --codecoverage" + - " -w 60"; + (this.installKey ? ` --installation-key "${this.installKey}"` : ' --installation-key-bypass') + + ' --code-coverage' + + ' --wait 60'; const createResult = await execSfdxJson(createCommand, this, { fail: true, output: true, @@ -138,20 +167,24 @@ export default class PackageVersionCreate extends SfdxCommand { // If delete after is true, delete package version we just created if (this.deleteAfter) { // Delete package version - uxLog(this, c.cyan(`Delete new package version ${c.green(latestVersion)} of package ${c.green(pckgDirectory.package)}...`)); - const deleteVersionCommand = "sfdx force:package:version:delete --noprompt -p " + latestVersion; + uxLog( + "action", + this, + c.cyan(`Delete new package version ${c.green(latestVersion)} of package ${c.green(pckgDirectory.package)}...`) + ); + const deleteVersionCommand = 'sf package version delete --no-prompt --package ' + latestVersion; const deleteVersionResult = await execSfdxJson(deleteVersionCommand, this, { fail: true, output: true, debug: debugMode, }); if (!(deleteVersionResult.result.success === true)) { - throw new SfdxError(`Unable to delete package version ${latestVersion}`); + throw new SfError(`Unable to delete package version ${latestVersion}`); } } // Install package on org just after is has been generated else if (this.install) { - const packagesToInstall = []; + const packagesToInstall: any[] = []; const pckg: { SubscriberPackageVersionId?: string; installationkey?: string } = { SubscriberPackageVersionId: latestVersion, }; @@ -159,12 +192,12 @@ export default class PackageVersionCreate extends SfdxCommand { pckg.installationkey = this.installKey; } packagesToInstall.push(pckg); - await MetadataUtils.installPackagesOnOrg(packagesToInstall, null, this, "install"); + await MetadataUtils.installPackagesOnOrg(packagesToInstall, null, this, 'install'); } // Return an object to be displayed with 
--json return { - outputString: "Generated new package version", + outputString: 'Generated new package version', packageVersionId: latestVersion, }; } diff --git a/src/commands/hardis/package/version/list.ts b/src/commands/hardis/package/version/list.ts index 6cac3a375..a379f0dbd 100644 --- a/src/commands/hardis/package/version/list.ts +++ b/src/commands/hardis/package/version/list.ts @@ -1,59 +1,72 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { execCommand } from "../../../../common/utils"; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { execCommand } from '../../../../common/utils/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class PackageVersionList extends SfCommand { + public static title = 'Create a new version of a package'; -export default class PackageVersionCreate extends SfdxCommand { - public static title = "Create a new version of a package"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("packageVersionList"); +**Lists all Salesforce package versions associated with your Dev Hub.** - public static examples = ["$ sfdx hardis:package:version:list"]; +This command provides a comprehensive overview of your Salesforce packages and their versions, including details such as package ID, version number, installation key status, and creation date. It's an essential tool for managing your package development lifecycle, tracking releases, and identifying available versions for installation or promotion. + +Key functionalities: + +- **Comprehensive Listing:** Displays all package versions, regardless of their status (e.g., released, beta). +- **Dev Hub Integration:** Retrieves package version information directly from your connected Dev Hub. + +
+Technical explanations + +The command's technical implementation is straightforward: + +- **Salesforce CLI Integration:** It directly executes the \`sf package version list\` command. +- **\`execCommand\`:** This utility is used to run the Salesforce CLI command and capture its output. +- **Output Display:** The raw output from the Salesforce CLI command is displayed to the user, providing all the details about the package versions. +
+`; + + public static examples = ['$ sf hardis:package:version:list']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { - const debugMode = this.flags.debug || false; - const createCommand = "sfdx force:package:version:list"; + const { flags } = await this.parse(PackageVersionList); + const debugMode = flags.debug || false; + const createCommand = 'sf package version list'; await execCommand(createCommand, this, { fail: true, output: true, debug: debugMode, }); // Return an object to be displayed with --json - return { outputString: "Listed package versions" }; + return { outputString: 'Listed package versions' }; } } diff --git a/src/commands/hardis/package/version/promote.ts b/src/commands/hardis/package/version/promote.ts index c6b5776e6..ac1b0e22e 100644 --- a/src/commands/hardis/package/version/promote.ts +++ 
b/src/commands/hardis/package/version/promote.ts @@ -1,75 +1,95 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execSfdxJson, uxLog } from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execSfdxJson, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class PackageVersionPromote extends SfCommand { + public static title = 'Promote new versions of package(s)'; -export default class PackageVersionPromote extends SfdxCommand { - public static title = "Promote new versions of package(s)"; + public static description = ` +## Command Behavior - public static description = "Promote package(s) version(s): convert it from beta to released"; +**Promotes a Salesforce package version from beta to released status in your Dev Hub.** - public static examples = ["$ sfdx hardis:package:version:promote", "$ sfdx hardis:package:version:promote --auto"]; +This command is a critical step in the package development lifecycle, marking a package version as stable and ready for production use. 
Once promoted, a package version can be installed in production organizations. + +Key functionalities: + +- **Package Version Selection:** Allows you to select a specific package version to promote. If the \`--auto\` flag is used, it automatically identifies package versions that are not yet released and promotes them. +- **Automated Promotion:** When \`--auto\` is enabled, it queries for all unreleased package versions and promotes them without further user interaction. +- **Dev Hub Integration:** Interacts with your connected Dev Hub to change the status of the package version. + +
+Technical explanations + +The command's technical implementation involves: + +- **Package Alias Retrieval:** It retrieves package aliases from your \`sfdx-project.json\` to identify available packages. +- **Automated Promotion Logic:** If \`--auto\` is used, it executes \`sf package version list --released\` to get a list of already released packages and then filters the available package aliases to find those that are not yet released. +- **Interactive Prompts:** If not in auto mode, it uses the \`prompts\` library to allow the user to select a package version to promote. +- **Salesforce CLI Integration:** It constructs and executes the \`sf package version promote\` command, passing the package version ID. +- **\`execSfdxJson\`:** This utility is used to execute the Salesforce CLI command and capture its JSON output. +- **Error Handling:** It handles cases where a package version might already be promoted or if other errors occur during the promotion process. +
+`; + + public static examples = ['$ sf hardis:package:version:promote', '$ sf hardis:package:version:promote --auto']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - auto: flags.boolean({ - char: "d", + public static flags: any = { + auto: Flags.boolean({ + char: 'f', default: false, - description: "Auto-detect which versions of which packages need to be promoted", + description: 'Auto-detect which versions of which packages need to be promoted', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { - const debugMode = this.flags.debug || false; - const auto = this.flags.auto || false; + const { flags } = await this.parse(PackageVersionPromote); + const debugMode = flags.debug || false; + const auto = flags.auto || false; // List project packages - const sfdxProjectJson = await this.project.retrieveSfdxProjectJson(false); - const packageAliases = sfdxProjectJson.get("packageAliases") || []; 
+ const sfdxProjectJson: any = this.project?.getSfProjectJson(false) || {}; + const packageAliases = sfdxProjectJson.get('packageAliases') || []; const availablePackageAliases = {}; for (const packageAlias of Object.keys(packageAliases) .sort() - .filter((pckgAlias) => pckgAlias.includes("@"))) { - const packageName = packageAlias.split("@")[0]; + .filter((pckgAlias) => pckgAlias.includes('@'))) { + const packageName = packageAlias.split('@')[0]; availablePackageAliases[packageName] = packageAlias; } // Select packages to promote - const packagesToPromote = []; + const packagesToPromote: any[] = []; if (auto) { // Promote only packages not promoted yet - const packageListRes = await execSfdxJson("sfdx force:package:version:list --released", this, { output: true, fail: true }); + const packageListRes = await execSfdxJson('sf package version list --released', this, { + output: true, + fail: true, + }); const filteredPackagesToPromote = Object.values(availablePackageAliases).filter((packageAlias) => { return ( packageListRes.result.filter((releasedPackage) => { @@ -82,9 +102,13 @@ export default class PackageVersionPromote extends SfdxCommand { // Prompt user if not auto const packageResponse = await prompts([ { - type: "select", - name: "packageSelected", - message: c.cyanBright(`Please select a package (this is not a drill, it will create an official new version !)`), + type: 'select', + name: 'packageSelected', + message: c.cyanBright( + `Please select a package (this is not a drill, it will create an official new version !)` + ), + description: 'Choose which package to promote - this will create a new official version that cannot be undone', + placeholder: 'Select a package', choices: Object.values(availablePackageAliases).map((packageAlias) => { return { title: packageAlias, value: packageAlias }; }), @@ -94,13 +118,13 @@ export default class PackageVersionPromote extends SfdxCommand { packagesToPromote.push(packageResponse.packageSelected); } - const 
promotedPackageVersions = []; - const errorPromotedVersions = []; + const promotedPackageVersions: any[] = []; + const errorPromotedVersions: any[] = []; // Promote packages for (const packageToPromote of packagesToPromote) { - uxLog(this, c.cyan(`Promoting version of package ${c.green(packageToPromote)}`)); - const promoteCommand = "sfdx force:package:version:promote" + ` --package "${packageToPromote}"` + " --noprompt"; + uxLog("action", this, c.cyan(`Promoting version of package ${c.green(packageToPromote)}`)); + const promoteCommand = 'sf package version promote' + ` --package "${packageToPromote}"` + ' --no-prompt'; const promoteResult = await execSfdxJson(promoteCommand, this, { fail: false, output: false, @@ -108,21 +132,32 @@ export default class PackageVersionPromote extends SfdxCommand { }); if (promoteResult.status === 0) { uxLog( + "action", this, c.cyan( `Promoted package version ${c.green(packageToPromote)} with id ${c.green( - promoteResult.result.id, - )}. It is now installable on production orgs`, - ), + promoteResult.result.id + )}. It is now installable on production orgs` + ) ); promotedPackageVersions.push({ package: packageToPromote, result: promoteResult }); } else { - uxLog(this, c.yellow(`Error promoting package version ${c.red(packageToPromote)} (probably already promoted so it can be ok)`)); + uxLog( + "warning", + this, + c.yellow( + `Error promoting package version ${c.red(packageToPromote)} (probably already promoted so it can be ok)` + ) + ); errorPromotedVersions.push({ package: packageToPromote, result: promoteResult }); } } process.exitCode = errorPromotedVersions.length === 0 ? 
0 : 1; // Return an object to be displayed with --json - return { outputString: "Promoted packages", promotedPackageVersions: promotedPackageVersions, errorPromotedVersions: errorPromotedVersions }; + return { + outputString: 'Promoted packages', + promotedPackageVersions: promotedPackageVersions, + errorPromotedVersions: errorPromotedVersions, + }; } } diff --git a/src/commands/hardis/packagexml/append.ts b/src/commands/hardis/packagexml/append.ts new file mode 100644 index 000000000..c41d822b8 --- /dev/null +++ b/src/commands/hardis/packagexml/append.ts @@ -0,0 +1,62 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { AnyJson } from "@salesforce/ts-types"; +import { appendPackageXmlFilesContent } from "../../../common/utils/xmlUtils.js"; + +export class PackageXmlAppend extends SfCommand { + public static readonly description = ` +## Command Behavior + +**Appends the content of one or more Salesforce \`package.xml\` files into a single target \`package.xml\` file.** + +This command is useful for consolidating metadata definitions from various sources into a single manifest. For instance, you might have separate \`package.xml\` files for different features or metadata types, and this command allows you to combine them into one comprehensive file for deployment or retrieval. + +Key functionalities: + +- **Multiple Input Files:** Takes a comma-separated list of \`package.xml\` file paths as input. +- **Single Output File:** Merges the content of all input files into a specified output \`package.xml\` file. +- **Metadata Consolidation:** Combines the \`\` and \`\` elements from all input files, ensuring that all unique metadata components are included in the resulting file. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Parsing:** It reads and parses the XML content of each input \`package.xml\` file. +- **Content Merging:** It iterates through the parsed XML structures, merging the \`types\` and \`members\` arrays. If a metadata type exists in multiple input files, its members are combined (duplicates are typically handled by the underlying XML utility). +- **XML Building:** After consolidating the metadata, it rebuilds the XML structure for the output \`package.xml\` file. +- **File Writing:** The newly constructed XML content is then written to the specified output file. +- **\`appendPackageXmlFilesContent\` Utility:** The core logic for this operation is encapsulated within the \`appendPackageXmlFilesContent\` utility function, which handles the parsing, merging, and writing of the \`package.xml\` files. +
+`; + public static readonly examples = ["$ sf hardis packagexml append -p package1.xml,package2.xml -o package3.xml"]; + public static readonly flags: any = { + packagexmls: Flags.string({ + char: "p", + description: "package.xml files path (separated by commas)", + required: true + }), + outputfile: Flags.string({ + char: "f", + description: "package.xml output file", + required: true + }), + debug: Flags.boolean({ + default: false, + description: "debug", + }), + websocket: Flags.string({ + description: "websocket", + }), + }; + + protected packageXmlFiles: string[]; + protected outputFile: string; + + public async run(): Promise { + const { flags } = await this.parse(PackageXmlAppend); + this.packageXmlFiles = (flags.packagexmls || "").split(","); + this.outputFile = flags.outputfile; + await appendPackageXmlFilesContent(this.packageXmlFiles, this.outputFile); + return { outputPackageXmlFile: this.outputFile }; + } +} diff --git a/src/commands/hardis/packagexml/remove.ts b/src/commands/hardis/packagexml/remove.ts new file mode 100644 index 000000000..61fb37214 --- /dev/null +++ b/src/commands/hardis/packagexml/remove.ts @@ -0,0 +1,82 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { AnyJson } from "@salesforce/ts-types"; +import { removePackageXmlFilesContent } from "../../../common/utils/xmlUtils.js"; + +export class PackageXmlRemove extends SfCommand { + public static readonly description = ` +## Command Behavior + +**Removes metadata components from a \`package.xml\` file that are also present in another \`package.xml\` file (e.g., a \`destructiveChanges.xml\`).** + +This command is useful for refining your \`package.xml\` manifests by excluding components that are being deleted or are otherwise irrelevant for a specific deployment or retrieval. For example, you can use it to create a \`package.xml\` that only contains additions and modifications, by removing items listed in a \`destructiveChanges.xml\`. 
+ +Key functionalities: + +- **Source \`package.xml\`:** The main \`package.xml\` file from which components will be removed (specified by \`--packagexml\`). Defaults to \`package.xml\`. +- **Filter \`package.xml\`:** The \`package.xml\` file containing the components to be removed from the source (specified by \`--removepackagexml\`). Defaults to \`destructiveChanges.xml\`. +- **Output File:** The path to the new \`package.xml\` file that will contain the filtered content (specified by \`--outputfile\`). +- **Removed Only Output:** The \`--removedonly\` flag allows you to generate a \`package.xml\` that contains *only* the items that were removed from the source \`package.xml\`. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Parsing:** It reads and parses the XML content of both the source \`package.xml\` and the filter \`package.xml\`. +- **Content Comparison and Filtering:** It compares the metadata types and members defined in both files. Components found in the filter \`package.xml\` are excluded from the output. +- **XML Building:** After filtering, it rebuilds the XML structure for the new \`package.xml\` file. +- **File Writing:** The newly constructed XML content is then written to the specified output file. +- **\`removePackageXmlFilesContent\` Utility:** The core logic for this operation is encapsulated within the \`removePackageXmlFilesContent\` utility function, which handles the parsing, filtering, and writing of the \`package.xml\` files. +
+`; + public static readonly examples = ["$ sf hardis packagexml:remove -p package.xml -r destructiveChanges.xml -o my-reduced-package.xml"]; + public static readonly requiresProject = false; + public static readonly flags: any = { + packagexml: Flags.string({ + char: 'p', + description: 'package.xml file to reduce' + }), + removepackagexml: Flags.string({ + char: 'r', + description: 'package.xml file to use to filter input package.xml' + }), + removedonly: Flags.boolean({ + char: 'z', + description: 'Use this flag to generate a package.xml with only removed items', + default: false + }), + outputfile: Flags.string({ + char: 'f', + description: 'package.xml output file', + required: true + }), + debug: Flags.boolean({ + default: false, + description: "debug", + }), + websocket: Flags.string({ + description: "websocket", + }), + }; + + protected packageXmlFile: string; + protected removePackageXmlFile: string; + protected removedOnly = false; + protected outputFile: string; + + public async run(): Promise { + const { flags } = await this.parse(PackageXmlRemove); + this.packageXmlFile = flags.packagexml || 'package.xml'; + this.removePackageXmlFile = flags.removepackagexml || 'destructiveChanges.xml'; + this.removedOnly = flags.removedonly || false; + this.outputFile = flags.outputfile; + + await removePackageXmlFilesContent( + this.packageXmlFile, + this.removePackageXmlFile, + { logFlag: flags.debug, outputXmlFile: this.outputFile, removedOnly: this.removedOnly } + ); + + return { outputPackageXmlFile: this.outputFile }; + } +} diff --git a/src/commands/hardis/project/audit/apiversion.ts b/src/commands/hardis/project/audit/apiversion.ts index 0ea22e32b..9b30a188b 100644 --- a/src/commands/hardis/project/audit/apiversion.ts +++ b/src/commands/hardis/project/audit/apiversion.ts @@ -1,162 +1,265 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from 
"@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as sortArray from "sort-array"; -import { catchMatches, generateReports, uxLog } from "../../../../common/utils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import sortArray from 'sort-array'; +import { catchMatches, generateReports, uxLog, uxLogTable } from '../../../../common/utils/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -export default class CallInCallOut extends SfdxCommand { - public static title = "Audit Metadatas API Version"; +export default class CallInCallOut extends SfCommand { + public static title = 'Audit Metadatas API Version'; - public static description = messages.getMessage("auditApiVersion"); + public static description = `This command identifies metadata with an apiVersion lower than the value specified in the --minimumapiversion parameter. 
- public static examples = ["$ sfdx hardis:project:audit:apiversion"]; + It can also update the apiVersion to a specific value: + - When --fix parameter is provided (updates to minimumapiversion) + - When --newapiversion is specified (updates to that version) + + Example to handle [ApexClass / Trigger & ApexPage mandatory version upgrade](https://help.salesforce.com/s/articleView?id=sf.admin_locales_update_api.htm&type=5) : + + \`sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 --newapiversion 50\` + ` + + public static examples = [ + '$ sf hardis:project:audit:apiversion', + '$ sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45', + '$ sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 --fix', + '$ sf hardis:project:audit:apiversion --metadatatype ApexClass,ApexTrigger,ApexPage --minimumapiversion 45 --newapiversion 50' + ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - minimumapiversion: flags.number({ - char: "m", - default: 20.0, - description: messages.getMessage("minimumApiVersion"), + public static flags: any = { + minimumapiversion: Flags.integer({ + char: 'm', + default: 20, + description: messages.getMessage('minimumApiVersion'), }), - failiferror: flags.boolean({ - char: "f", + failiferror: Flags.boolean({ + char: 'f', default: false, - description: messages.getMessage("failIfError"), + description: messages.getMessage('failIfError'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a 
default username is required', }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + metadatatype: Flags.string({ + description: 'Metadata Types to fix. Comma separated. Supported Metadata types: ApexClass, ApexTrigger, ApexPage' + }), + fix: Flags.boolean({ + // can't use "f", already use for failiferror + default: false, + description: 'Automatically update API versions in files that are below the minimum version threshold to match the minimum version', + }), + newapiversion: Flags.integer({ + char: 'n', + description: 'Define an API version value to apply when updating files', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; - - /* jscpd:ignore-end */ + public static requiresProject = true; protected matchResults: any[] = []; public async run(): Promise { - this.debug = this.flags.debug || false; - const minimumApiVersion = this.flags.minimumapiversion || false; - const failIfError = this.flags.failiferror || false; + const { flags } = await this.parse(CallInCallOut); + const minimumApiVersion = flags.minimumapiversion || false; + const failIfError = flags.failiferror || false; + const newApiVersion = flags.newapiversion; + // Apply fixes if either fix flag is present or a new API version is specified + const shouldFix = flags.fix || (newApiVersion !== undefined); + const fixApiVersion = newApiVersion || minimumApiVersion; + const metadataType = flags.metadatatype || ''; + + const fixAllowedExtensions = { + "ApexClass": "cls", + "ApexTrigger": "trigger", + "ApexPage": "page", + }; + const fixAllowedMetadataTypes = Object.keys(fixAllowedExtensions); + + const 
fixTargetedMetadataTypes = metadataType.trim() === '' ? [] : (metadataType || '').replace(/\s+/g, '').split(','); + const fixInvalidMetadataTypes = fixTargetedMetadataTypes.filter(value => !fixAllowedMetadataTypes.includes(value)); + if (fixTargetedMetadataTypes.length > 0 && fixInvalidMetadataTypes.length > 0 && shouldFix) { + uxLog( + "warning", + this, + c.yellow( + `[sfdx-hardis] WARNING: --fix Invalid Metadata Type(s) found: ${c.bold( + fixInvalidMetadataTypes.join(', ') + )}. Only ${c.bold( + fixAllowedMetadataTypes.join(', ') + )} Metadata Types are allowed for the fix.` + ) + ); + if (failIfError) { + throw new SfError( + c.red( + `[sfdx-hardis] WARNING: --fix Invalid Metadata Type(s) found: ${c.bold( + fixInvalidMetadataTypes.join(', ') + )}. Only ${c.bold( + fixAllowedMetadataTypes.join(', ') + )} Metadata Types are allowed for the fix.` + ) + ); + } + } + + // Metadata Type Extensions to fix + const fixTargetedMetadataTypesExtensions = fixTargetedMetadataTypes.map(type => fixAllowedExtensions[type]) + const fixTargetedMetadataTypesPattern = new RegExp(`\\.(${fixTargetedMetadataTypesExtensions.join('|')})-meta\\.xml$`); + + let pattern = '**/*.xml'; + if (fixTargetedMetadataTypes.length > 0) { + // Check if there's only one extension type + if (fixTargetedMetadataTypesExtensions.length === 1) { + pattern = `**/*.${fixTargetedMetadataTypesExtensions[0]}-meta.xml`; + } else { + pattern = `**/*.{${fixTargetedMetadataTypesExtensions.join(',')}}-meta.xml`; + } + } - const pattern = "**/*.xml"; const catchers = [ { - type: "apiVersion", - subType: "", + type: 'apiVersion', + subType: '', regex: /(.*?)<\/apiVersion>/gims, - detail: [{ name: "apiVersion", regex: /(.*?)<\/apiVersion>/gims }], + detail: [{ name: 'apiVersion', regex: /(.*?)<\/apiVersion>/gims }], + fixed: false, }, ]; - const xmlFiles = await glob(pattern); + const xmlFiles = await glob(pattern, { ignore: GLOB_IGNORE_PATTERNS }); this.matchResults = []; - uxLog(this, `Browsing ${xmlFiles.length} 
files`); - /* jscpd:ignore-start */ + uxLog("other", this, `Browsing ${xmlFiles.length} files`); // Loop in files for (const file of xmlFiles) { - const fileText = await fs.readFile(file, "utf8"); - // Loop on criteria to find matches in this file - for (const catcher of catchers) { - const catcherMatchResults = await catchMatches(catcher, file, fileText, this); - this.matchResults.push(...catcherMatchResults); + try { + const fileText = await fs.readFile(file, 'utf8'); + // Update ApiVersion on file + let fixed = false; + if (shouldFix && fixTargetedMetadataTypes.length > 0 && fixTargetedMetadataTypesPattern.test(file)) { + const apiVersionMatch = fileText.match(/(.*?)<\/apiVersion>/); + if (apiVersionMatch && apiVersionMatch[1]) { + const currentApiVersion = parseFloat(apiVersionMatch[1]); + if (currentApiVersion < minimumApiVersion) { + const updatedContent = fileText.replace(/(.*?)<\/apiVersion>/, `${fixApiVersion}.0`); + await fs.promises.writeFile(file, updatedContent, 'utf-8'); + fixed = true; + uxLog("other", this, `Updated apiVersion in file: ${file} from ${currentApiVersion}.0 to ${fixApiVersion}.0`); + } + } + } + // Loop on criteria to find matches in this file + for (const catcher of catchers) { + const catcherMatchResults = await catchMatches(catcher, file, fileText, this); + // Add the "fixed" flag + const enrichedResults = catcherMatchResults.map(result => ({ + ...result, + fixed, + })); + this.matchResults.push(...enrichedResults); + } + } catch (error) { + if (error instanceof Error) { + uxLog("warning", this, c.yellow(`Error processing file ${file}: ${error.message}`)); + } else { + uxLog("warning", this, c.yellow(`Error processing file ${file}: ${String(error)}`)); + } } } - /* jscpd:ignore-end */ - // Format result const result: any[] = this.matchResults.map((item: any) => { return { type: item.type, fileName: item.fileName, - nameSpace: item.fileName.includes("__") ? 
item.fileName.split("__")[0] : "Custom", - apiVersion: parseFloat(item.detail["apiVersion"]), - valid: parseFloat(item.detail["apiVersion"]) > minimumApiVersion ? "yes" : "no", + nameSpace: item.fileName.includes('__') ? item.fileName.split('__')[0] : 'Custom', + apiVersion: parseFloat(item.detail['apiVersion']), + valid: parseFloat(item.detail['apiVersion']) >= (minimumApiVersion || 100) ? 'yes' : 'no', + fixed: item.fixed ? 'yes' : 'no', }; }); - // Sort array const resultSorted = sortArray(result, { - by: ["type", "subType", "fileName"], - order: ["asc", "asc", "asc"], + by: ['type', 'subType', 'fileName'], + order: ['asc', 'asc', 'asc'], }); // Display as table + uxLog("action", this, c.cyan(`Found ${c.bold(resultSorted.length)} metadata files with API Version.`)); const resultsLight = JSON.parse(JSON.stringify(resultSorted)); - console.table( + uxLogTable(this, resultsLight.map((item: any) => { delete item.detail; return item; - }), + }) ); - // Generate output files - const columns = [ - { key: "type", header: "IN/OUT" }, - { key: "fileName", header: "Apex" }, - { key: "nameSpace", header: "Namespace" }, - { key: "apiVersion", header: "API Version" }, - { key: "valid", header: `Valid ( > ${minimumApiVersion} )` }, - ]; - const reportFiles = await generateReports(resultSorted, columns, this); - const numberOfInvalid = result.filter((res: any) => res.valid === "no").length; + const numberOfInvalid = result.filter((res: any) => res.valid === 'no').length; const numberOfValid = result.length - numberOfInvalid; - if (numberOfInvalid > 0) { uxLog( + "warning", this, c.yellow( - `[sfdx-hardis] WARNING: Your sources contain ${c.bold(numberOfInvalid)} metadata files with API Version lesser than ${c.bold( - minimumApiVersion, - )}`, - ), + `WARNING: Your sources contain ${c.bold( + numberOfInvalid + )} metadata files with API Version lesser than ${c.bold(minimumApiVersion)}` + ) ); if (failIfError) { - throw new SfdxError(c.red(`[sfdx-hardis][ERROR] 
${c.bold(numberOfInvalid)} metadata files with wrong API version detected`)); + throw new SfError( + c.red(`${c.bold(numberOfInvalid)} metadata files with wrong API version detected`) + ); } } else { uxLog( + "success", this, c.green( - `[sfdx-hardis] SUCCESS: Your sources contain ${c.bold(numberOfValid)} metadata files with API Version superior to ${c.bold( - minimumApiVersion, - )}`, - ), + `SUCCESS: Your sources contain ${c.bold( + numberOfValid + )} metadata files with API Version superior to ${c.bold(minimumApiVersion)}` + ) ); } + // Generate output files + const columns = [ + { key: 'type', header: 'IN/OUT' }, + { key: 'fileName', header: 'Apex' }, + { key: 'nameSpace', header: 'Namespace' }, + { key: 'apiVersion', header: 'API Version' }, + { key: 'valid', header: `Valid ( > ${minimumApiVersion} )` }, + { key: 'fixed', header: 'Fixed' }, + ]; + const reportFiles = await generateReports(resultSorted, columns, this, { + logFileName: 'api-versions', + logLabel: 'Extract and Fix Metadata Api Versions', + }); + // Return an object to be displayed with --json return { - outputString: "Processed apiVersion audit", + outputString: 'Processed apiVersion audit', result: resultSorted, reportFiles, }; } + /* jscpd:ignore-end */ } diff --git a/src/commands/hardis/project/audit/callincallout.ts b/src/commands/hardis/project/audit/callincallout.ts index e30ae5906..5a46f39df 100644 --- a/src/commands/hardis/project/audit/callincallout.ts +++ b/src/commands/hardis/project/audit/callincallout.ts @@ -1,90 +1,108 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as sortArray from "sort-array"; -import { catchMatches, generateReports, uxLog } from "../../../../common/utils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from 
'@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import sortArray from 'sort-array'; +import { catchMatches, generateReports, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CallInCallOut extends SfCommand { + public static title = 'Audit CallIns and CallOuts'; -export default class CallInCallOut extends SfdxCommand { - public static title = "Audit CallIns and CallOuts"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("auditCallInCallOut"); +**Audits Apex classes for inbound (Call-In) and outbound (Call-Out) API calls, providing insights into integration points.** - public static examples = ["$ sfdx hardis:project:audit:callouts"]; +This command helps developers and architects understand the integration landscape of their Salesforce project by identifying where Apex code interacts with external systems or exposes functionality for external consumption. It's useful for security reviews, refactoring efforts, and documenting system integrations. + +Key functionalities: + +- **Inbound Call Detection:** Identifies Apex methods exposed as web services (\`webservice static\`) or REST resources (\`@RestResource\`). +- **Outbound Call Detection:** Detects HTTP callouts (\`new HttpRequest\`). 
+- **Detailed Information:** Extracts relevant details for each detected call, such as endpoint URLs for outbound calls or resource names for inbound calls. +- **Test Class Exclusion:** Automatically skips test classes (\`@isTest\`) to focus on production code. +- **CSV Report Generation:** Generates a CSV report summarizing all detected call-ins and call-outs, including their type, subtype (protocol), file name, namespace, and extracted details. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses \`glob\` to find all Apex class (\`.cls\`) and trigger (\`.trigger\`) files within the project. +- **Content Analysis:** Reads the content of each Apex file and uses regular expressions to identify patterns indicative of inbound or outbound calls. +- **Pattern Matching:** Defines a set of \`catchers\`, each with a \`type\` (INBOUND/OUTBOUND), \`subType\` (SOAP/REST/HTTP), and \`regex\` to match specific API call patterns. It also includes \`detail\` regexes to extract additional information. +- **\`catchMatches\` Utility:** This utility function is used to apply the defined \`catchers\` to each Apex file and extract all matching occurrences. +- **Data Structuring:** Organizes the extracted information into a structured format, including the file name, namespace, and detailed matches. +- **Reporting:** Uses \`generateReports\` to create a CSV report and display a table in the console, summarizing the audit findings. +- **Filtering:** Filters out files that start with 'hidden' or contain \`@isTest\` to focus on relevant code. +
+`; + + public static examples = ['$ sf hardis:project:audit:callouts']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { + public static flags: any = { // flag with a value (-n, --name=VALUE) - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ protected matchResults: any[] = []; public async run(): Promise { - this.debug = this.flags.debug || false; - - const pattern = "**/*.{cls,trigger}"; + const pattern = '**/*.{cls,trigger}'; const catchers = [ { - type: "INBOUND", - subType: "SOAP", + type: 'INBOUND', + subType: 'SOAP', regex: /webservice static/gim, - detail: [{ name: "webServiceName", regex: /webservice static (.*?){/gims }], + detail: [{ name: 'webServiceName', regex: /webservice static (.*?){/gims }], }, { - type: "INBOUND", - subType: "REST", + type: 'INBOUND', + subType: 'REST', regex: /@RestResource/gim, - detail: [{ name: "restResource", regex: /@RestResource\((.*?)\)/gims }], + detail: [{ name: 'restResource', regex: /@RestResource\((.*?)\)/gims }], }, { - type: 
"OUTBOUND", - subType: "HTTP", + type: 'OUTBOUND', + subType: 'HTTP', regex: /new HttpRequest/gim, detail: [ - { name: "endPoint", regex: /setEndpoint\((.*?);/gims }, - { name: "action", regex: /<[A-Za-z0-9_-]*:(.*?)>/gims }, + { name: 'endPoint', regex: /setEndpoint\((.*?);/gims }, + { name: 'action', regex: /<[A-Za-z0-9_-]*:(.*?)>/gims }, ], }, ]; - const apexFiles = await glob(pattern); + const apexFiles = await glob(pattern, { ignore: GLOB_IGNORE_PATTERNS }); this.matchResults = []; - uxLog(this, `Browsing ${apexFiles.length} files`); + uxLog("other", this, `Browsing ${apexFiles.length} files`); // Loop in files for (const file of apexFiles) { - const fileText = await fs.readFile(file, "utf8"); - if (fileText.startsWith("hidden") || fileText.includes("@isTest")) { + const fileText = await fs.readFile(file, 'utf8'); + if (fileText.startsWith('hidden') || fileText.includes('@isTest')) { continue; } // Loop on criteria to find matches in this file @@ -100,56 +118,60 @@ export default class CallInCallOut extends SfdxCommand { type: item.type, subType: item.subType, fileName: item.fileName, - nameSpace: item.fileName.includes("__") ? item.fileName.split("__")[0] : "Custom", + nameSpace: item.fileName.includes('__') ? 
item.fileName.split('__')[0] : 'Custom', matches: item.matches, detail: Object.keys(item.detail) .map( (key: string) => key + - ": " + + ': ' + item.detail[key] .map( (extractedText: string) => extractedText - .replace(/(\r\n|\n|\r)/gm, "") // Remove new lines from result - .replace(/\s+/g, " "), // Replace multiple whitespaces by single whitespaces + .replace(/(\r\n|\n|\r)/gm, '') // Remove new lines from result + .replace(/\s+/g, ' ') // Replace multiple whitespaces by single whitespaces ) - .join(" | "), + .join(' | ') ) - .join(" || ") || "", + .join(' || ') || '', }; }); // Sort array const resultSorted = sortArray(result, { - by: ["type", "subType", "fileName", "matches"], - order: ["asc", "asc", "asc", "desc"], + by: ['type', 'subType', 'fileName', 'matches'], + order: ['asc', 'asc', 'asc', 'desc'], }); // Display as table const resultsLight = JSON.parse(JSON.stringify(resultSorted)); - console.table( + uxLog("action", this, c.cyan(`Found ${c.bold(resultsLight.length)} call-ins and call-outs.`)); + uxLogTable(this, resultsLight.map((item: any) => { delete item.detail; return item; - }), + }) ); // Generate output files const columns = [ - { key: "type", header: "IN/OUT" }, - { key: "subType", header: "Protocol" }, - { key: "fileName", header: "Apex" }, - { key: "nameSpace", header: "Namespace" }, - { key: "matches", header: "Number" }, - { key: "detail", header: "Detail" }, + { key: 'type', header: 'IN/OUT' }, + { key: 'subType', header: 'Protocol' }, + { key: 'fileName', header: 'Apex' }, + { key: 'nameSpace', header: 'Namespace' }, + { key: 'matches', header: 'Number' }, + { key: 'detail', header: 'Detail' }, ]; - const reportFiles = await generateReports(resultSorted, columns, this); + const reportFiles = await generateReports(resultSorted, columns, this, { + logFileName: 'callins-callouts-audit', + logLabel: 'CallIns and CallOuts Audit', + }); // Return an object to be displayed with --json return { - outputString: "Processed callIns and callOuts audit", 
+ outputString: 'Processed callIns and callOuts audit', result: resultSorted, reportFiles, }; diff --git a/src/commands/hardis/project/audit/duplicatefiles.ts b/src/commands/hardis/project/audit/duplicatefiles.ts index 92d8ed1c2..cf60dc4aa 100644 --- a/src/commands/hardis/project/audit/duplicatefiles.ts +++ b/src/commands/hardis/project/audit/duplicatefiles.ts @@ -1,70 +1,92 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as readFilesRecursive from "fs-readdir-recursive"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import readFilesRecursive from 'fs-readdir-recursive'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class AuditDuplicateFiles extends SfCommand { + public static title = 'Find duplicate sfdx files'; -export default class AuditDuplicateFiles extends SfdxCommand { - public static title = "Find duplicate sfdx files"; + public static description = ` +## Command Behavior - public static description = "Find duplicate files in sfdx folder (often from past @salesforce/cli bugs)"; +**Identifies and reports on duplicate file names within your Salesforce DX project folder.** - public static examples = ["$ sfdx hardis:project:audit:duplicatefiles"]; +This command helps detect instances where files with the same name exist in different directories within your SFDX project. While some duplicates are expected (e.g., metadata files for different components of the same object), others can be a result of past Salesforce CLI bugs or improper source control practices, leading to confusion and potential deployment issues. - protected static flagsConfig = { - path: flags.string({ - char: "p", +Key functionalities: + +- **File Scan:** Recursively scans a specified root path (defaults to the current working directory) for all files. +- **Duplicate Detection:** Identifies files that share the same name but reside in different locations. +- **Intelligent Filtering:** Accounts for known patterns where duplicate file names are legitimate (e.g., \`field-meta.xml\`, \`listView-meta.xml\`, \`recordType-meta.xml\`, \`webLink-meta.xml\` files within object subdirectories). +- **Reporting:** Outputs a JSON object detailing the detected duplicates, including the file name and the full paths of its occurrences. + +
+Technical explanations + +The command's technical implementation involves: + +- **File System Traversal:** Uses \`fs-readdir-recursive\` to list all files within the specified directory, excluding \`node_modules\`. +- **Duplicate Logic:** Iterates through the list of all files and compares their base names. If two files have the same base name but different full paths, they are considered potential duplicates. +- **Exclusion Logic:** The \`checkDoublingAllowed\` function contains regular expressions to identify specific file path patterns where duplicate names are acceptable (e.g., \`objects/Account/fields/MyField__c.field-meta.xml\` and \`objects/Contact/fields/MyField__c.field-meta.xml\`). This prevents false positives. +- **Data Structuring:** Organizes the results into a JavaScript object where keys are duplicate file names and values are arrays of their full paths. +
+`; + + public static examples = ['$ sf hardis:project:audit:duplicatefiles']; + + public static flags: any = { + path: Flags.string({ + char: 'p', default: process.cwd(), - description: "Root path to check", + description: 'Root path to check', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - protected static requiresProject = false; + public static requiresProject = false; /* jscpd:ignore-end */ protected matchResults: any[] = []; public async run(): Promise { - const pathToBrowser = this.flags.path || process.cwd(); - this.debug = this.flags.debug || false; + const { flags } = await this.parse(AuditDuplicateFiles); + const pathToBrowser = flags.path || process.cwd(); // List all files const allFiles = readFilesRecursive(pathToBrowser) - .filter((file) => !file.includes("node_modules")) + .filter((file) => !file.includes('node_modules')) .map((file) => { return { fullPath: file, fileName: path.basename(file) }; }); - uxLog(this, c.cyan(`Checking for duplicate file names in ${c.bold(pathToBrowser)}. Files: ${c.bold(allFiles.length)}`)); + uxLog( + "action", + this, + c.cyan(`Checking for duplicate file names in ${c.bold(pathToBrowser)}. 
Files: ${c.bold(allFiles.length)}`) + ); // Find duplicates - const duplicates = {}; + const duplicates: Record = {}; for (const file of allFiles) { const doublingFiles = allFiles.filter( - (f) => f.fileName === file.fileName && f.fullPath !== file.fullPath && !this.checkDoublingAllowed(file, f), + (f) => f.fileName === file.fileName && f.fullPath !== file.fullPath && !this.checkDoublingAllowed(file, f) ); if (doublingFiles.length > 0) { const doublingFullPaths = duplicates[file.fileName] || []; @@ -72,7 +94,22 @@ export default class AuditDuplicateFiles extends SfdxCommand { duplicates[file.fileName] = doublingFullPaths; } } - uxLog(this, JSON.stringify(duplicates, null, 2)); + // Build summary + const duplicateCount = Object.keys(duplicates).length; + if (duplicateCount > 0) { + const duplicateList = Object.entries(duplicates) + .map(([fileName, paths]) => `${c.bold(fileName)}:\n - ${paths.join('\n - ')}`) + .join('\n'); + uxLog( + "action", + this, + c.cyan(`Found ${c.bold(duplicateCount)} duplicate file names in ${c.bold(pathToBrowser)}.`) + ); + uxLog("warning", this, c.yellow(`Duplicate files:\n${duplicateList}`)); + } + else { + uxLog("action", this, c.cyan(`No duplicate file names found in ${c.bold(pathToBrowser)}.`)); + } return { duplicates: duplicates }; } diff --git a/src/commands/hardis/project/audit/remotesites.ts b/src/commands/hardis/project/audit/remotesites.ts index 9084d3635..ea0045e08 100644 --- a/src/commands/hardis/project/audit/remotesites.ts +++ b/src/commands/hardis/project/audit/remotesites.ts @@ -1,83 +1,99 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as psl from "psl"; -import * as sortArray from "sort-array"; -import * as url from "url"; -import { catchMatches, generateReports, uxLog } from "../../../../common/utils"; +import { 
SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import fs from 'fs-extra'; +import c from 'chalk'; +import { glob } from 'glob'; +import * as psl from 'psl'; +import sortArray from 'sort-array'; +import * as url from 'url'; +import { catchMatches, generateReports, uxLog, uxLogTable } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class RemoteSites extends SfCommand { + public static title = 'Audit Remote Sites'; -export default class RemoteSites extends SfdxCommand { - public static title = "Audit Remote Sites"; + public static description: string = ` +## Command Behavior - public static description = messages.getMessage("auditRemoteSites"); +**Audits Salesforce Remote Site Settings in your project, providing a comprehensive overview of external endpoints accessed by your Salesforce org.** - public static examples = ["$ sfdx hardis:project:audit:remotesites"]; +This command is crucial for security reviews, compliance checks, and understanding the external integrations of your Salesforce environment. It helps identify all configured remote sites, their URLs, activity status, and associated protocols. + +Key functionalities: + +- **Remote Site Discovery:** Scans your project for RemoteSiteSetting metadata files (.remoteSite-meta.xml or .remoteSite). 
+- **URL Extraction:** Extracts the URL, active status, and description for each remote site. +- **Protocol and Domain Identification:** Determines the protocol (HTTP/HTTPS) and extracts the domain from each URL, providing a clearer picture of the external systems being accessed. +- **Reporting:** Generates a CSV report summarizing all detected remote sites, including their protocol, domain, name, URL, active status, and description. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses \`glob\` to find all RemoteSiteSetting metadata files within the project. +- **Content Analysis:** Reads the content of each XML file and uses regular expressions (/(.*?)<\\/url>/gim, /(.*?)<\\/isActive>/gim, /(.*?)<\\/description>/gim) to extract relevant details. +- **\`catchMatches\` Utility:** This utility function is used to apply the defined regular expressions to each file and extract all matching occurrences. +- **URL Parsing:** Uses Node.js's \`url\` module to parse the extracted URLs and \`psl\` (Public Suffix List) to extract the domain name from the hostname. +- **Data Structuring:** Organizes the extracted information into a structured format, including the remote site's name, file name, namespace, URL, active status, description, protocol, and domain. +- **Reporting:** Uses \`generateReports\` to create a CSV report and display a table in the console, summarizing the audit findings. +
+`; + + public static examples = ['$ sf hardis:project:audit:remotesites']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { + public static flags: any = { // flag with a value (-n, --name=VALUE) - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ protected matchResults: any[] = []; public async run(): Promise { - this.debug = this.flags.debug || false; - - const pattern = "**/*.{remoteSite-meta.xml,remoteSite}"; + const pattern = '**/*.{remoteSite-meta.xml,remoteSite}'; const catchers = [ { - type: "", - subType: "", + type: '', + subType: '', regex: /(.*?)<\/url>/gim, detail: [ - { name: "url", regex: /(.*?)<\/url>/gims }, - { name: "active", regex: /(.*?)<\/isActive>/gims }, + { name: 'url', regex: /(.*?)<\/url>/gims }, + { name: 'active', regex: /(.*?)<\/isActive>/gims }, { - name: "description", + name: 'description', regex: /(.*?)<\/description>/gimsu, }, ], }, ]; - const remoteSiteSettingsFiles = await glob(pattern); + const remoteSiteSettingsFiles = await glob(pattern, 
{ ignore: GLOB_IGNORE_PATTERNS }); this.matchResults = []; - uxLog(this, `Browsing ${remoteSiteSettingsFiles.length} files`); + uxLog("other", this, `Browsing ${remoteSiteSettingsFiles.length} files`); // Loop in files for (const file of remoteSiteSettingsFiles) { - const fileText = await fs.readFile(file, "utf8"); + const fileText = await fs.readFile(file, 'utf8'); // Loop on criteria to find matches in this file for (const catcher of catchers) { const catcherMatchResults = await catchMatches(catcher, file, fileText, this); @@ -88,49 +104,53 @@ export default class RemoteSites extends SfdxCommand { // Format result const result: any[] = this.matchResults.map((item: any) => { return { - name: item.fileName.replace(".remoteSite-meta.xml", "").replace(".remoteSite", ""), + name: item.fileName.replace('.remoteSite-meta.xml', '').replace('.remoteSite', ''), fileName: item.fileName, - nameSpace: item.fileName.includes("__") ? item.fileName.split("__")[0] : "Custom", + nameSpace: item.fileName.includes('__') ? item.fileName.split('__')[0] : 'Custom', matches: item.matches, - url: item.detail?.url ? item.detail.url[0] : "", - active: item.detail?.active ? "yes" : "no", - description: item.detail?.description ? item.detail.description[0] : "", - protocol: item.detail.url[0].includes("https") ? "HTTPS" : "HTTP", - domain: psl.parse(new url.URL(item.detail.url[0]).hostname).domain, + url: item.detail?.url ? item.detail.url[0] : '', + active: item.detail?.active ? 'yes' : 'no', + description: item.detail?.description ? item.detail.description[0] : '', + protocol: item.detail.url[0].includes('https') ? 
'HTTPS' : 'HTTP', + domain: (psl.parse(new url.URL(item.detail.url[0]).hostname) as any)?.domain || 'Domain not found', }; }); // Sort array const resultSorted = sortArray(result, { - by: ["protocol", "domain", "name", "active", "description"], - order: ["asc", "asc", "asc", "desc", "asc"], + by: ['protocol', 'domain', 'name', 'active', 'description'], + order: ['asc', 'asc', 'asc', 'desc', 'asc'], }); // Display as table + uxLog("action", this, c.cyan(`Found ${c.bold(resultSorted.length)} remote sites.`)); const resultsLight = JSON.parse(JSON.stringify(resultSorted)); - console.table( + uxLogTable(this, resultsLight.map((item: any) => { delete item.fileName; delete item.detail; delete item.matches; return item; - }), + }) ); // Export into csv & excel file const columns = [ - { key: "protocol", header: "Protocol" }, - { key: "domain", header: "Domain" }, - { key: "name", header: "Name" }, - { key: "url", header: "URL" }, - { key: "active", header: "Active" }, - { key: "description", header: "Description" }, + { key: 'protocol', header: 'Protocol' }, + { key: 'domain', header: 'Domain' }, + { key: 'name', header: 'Name' }, + { key: 'url', header: 'URL' }, + { key: 'active', header: 'Active' }, + { key: 'description', header: 'Description' }, ]; - const reportFiles = await generateReports(resultSorted, columns, this); + const reportFiles = await generateReports(resultSorted, columns, this, { + logFileName: 'remote-sites-audit', + logLabel: 'Remote Sites Audit', + }); // Return an object to be displayed with --json return { - outputString: "Processed callIns and callOuts audit", + outputString: 'Processed callIns and callOuts audit', result: resultSorted, reportFiles, }; diff --git a/src/commands/hardis/project/clean/emptyitems.ts b/src/commands/hardis/project/clean/emptyitems.ts index 86d98f763..3e59452d3 100644 --- a/src/commands/hardis/project/clean/emptyitems.ts +++ b/src/commands/hardis/project/clean/emptyitems.ts @@ -1,82 +1,103 @@ /* jscpd:ignore-start */ 
-import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; -import { parseXmlFile } from "../../../../common/utils/xmlUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class CleanEmptyItems extends SfdxCommand { - public static title = "Clean retrieved empty items in dx sources"; - - public static description = "Remove unwanted empty items within sfdx project sources"; - - public static examples = ["$ sfdx hardis:project:clean:emptyitems"]; - - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { parseXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanEmptyItems extends SfCommand { + public static title = 'Clean retrieved empty items in dx sources'; + + public static description: string = ` +## Command Behavior + +**Removes empty 
or irrelevant metadata items from your Salesforce DX project sources.** + +This command helps maintain a clean and efficient Salesforce codebase by deleting metadata files that are essentially empty or contain no meaningful configuration. These files can sometimes be generated during retrieval processes or remain after refactoring, contributing to unnecessary clutter in your project. + +Key functionalities: + +- **Targeted Cleaning:** Specifically targets and removes empty instances of: + - Global Value Set Translations (\`.globalValueSetTranslation-meta.xml\`) + - Standard Value Sets (\`.standardValueSet-meta.xml\`) + - Sharing Rules (\`.sharingRules-meta.xml\`) +- **Content-Based Deletion:** It checks the XML content of these files for the presence of specific tags (e.g., \`valueTranslation\` for Global Value Set Translations) to determine if they are truly empty or lack relevant data. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses \`glob\` to find files matching predefined patterns for Global Value Set Translations, Standard Value Sets, and Sharing Rules within the specified root folder (defaults to \`force-app\`). +- **XML Parsing:** For each matching file, it reads and parses the XML content using \`parseXmlFile\`. +- **Content Validation:** It then checks the parsed XML object for the existence of specific nested properties (e.g., \`xmlContent.GlobalValueSetTranslation.valueTranslation\`). If these properties are missing or empty, the file is considered empty. +- **File Deletion:** If a file is determined to be empty, it is removed from the file system using \`fs.remove\`. +- **Logging:** Provides clear messages about which files are being removed and a summary of the total number of items cleaned. +
+`; + + public static examples = ['$ sf hardis:project:clean:emptyitems']; + + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanEmptyItems); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Removing empty dx managed source files`)); + uxLog("action", this, c.cyan(`Removing empty dx managed source files`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const emptyConstraints = [ - { globPattern: `/**/*.globalValueSetTranslation-meta.xml`, tags: ["GlobalValueSetTranslation", "valueTranslation"] }, - { globPattern: `/**/*.standardValueSet-meta.xml`, tags: 
["StandardValueSet", "standardValue"] }, - { globPattern: `/**/*.sharingRules-meta.xml`, tags: ["SharingRules", "sharingOwnerRules"] }, + { + globPattern: `/**/*.globalValueSetTranslation-meta.xml`, + tags: ['GlobalValueSetTranslation', 'valueTranslation'], + }, + { globPattern: `/**/*.standardValueSet-meta.xml`, tags: ['StandardValueSet', 'standardValue'] }, + { globPattern: `/**/*.sharingRules-meta.xml`, tags: ['SharingRules', 'sharingOwnerRules'] }, ]; let counter = 0; for (const emptyConstraint of emptyConstraints) { const findStandardValueSetPattern = rootFolder + emptyConstraint.globPattern; - const matchingCustomFiles = await glob(findStandardValueSetPattern, { cwd: process.cwd() }); + const matchingCustomFiles = await glob(findStandardValueSetPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const matchingCustomFile of matchingCustomFiles) { const xmlContent = await parseXmlFile(matchingCustomFile); const tag1 = xmlContent[emptyConstraint.tags[0]]; if (!(tag1 && tag1[emptyConstraint.tags[1]])) { await fs.remove(matchingCustomFile); - uxLog(this, c.cyan(`Removed empty item ${c.yellow(matchingCustomFile)}`)); + uxLog("action", this, c.cyan(`Removed empty item ${c.yellow(matchingCustomFile)}`)); counter++; } } @@ -84,7 +105,7 @@ export default class CleanEmptyItems extends SfdxCommand { // Summary const msg = `Removed ${c.green(c.bold(counter))} hidden source items`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/filter-xml-content.ts b/src/commands/hardis/project/clean/filter-xml-content.ts new file mode 100644 index 000000000..39af4aa63 --- /dev/null +++ b/src/commands/hardis/project/clean/filter-xml-content.ts @@ -0,0 +1,219 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import * as util from 
'util'; +import * as xml2js from 'xml2js'; +import { AnyJson } from '@salesforce/ts-types'; +import { uxLog } from '../../../../common/utils/index.js'; +import { writeXmlFile } from '../../../../common/utils/xmlUtils.js'; + +// The code of this method is awful... it's migrated from sfdx-essentials, written when async / await were not existing ^^ +export class FilterXmlContent extends SfCommand { + public static readonly description = ` +## Command Behavior + +**Filters the content of Salesforce metadata XML files to remove specific elements, enabling more granular deployments.** + +This command addresses a common challenge in Salesforce development: deploying only a subset of metadata from an XML file when the target org might not support all elements or when certain elements are not desired. It allows you to define rules in a JSON configuration file to remove unwanted XML nodes. + +Key functionalities: + +- **Configurable Filtering:** Uses a JSON configuration file (e.g., \`filter-config.json\`) to define which XML elements to remove. This configuration specifies the XML tags to target and the values within those tags that should trigger removal. +- **Targeted File Processing:** Processes XML files within a specified input folder (defaults to current directory) and writes the filtered content to an output folder. +- **Example Use Cases:** Useful for scenarios like: + - Removing references to features not enabled in the target org. + - Stripping out specific profile permissions or field-level security settings. + - Cleaning up metadata that is not relevant to a particular deployment. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** Reads the \`filter-config.json\` file, which contains an array of \`filters\`. Each filter defines a \`name\`, \`description\`, \`folders\` (where to apply the filter), \`file_extensions\`, and an \`exclude_list\`. +- **File System Operations:** Copies the input folder to an output folder (if different) to avoid modifying original files directly. It then iterates through the files in the output folder that match the specified file extensions. +- **XML Parsing and Manipulation:** For each matching XML file: + - It uses \`xml2js.Parser\` to parse the XML content into a JavaScript object. + - It recursively traverses the JavaScript object, applying the \`filterElement\` function. + - The \`filterElement\` function checks for \`type_tag\` and \`identifier_tag\` defined in the \`exclude_list\`. If a match is found and the value is in the \`excludeDef.values\`, the element is removed from the XML structure. + - After filtering, it uses \`writeXmlFile\` to write the modified JavaScript object back to the XML file. +- **Logging:** Provides detailed logs about the filtering process, including which files are being processed and which elements are being filtered. +- **Summary Reporting:** Tracks and reports on the files that have been updated due to filtering. +
+`; + public static readonly examples = [ + 'sf hardis:project:clean:filter-xml-content -i "./mdapi_output"', + 'sf hardis:project:clean:filter-xml-content -i "retrieveUnpackaged"', + ]; + public static readonly requiresProject = true; + public static readonly flags: any = { + configfile: Flags.string({ + char: 'c', + description: 'Config JSON file path', + }), + inputfolder: Flags.string({ + char: 'i', + description: 'Input folder (default: "." )', + }), + outputfolder: Flags.string({ + char: 'f', + description: 'Output folder (default: parentFolder + _xml_content_filtered)', + }), + debug: Flags.boolean({ + default: false, + description: 'debug', + }), + websocket: Flags.string({ + description: 'websocket', + }), + }; + + // Input params properties + public configFile: string; + public inputFolder: string; + public outputFolder: string; + + // Internal properties + public smmryUpdatedFiles = {}; + public smmryResult = { filterResults: {} }; + + public async run(): Promise { + const { flags } = await this.parse(FilterXmlContent); + this.configFile = flags.configfile || './filter-config.json'; + this.inputFolder = flags.inputfolder || '.'; + this.outputFolder = + flags.outputfolder || + './' + path.dirname(this.inputFolder) + '/' + path.basename(this.inputFolder) + '_xml_content_filtered'; + uxLog( + "log", + this, + c.grey( + `Initialize XML content filtering of ${this.inputFolder}, using ${c.bold(this.configFile)} , into ${this.outputFolder + }` + ) + ); + // Read json config file + const filterConfig = fs.readJsonSync(this.configFile); + if (flags.debug) { + uxLog("log", this, c.grey('Filtering config file content:\n' + JSON.stringify(filterConfig, null, 2))); + } + + // Create output folder/empty it if existing + if (fs.existsSync(this.outputFolder) && this.outputFolder !== this.inputFolder) { + uxLog("log", this, c.grey('Empty output folder ' + this.outputFolder)); + fs.emptyDirSync(this.outputFolder); + } else if (!fs.existsSync(this.outputFolder)) { + 
uxLog("log", this, c.grey('Create output folder ' + this.outputFolder)); + fs.mkdirSync(this.outputFolder); + } + + // Copy input folder to output folder + if (this.outputFolder !== this.inputFolder) { + uxLog("other", this, 'Copy in output folder ' + this.outputFolder); + fs.copySync(this.inputFolder, this.outputFolder); + } + + // Browse filters + filterConfig.filters.forEach((filter) => { + uxLog("log", this, c.grey(filter.name + ' (' + filter.description + ')...')); + // Browse filter folders + filter.folders.forEach((filterFolder) => { + // Browse folder files + if (!fs.existsSync(this.outputFolder + '/' + filterFolder)) { + return; + } + const folderFiles = fs.readdirSync(this.outputFolder + '/' + filterFolder); + folderFiles.forEach((file) => { + // Build file name + const fpath = file.replace(/\\/g, '/'); + const browsedFileExtension = fpath.substring(fpath.lastIndexOf('.') + 1); + filter.file_extensions.forEach((filterFileExt) => { + if (browsedFileExtension === filterFileExt) { + // Found a matching file, process it + const fullFilePath = this.outputFolder + '/' + filterFolder + '/' + fpath; + uxLog("log", this, c.grey('- ' + fullFilePath)); + this.filterXmlFromFile(filter, fullFilePath); + } + }); + }); + }); + }); + this.smmryResult.filterResults = this.smmryUpdatedFiles; + + // Display results as JSON + uxLog("log", this, c.grey('Filtering results:' + JSON.stringify(this.smmryResult))); + return {}; + } + + // Filter XML content of the file + public filterXmlFromFile(filter, file) { + const parser = new xml2js.Parser(); + const data = fs.readFileSync(file); + parser.parseString(data, (err2, fileXmlContent) => { + uxLog("other", this, 'Parsed XML \n' + util.inspect(fileXmlContent, false, null)); + Object.keys(fileXmlContent).forEach((eltKey) => { + fileXmlContent[eltKey] = this.filterElement(fileXmlContent[eltKey], filter, file); + }); + if (this.smmryUpdatedFiles[file] != null && this.smmryUpdatedFiles[file].updated === true) { + writeXmlFile(file, 
fileXmlContent); + uxLog("log", this, 'Updated ' + file); + } + }); + } + + public filterElement(elementValue, filter, file) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + // Object case + if (typeof elementValue === 'object') { + Object.keys(elementValue).forEach((eltKey) => { + let found = false; + // Browse filter exclude_list for elementValue + filter.exclude_list.forEach((excludeDef) => { + if (excludeDef.type_tag === eltKey) { + // Found matching type tag + found = true; + uxLog("other", this, '\nFound type: ' + eltKey); + uxLog("other", this, elementValue[eltKey]); + // Filter type values + const typeValues = elementValue[eltKey]; + const newTypeValues: any[] = []; + typeValues.forEach((typeItem) => { + // If identifier tag not found, do not filter and avoid crash + if ( + typeItem[excludeDef.identifier_tag] && + (excludeDef.values.includes(typeItem[excludeDef.identifier_tag]) || + excludeDef.values.includes(typeItem[excludeDef.identifier_tag][0])) + ) { + uxLog("other", this, '----- filtered ' + typeItem[excludeDef.identifier_tag]); + if (self.smmryUpdatedFiles[file] == null) { + self.smmryUpdatedFiles[file] = { updated: true, excluded: {} }; + } + if (self.smmryUpdatedFiles[file].excluded[excludeDef.type_tag] == null) { + self.smmryUpdatedFiles[file].excluded[excludeDef.type_tag] = []; + } + self.smmryUpdatedFiles[file].excluded[excludeDef.type_tag].push(typeItem[excludeDef.identifier_tag][0]); + } else { + uxLog("other", this, '--- kept ' + typeItem[excludeDef.identifier_tag]); + newTypeValues.push(typeItem); + } + }); + elementValue[eltKey] = newTypeValues; + } + }); + if (!found) { + elementValue[eltKey] = self.filterElement(elementValue[eltKey], filter, file); + } + }); + } else if (Array.isArray(elementValue)) { + const newElementValue: any[] = []; + elementValue.forEach((element) => { + element = self.filterElement(element, filter, file); + newElementValue.push(element); + }); + elementValue = newElementValue; 
+ } + return elementValue; + } +} diff --git a/src/commands/hardis/project/clean/flowpositions.ts b/src/commands/hardis/project/clean/flowpositions.ts index 7fffc6c98..3b78906ea 100644 --- a/src/commands/hardis/project/clean/flowpositions.ts +++ b/src/commands/hardis/project/clean/flowpositions.ts @@ -1,22 +1,19 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { glob } from "glob"; -import * as path from "path"; -import * as fs from "fs-extra"; -import { uxLog } from "../../../../common/utils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class CleanListViews extends SfdxCommand { - public static title = "Clean Flow Positions"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { glob } from 'glob'; +import * as path from 'path'; +import fs from 'fs-extra'; +import { uxLog } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanFlowPositions extends SfCommand { + public static title = 'Clean Flow Positions'; public static description = `Replace all positions in Auto-Layout Flows by 0 to simplify conflicts management @@ -47,66 +44,61 @@ autoCleanTypes: \`\`\` `; - public static examples = ["$ sfdx 
hardis:project:clean:flowpositions"]; + public static examples = ['$ sf hardis:project:clean:flowpositions']; - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanFlowPositions); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Setting flows as Auto Layout and remove positions...`)); + uxLog("action", this, c.cyan(`Setting flows as Auto Layout and remove positions...`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const 
findManagedPattern = rootFolder + `/**/*.flow-meta.xml`; - const matchingFlows = await glob(findManagedPattern, { cwd: process.cwd() }); + const matchingFlows = await glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); let counter = 0; for (const flowMetadataFile of matchingFlows) { - const flowXml = await fs.readFile(flowMetadataFile, "utf8"); - if (flowXml.includes("AUTO_LAYOUT_CANVAS")) { - let updatedFlowXml = flowXml.replace(/([0-9]*)<\/locationX>/gm, "0"); - updatedFlowXml = updatedFlowXml.replace(/([0-9]*)<\/locationY>/gm, "0"); + const flowXml = await fs.readFile(flowMetadataFile, 'utf8'); + if (flowXml.includes('AUTO_LAYOUT_CANVAS')) { + let updatedFlowXml = flowXml.replace(/([0-9]*)<\/locationX>/gm, '0'); + updatedFlowXml = updatedFlowXml.replace(/([0-9]*)<\/locationY>/gm, '0'); if (updatedFlowXml !== flowXml) { await fs.writeFile(flowMetadataFile, updatedFlowXml); counter++; - uxLog(this, c.grey(`Removed positions from Flow ${flowMetadataFile}`)); + uxLog("log", this, c.grey(`Removed positions from Flow ${flowMetadataFile}`)); } } } // Summary const msg = `Updated ${c.green(c.bold(counter))} flows to remove positions`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/hiddenitems.ts b/src/commands/hardis/project/clean/hiddenitems.ts index f050deb88..9f627368d 100644 --- a/src/commands/hardis/project/clean/hiddenitems.ts +++ b/src/commands/hardis/project/clean/hiddenitems.ts @@ -1,87 +1,106 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; - -// Initialize Messages with the current 
plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class CleanHiddenItems extends SfdxCommand { - public static title = "Clean retrieved hidden items in dx sources"; - - public static description = "Remove unwanted hidden items within sfdx project sources"; - - public static examples = ["$ sfdx hardis:project:clean:hiddenitems"]; - - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanHiddenItems extends SfCommand { + public static title = 'Clean retrieved hidden items in dx sources'; + + public static description: string = ` +## Command Behavior + +**Removes hidden or temporary metadata items from your Salesforce DX project sources.** + +This command helps clean up your local Salesforce project by deleting files that are marked as hidden or are temporary artifacts. These files can sometimes be generated by Salesforce CLI or other tools and are not intended to be part of your version-controlled source. 
+ +Key functionalities: + +- **Targeted File Scan:** Scans for files with specific extensions (\`.app\`, \`.cmp\`, \`.evt\`, \`.tokens\`, \`.html\`, \`.css\`, \`.js\`, \`.xml\`) within the specified root folder (defaults to \`force-app\`). +- **Hidden Content Detection:** Identifies files whose content starts with (hidden). This is a convention used by some Salesforce tools to mark temporary or internal files. +- **Component Folder Removal:** If a hidden file is part of a Lightning Web Component (LWC) or Aura component folder, the entire component folder is removed to ensure a complete cleanup. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses \`glob\` to find files matching the specified patterns within the \`folder\`. +- **Content Reading:** Reads the content of each file. +- **Hidden Marker Check:** Checks if the file content starts with the literal string (hidden). +- **Folder or File Removal:** If a file is identified as hidden: + - If it's within an lwc or aura component folder, the entire component folder is removed using \`fs.remove\`. + - Otherwise, only the individual file is removed. +- **Logging:** Provides clear messages about which items are being removed and a summary of the total number of hidden items cleaned. +
+`; + + public static examples = ['$ sf hardis:project:clean:hiddenitems']; + + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanHiddenItems); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Removing hidden dx managed source files`)); + uxLog("action", this, c.cyan(`Removing hidden dx managed source files`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const findManagedPattern = rootFolder + `/**/*.{app,cmp,evt,tokens,html,css,js,xml}`; - const matchingCustomFiles = await glob(findManagedPattern, { cwd: process.cwd() }); + const matchingCustomFiles = await 
glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); let counter = 0; for (const matchingCustomFile of matchingCustomFiles) { if (!fs.existsSync(matchingCustomFile)) { continue; } - const fileContent = await fs.readFile(matchingCustomFile, "utf8"); - if (fileContent.startsWith("(hidden)")) { + const fileContent = await fs.readFile(matchingCustomFile, 'utf8'); + if (fileContent.startsWith('(hidden)')) { const componentFolder = path.dirname(matchingCustomFile); const folderSplit = componentFolder.split(path.sep); - const toRemove = folderSplit.includes("lwc") || folderSplit.includes("aura") ? componentFolder : matchingCustomFile; + const toRemove = + folderSplit.includes('lwc') || folderSplit.includes('aura') ? componentFolder : matchingCustomFile; await fs.remove(toRemove); - uxLog(this, c.cyan(`Removed hidden item ${c.yellow(toRemove)}`)); + uxLog("action", this, c.cyan(`Removed hidden item ${c.yellow(toRemove)}`)); counter++; } } // Summary const msg = `Removed ${c.green(c.bold(counter))} hidden source items`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/listviews.ts b/src/commands/hardis/project/clean/listviews.ts index 15292b052..19ac63748 100644 --- a/src/commands/hardis/project/clean/listviews.ts +++ b/src/commands/hardis/project/clean/listviews.ts @@ -1,88 +1,80 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; -import { parseXmlFile, writeXmlFile } from "../../../../common/utils/xmlUtils"; -import { getConfig, setConfig } from "../../../../config"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 
+import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { parseXmlFile, writeXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { getConfig, setConfig } from '../../../../config/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CleanListViews extends SfCommand { + public static title = 'Replace Mine by Everything in ListViews'; -export default class CleanListViews extends SfdxCommand { - public static title = "Replace Mine by Everything in ListViews"; + public static description = 'Replace Mine by Everything in ListView, and log the replacements in sfdx-hardis.yml'; - public static description = "Replace Mine by Everything in ListView, and log the replacements in sfdx-hardis.yml"; + public static examples = ['$ sf hardis:project:clean:listviews']; - public static examples = ["$ sfdx hardis:project:clean:listviews"]; - - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: 
messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanListViews); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Replacing 'Mine' by 'Everything' in ListViews for deployments to pass`)); + uxLog("action", this, c.cyan(`Replacing 'Mine' by 'Everything' in ListViews for deployments to pass`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const findManagedPattern = rootFolder + `/**/*.listView-meta.xml`; - const matchingListViews = await glob(findManagedPattern, { cwd: process.cwd() }); + const matchingListViews = await glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); let counter = 0; - const config = await getConfig("project"); + const config = await getConfig('project'); let listViewsMine = config.listViewsToSetToMine || []; for (const listViewfile of matchingListViews) { const listViewXml = await 
parseXmlFile(listViewfile); - if (listViewXml.ListView?.filterScope[0] === "Mine") { - listViewXml.ListView.filterScope[0] = "Everything"; - uxLog(this, c.grey(`replaced Mine by Everything in ListView ${listViewXml}`)); + if (listViewXml.ListView?.filterScope[0] === 'Mine') { + listViewXml.ListView.filterScope[0] = 'Everything'; + uxLog("log", this, c.grey(`replaced Mine by Everything in ListView ${listViewXml}`)); await writeXmlFile(listViewfile, listViewXml); - listViewsMine.push(path.relative(process.cwd(), listViewfile).replace(/\\/g, "/")); + listViewsMine.push(path.relative(process.cwd(), listViewfile).replace(/\\/g, '/')); counter++; } } listViewsMine = [...new Set(listViewsMine)]; // Make unique - await setConfig("project", { listViewsToSetToMine: listViewsMine }); + await setConfig('project', { listViewsToSetToMine: listViewsMine }); // Summary const msg = `Replaced ${c.green(c.bold(counter))} Mine by Everything in ListViews`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/manageditems.ts b/src/commands/hardis/project/clean/manageditems.ts index e9016d758..d448e756c 100644 --- a/src/commands/hardis/project/clean/manageditems.ts +++ b/src/commands/hardis/project/clean/manageditems.ts @@ -1,79 +1,96 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class CleanManagedItems extends SfdxCommand { - public static title = "Clean retrieved managed items in dx sources"; - - public static description = "Remove unwanted managed items within sfdx project sources"; - - public static examples = ["$ sfdx hardis:project:clean:manageditems --namespace crta"]; - - protected static flagsConfig = { - namespace: flags.string({ - char: "n", - default: "", - description: "Namespace to remove", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanManagedItems extends SfCommand { + public static title = 'Clean retrieved managed items in dx sources'; + + public static description: string = ` +## Command Behavior + +**Removes unwanted managed package items from your Salesforce DX project sources.** + +This command helps clean up your local Salesforce project by deleting metadata files that belong to a specific managed package namespace. This is particularly useful when you retrieve metadata from an org that contains managed packages, and you only want to keep the unmanaged or custom metadata in your local repository. + +Key functionalities: + +- **Namespace-Based Filtering:** Requires a \`--namespace\` flag to specify which managed package namespace's files should be removed. 
+- **Targeted File Deletion:** Scans for files and folders that start with the specified namespace prefix (e.g., \`yourNamespace__*\`). +- **Intelligent Folder Handling:** Prevents the deletion of managed folders if they contain local custom items. This ensures that if you have custom metadata within a managed package's folder structure, only the managed components are removed, preserving your local customizations. +- **Object Metadata Preservation:** Specifically, it will not remove .object-meta.xml files if there are local custom items defined within that object's folder. + +
+Technical explanations + +The command's technical implementation involves: + +- **Namespace Validation:** Ensures that a namespace is provided, throwing an \`SfError\` if it's missing. +- **File Discovery:** Uses \`glob\` to find all files and directories within the specified \`folder\` (defaults to \`force-app\`) that match the managed package namespace pattern (\`**/\${this.namespace}__*\`). +- **Folder Content Check:** For identified managed folders, the \`folderContainsLocalItems\` function is called. This function uses \`glob\` again to check for the presence of any files within that folder that *do not* start with the managed package namespace, indicating local customizations. +- **Conditional Deletion:** Based on the \`folderContainsLocalItems\` check, it conditionally removes files and folders using \`fs.remove\`. If a managed folder contains local items, it is skipped to prevent accidental deletion of custom work. +- **Logging:** Provides clear messages about which managed items are being removed. +
+`; + + public static examples = ['$ sf hardis:project:clean:manageditems --namespace crta']; + + public static flags: any = { + namespace: Flags.string({ + char: 'n', + default: '', + description: 'Namespace to remove', }), - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected namespace: string; protected folder: string; protected debugMode = false; public async run(): Promise { - this.namespace = this.flags.namespace || ""; - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanManagedItems); + this.namespace = flags.namespace || ''; + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; - if (this.namespace === "") { - throw new SfdxError("namespace argument is mandatory"); + if (this.namespace === '') { + throw new 
SfError('namespace argument is mandatory'); } // Delete standard files when necessary - uxLog(this, c.cyan(`Removing unwanted dx managed source files with namespace ${c.bold(this.namespace)}...`)); + uxLog("action", this, c.cyan(`Removing unwanted dx managed source files with namespace ${c.bold(this.namespace)}...`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const findManagedPattern = rootFolder + `/**/${this.namespace}__*`; - const matchingCustomFiles = await glob(findManagedPattern, { cwd: process.cwd() }); + const matchingCustomFiles = await glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const matchingCustomFile of matchingCustomFiles) { if (!fs.existsSync(matchingCustomFile)) { continue; @@ -86,23 +103,23 @@ export default class CleanManagedItems extends SfdxCommand { } } // Keep .object-meta.xml item if there are local custom items defined on it - if (matchingCustomFile.endsWith(".object-meta.xml")) { + if (matchingCustomFile.endsWith('.object-meta.xml')) { const localItems = await this.folderContainsLocalItems(path.dirname(matchingCustomFile)); if (localItems) { continue; } } await fs.remove(matchingCustomFile); - uxLog(this, c.cyan(`Removed managed item ${c.yellow(matchingCustomFile)}`)); + uxLog("action", this, c.cyan(`Removed managed item ${c.yellow(matchingCustomFile)}`)); } // Return an object to be displayed with --json - return { outputString: "Cleaned managed items from sfdx project" }; + return { outputString: 'Cleaned managed items from sfdx project' }; } private async folderContainsLocalItems(folder: string): Promise { // Do not remove managed folders when there are local custom items defined on it - const subFiles = await glob(folder + "/**/*", { cwd: process.cwd() }); + const subFiles = await glob(folder + '/**/*', { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); const standardItems = subFiles.filter((file) => { return !fs.lstatSync(file).isDirectory() && 
!path.basename(file).startsWith(`${this.namespace}__`); }); diff --git a/src/commands/hardis/project/clean/minimizeprofiles.ts b/src/commands/hardis/project/clean/minimizeprofiles.ts index cd7ebc59b..144070eb8 100644 --- a/src/commands/hardis/project/clean/minimizeprofiles.ts +++ b/src/commands/hardis/project/clean/minimizeprofiles.ts @@ -1,31 +1,31 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; -import { minimizeProfile } from "../../../../common/utils/profileUtils"; -import { getConfig } from "../../../../config"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { minimizeProfile } from '../../../../common/utils/profileUtils.js'; +import { getConfig } from '../../../../config/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CleanMinimizeProfiles extends SfCommand { + public static title = 'Clean profiles of Permission Set attributes'; -export default class CleanMinimizeProfiles extends SfdxCommand { - public static title = "Clean profiles of Permission Set attributes"; + public static description = ` +## Command Behavior - public static description = `Remove all profile attributes that exist on Permission Sets +**Removes all profile attributes that exist on Permission Sets** It is a bad practice to define on Profiles elements that can be defined on Permission Sets. Salesforce will deprecate such capability in Spring 26. -Don't wait for that, and use minimizeProfiles cleaning to automatically remove from Profiles any permission that exists on a Permission Set ! +Don't wait for that, and use minimizeProfiles cleaning to automatically remove from Profiles any permission that exists on a Permission Set! The following XML tags are removed automatically: @@ -37,69 +37,64 @@ The following XML tags are removed automatically: - pageAccesses - userPermissions (except on Admin Profile) -You can override this list by defining a property minimizeProfilesNodesToRemove in your .sfdx-hardis.yml config file. +You can override this list by defining a property \`minimizeProfilesNodesToRemove\` in your \`.sfdx-hardis.yml\` config file. -You can also skip profiles using property skipMinimizeProfiles +You can also skip profiles using property \`skipMinimizeProfiles\`. 
-Example: +Example: \`\`\`yaml -skipMinimizeProfiles +skipMinimizeProfiles: - MyClient Customer Community Login User - MyClientPortail Profile \`\`\` `; - public static examples = ["$ sfdx hardis:project:clean:minimizeprofiles"]; + public static examples = ['$ sf hardis:project:clean:minimizeprofiles']; - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanMinimizeProfiles); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Removing profile attributes that exist 
on Permission Sets`)); + uxLog("action", this, c.cyan(`Removing profile attributes that exist on Permission Sets`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const findManagedPattern = rootFolder + `/**/*.profile-meta.xml`; - const matchingProfileFiles = await glob(findManagedPattern, { cwd: process.cwd() }); - const config = await getConfig("branch"); + const matchingProfileFiles = await glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); + const config = await getConfig('branch'); const skipMinimizeProfiles = config.skipMinimizeProfiles || []; let counter = 0; for (const profileFile of matchingProfileFiles) { - const profileName = path.basename(profileFile).replace(".profile-meta.xml", ""); + const profileName = path.basename(profileFile).replace('.profile-meta.xml', ''); if (skipMinimizeProfiles.includes(profileName)) { - uxLog(this, c.grey(`Skipped ${profileName} as found in skipMinimizeProfiles property`)); + uxLog("log", this, c.grey(`Skipped ${profileName} as found in skipMinimizeProfiles property`)); continue; } const res = await minimizeProfile(profileFile); @@ -110,11 +105,11 @@ skipMinimizeProfiles // Summary if (counter > 0) { - uxLog(this, c.yellow("Please make sure the attributes removed from Profiles are defined on Permission Sets")); + uxLog("warning", this, c.yellow('Please make sure the attributes removed from Profiles are defined on Permission Sets')); globalThis.displayProfilesWarning = true; } const msg = `Cleaned ${c.green(c.bold(counter))} profiles from attributes existing on Permission Sets`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/orgmissingitems.ts b/src/commands/hardis/project/clean/orgmissingitems.ts index 4cca15a71..3b8b5ab83 100644 --- a/src/commands/hardis/project/clean/orgmissingitems.ts +++ 
b/src/commands/hardis/project/clean/orgmissingitems.ts @@ -1,98 +1,130 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import { mergeObjectPropertyLists, uxLog } from "../../../../common/utils"; -import { buildOrgManifest } from "../../../../common/utils/deployUtils"; -import { promptOrg } from "../../../../common/utils/orgUtils"; -import { parsePackageXmlFile, parseXmlFile, writeXmlFile } from "../../../../common/utils/xmlUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class OrgMissingItems extends SfdxCommand { - public static title = "Clean SFDX items using target org definition"; - - public static description = "Clean SFDX sources from items present neither in target org nor local package.xml"; - - public static examples = ["$ sfdx hardis:project:clean:orgmissingitems"]; - - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import { mergeObjectPropertyLists, uxLog } from '../../../../common/utils/index.js'; +import { buildOrgManifest } from '../../../../common/utils/deployUtils.js'; +import { promptOrg } from '../../../../common/utils/orgUtils.js'; +import { parsePackageXmlFile, parseXmlFile, 
writeXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class OrgMissingItems extends SfCommand { + public static title = 'Clean SFDX items using target org definition'; + + public static description: string = ` +## Command Behavior + +**Cleans Salesforce DX project sources by removing metadata components that are not present in a target Salesforce org or the local \`package.xml\` file.** + +This command helps maintain a lean and accurate codebase by identifying and removing metadata that is either obsolete in the target org or not explicitly included in your project's \`package.xml\`. This is particularly useful for: + +- **Reducing Deployment Size:** Eliminating unnecessary metadata reduces the size of deployments, leading to faster deployments and fewer conflicts. +- **Ensuring Consistency:** Synchronizing your local codebase with the actual state of a Salesforce org. +- **Cleaning Up Orphaned Metadata:** Removing components that might have been deleted from the org but still exist in your local project. + +Key features: + +- **Target Org Integration:** Connects to a specified Salesforce org (or prompts for one) to retrieve its metadata manifest. +- **\`package.xml\` Comparison:** Compares your local project's metadata with the target org's metadata and your local \`package.xml\` to identify missing items. +- **Report Type Cleaning:** Specifically targets and cleans \`reportType-meta.xml\` files by removing references to fields or objects that are not present in the target org or your \`package.xml\`. + +
+Technical explanations + +The command's technical implementation involves several steps: + +- **Org Manifest Generation:** If not provided, it generates a full \`package.xml\` from the target Salesforce org using \`buildOrgManifest\`. +- **XML Parsing and Merging:** It parses the generated org manifest and merges it with the local \`package.xml\` and \`destructiveChanges.xml\` files to create a comprehensive list of existing and deleted metadata. +- **Metadata Analysis:** It iterates through specific metadata types (currently \`reportType-meta.xml\` files) within the configured source folder. +- **Field and Object Validation:** For each \`reportType-meta.xml\` file, it examines the columns and filters out references to custom fields or objects that are not found in the merged \`package.xml\` content or are marked for destruction. +- **XML Modification:** If changes are detected, it updates the \`reportType-meta.xml\` file by writing the modified XML content back to the file using \`writeXmlFile\`. +- **File System Operations:** It uses \`fs-extra\` for file system operations and \`glob\` for pattern matching to find relevant metadata files. +- **SOQL Queries:** The \`buildOrgManifest\` utility (used internally) performs SOQL queries to retrieve metadata information from the Salesforce org. +
+`; + + public static examples = ['$ sf hardis:project:clean:orgmissingitems']; + + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - packagexmlfull: flags.string({ - char: "p", + packagexmlfull: Flags.string({ + char: 'p', description: - "Path to packagexml used for cleaning.\nMust contain also standard CustomObject and CustomField elements.\nIf not provided, it will be generated from a remote org", + 'Path to packagexml used for cleaning.\nMust contain also standard CustomObject and CustomField elements.\nIf not provided, it will be generated from a remote org', }), - packagexmltargetorg: flags.string({ - char: "t", - description: "Target org username or alias to build package.xml (sfdx must be authenticated).\nIf not provided, will be prompted to the user.", + packagexmltargetorg: Flags.string({ + char: 't', + description: + 'Target org username or alias to build package.xml (SF CLI must be authenticated).\nIf not provided, will be prompted to the user.', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static 
requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ protected folder: string; - protected targetOrgUsernameAlias: string; - protected packageXmlFull: string; + protected targetOrgUsernameAlias: string | null; + protected packageXmlFull: string | null; protected debugMode = false; protected standardFields = [ - "Id", - "Name", - "Parent", - "IsActive", - "Alias", - "Owner", - "CreatedBy", - "CreatedDate", - "LastActivityDate", - "LastModifiedBy", - "LastModifiedDate", - "RecordType", + 'Id', + 'Name', + 'Parent', + 'IsActive', + 'Alias', + 'Owner', + 'CreatedBy', + 'CreatedDate', + 'LastActivityDate', + 'LastModifiedBy', + 'LastModifiedDate', + 'RecordType', ]; - protected standardSuffixes = ["Street", "City", "State", "PostalCode", "Country", "Latitude", "Longitude", "GeocodeAccuracy"]; + protected standardSuffixes = [ + 'Street', + 'City', + 'State', + 'PostalCode', + 'Country', + 'Latitude', + 'Longitude', + 'GeocodeAccuracy', + ]; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.debugMode = this.flags.debug || false; - this.targetOrgUsernameAlias = this.flags.packagexmltargetorg || null; - this.packageXmlFull = this.flags.packagexmlfull || null; + const { flags } = await this.parse(OrgMissingItems); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; + this.targetOrgUsernameAlias = flags.packagexmltargetorg || null; + this.packageXmlFull = flags.packagexmlfull || null; if (this.packageXmlFull === null) { // Request user to select an org if not provided if (this.targetOrgUsernameAlias == null) { - const targetOrg = await promptOrg(this, { devHub: false, setDefault: false }); + const targetOrg = await promptOrg(this, { devHub: false, setDefault: false, defaultOrgUsername: flags['target-org']?.getUsername() }); this.targetOrgUsernameAlias = targetOrg.username; } this.packageXmlFull = await buildOrgManifest(this.targetOrgUsernameAlias); @@ -100,17 +132,17 
@@ export default class OrgMissingItems extends SfdxCommand { let packageXmlContent = await parsePackageXmlFile(this.packageXmlFull); // Merge with local package.xml content - if (fs.existsSync("./manifest/package.xml")) { - const localPackageXmlContent = await parsePackageXmlFile("./manifest/package.xml"); + if (fs.existsSync('./manifest/package.xml')) { + const localPackageXmlContent = await parsePackageXmlFile('./manifest/package.xml'); packageXmlContent = mergeObjectPropertyLists(packageXmlContent, localPackageXmlContent, { sort: true }); } // Build destructiveChanges let destructiveChangesContent = {}; - if (fs.existsSync("./manifest/destructiveChanges.xml")) { - destructiveChangesContent = await parsePackageXmlFile("./manifest/destructiveChanges.xml"); + if (fs.existsSync('./manifest/destructiveChanges.xml')) { + destructiveChangesContent = await parsePackageXmlFile('./manifest/destructiveChanges.xml'); } // Build additional lists - const packageXmlAllFields = packageXmlContent["CustomField"].map((customField) => customField.split(".")[1]); + const packageXmlAllFields = packageXmlContent['CustomField'].map((customField) => customField.split('.')[1]); // const destructiveChangesAllFields = (destructiveChangesContent["CustomField"] || []).map(customField => customField.split('.')[1]); // Clean report types @@ -119,11 +151,12 @@ export default class OrgMissingItems extends SfdxCommand { const patternReportType = this.folder + `/**/reportTypes/*.reportType-meta.xml`; const matchFilesPattern = await glob(patternReportType, { cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS }); - uxLog(this, `Processing reportTypes...`); + uxLog("log", this, `Processing reportTypes...`); for (const reportTypeFile of matchFilesPattern) { if (this.debugMode) { - uxLog(this, `Processing ${reportTypeFile}...`); + uxLog("other", this, `Processing ${reportTypeFile}...`); } let changed = false; const reportType = await parseXmlFile(reportTypeFile); @@ -133,24 +166,24 @@ export default 
class OrgMissingItems extends SfdxCommand { // Filter columns referring to fields not in package.xml of target org + local package.xml section.columns = section.columns.filter((column) => { const object = column.table[0]; - const field = column.field[0].split(".")[0]; + const field = column.field[0].split('.')[0]; const objectField = `${object}.${field}`; - if ((destructiveChangesContent["CustomObject"] || []).includes(object)) { + if ((destructiveChangesContent['CustomObject'] || []).includes(object)) { return false; } - const objectFound = (packageXmlContent["CustomObject"] || []).includes(object); - const fieldFound = (packageXmlContent["CustomField"] || []).includes(objectField); + const objectFound = (packageXmlContent['CustomObject'] || []).includes(object); + const fieldFound = (packageXmlContent['CustomField'] || []).includes(objectField); const isStandardTechField = this.standardFields.includes(field); const isStandardSubField = this.standardSuffixes.filter((suffix) => field.endsWith(suffix)).length > 0; if ( (objectFound && (fieldFound || isStandardTechField || isStandardSubField)) || - (object.includes("__r") && (isStandardTechField || isStandardSubField)) || - (object.includes("__r") && packageXmlAllFields.includes(field)) + (object.includes('__r') && (isStandardTechField || isStandardSubField)) || + (object.includes('__r') && packageXmlAllFields.includes(field)) ) { return true; } else { if (this.debugMode) { - uxLog(this, `-- filtered ${objectField}`); + uxLog("log", this, `-- filtered ${objectField}`); } return false; } @@ -163,14 +196,14 @@ export default class OrgMissingItems extends SfdxCommand { // Update source file if content has been updated if (changed) { await writeXmlFile(reportTypeFile, reportType); - uxLog(this, `Updated ${reportTypeFile}`); + uxLog("log", this, `Updated ${reportTypeFile}`); counterItems++; } } // Summary const msg = `Updated ${c.green(c.bold(counterItems))} items`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, 
c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/references.ts b/src/commands/hardis/project/clean/references.ts index ef29c03b6..f31879bba 100644 --- a/src/commands/hardis/project/clean/references.ts +++ b/src/commands/hardis/project/clean/references.ts @@ -1,160 +1,200 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { glob } from "glob"; -import { createTempDir, execCommand, isCI, removeObjectPropertyLists, uxLog } from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; -import { parsePackageXmlFile, parseXmlFile, writePackageXmlFile, writeXmlFile } from "../../../../common/utils/xmlUtils"; -import { getConfig, setConfig } from "../../../../config"; -import { PACKAGE_ROOT_DIR } from "../../../../settings"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { glob } from 'glob'; +import { createTempDir, execCommand, isCI, removeObjectPropertyLists, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { + parsePackageXmlFile, + parseXmlFile, + writePackageXmlFile, + writeXmlFile, +} from '../../../../common/utils/xmlUtils.js'; +import { getConfig, setConfig } from '../../../../config/index.js'; +import { PACKAGE_ROOT_DIR } from '../../../../settings.js'; +import { FilterXmlContent } from './filter-xml-content.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin 
directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CleanReferences extends SfCommand { + public static title = 'Clean references in dx sources'; -export default class CleanReferences extends SfdxCommand { - public static title = "Clean references in dx sources"; + public static description = ` +## Command Behavior - public static description = "Remove unwanted references within sfdx project sources"; +**Removes unwanted references and cleans up metadata within your Salesforce DX project sources.** + +This command provides a powerful way to maintain a clean and efficient Salesforce codebase by eliminating unnecessary or problematic metadata. It supports various cleaning types, from removing hardcoded user references in dashboards to minimizing profile attributes. + +Key functionalities include: + +- **Configurable Cleaning Types:** You can specify a particular cleaning type (e.g., \`dashboards\`, \`minimizeProfiles\`) with the \`--type\` flag, or run every configured type with \`--type all\`. +- **JSON/XML Configuration:** Cleaning operations can be driven by a JSON configuration file or a \`destructiveChanges.xml\` file passed with the \`--config\` flag. +- **Interactive Selection:** If no cleaning type is specified, the command interactively prompts you to select which references to clean. +- **Persistent Configuration:** You can choose to save your cleaning selections in your project's configuration (\`.sfdx-hardis.yml\`) so they are automatically applied during future Work Save operations. +- **File Deletion:** Beyond just cleaning XML content, it can also delete related files (e.g., custom field files and their translations when a custom field is marked for deletion). + +
+ +Technical explanations + +The command's technical implementation involves several steps: + +- **Configuration Loading:** It reads the project's configuration to determine default cleaning types and user preferences. +- **Cleaning Type Processing:** For each selected cleaning type, it either executes a dedicated sub-command (e.g., \`sf hardis:project:clean:minimizeprofiles\`) or applies a template-based XML filtering. +- **XML Filtering:** For template-based cleanings, it constructs a temporary JSON configuration file based on predefined templates or user-provided \`destructiveChanges.xml\` content, then filters the XML sources of each package directory. +- **Package.xml Cleanup:** It iterates through \`package.xml\` manifest files and removes the entries matching deleted items. +- **Object Property Removal:** The \`removeObjectPropertyLists\` utility is used to strip deleted metadata references from the parsed manifest content. +
+`; public static examples = [ - "$ sfdx hardis:project:clean:references", - "$ sfdx hardis:project:clean:references --type all", - "$ sfdx hardis:project:clean:references --config ./cleaning/myconfig.json", - "$ sfdx hardis:project:clean:references --config ./somefolder/myDestructivePackage.xml", + '$ sf hardis:project:clean:references', + '$ sf hardis:project:clean:references --type all', + '$ sf hardis:project:clean:references --config ./cleaning/myconfig.json', + '$ sf hardis:project:clean:references --config ./somefolder/myDestructivePackage.xml', ]; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - type: flags.string({ - char: "t", - description: "Cleaning type", - options: ["all", "caseentitlement", "dashboards", "datadotcom", "destructivechanges", "localfields", "productrequest", "entitlement"], + public static flags: any = { + type: Flags.string({ + char: 't', + description: 'Cleaning type', + options: [ + 'all', + 'caseentitlement', + 'dashboards', + 'datadotcom', + 'destructivechanges', + 'localfields', + 'productrequest', + 'entitlement', + 'flowPositions', + 'sensitiveMetadatas', + 'minimizeProfiles' + ], }), - config: flags.string({ - char: "c", - description: "Path to a JSON config file or a destructiveChanges.xml file", + config: Flags.string({ + char: 'c', + description: 'Path to a JSON config file or a destructiveChanges.xml file', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this 
out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = true; /* jscpd:ignore-end */ - // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; - protected debugMode = false; - protected cleaningTypes = []; + protected cleaningTypes: any[] = []; protected allCleaningTypes = [ { - value: "checkPermissions", - title: "Check custom items are existing it at least one Permission Set", - command: "sfdx hardis:lint:access", + value: 'checkPermissions', + title: 'Check custom items are existing it at least one Permission Set', + command: 'sf hardis:lint:access', }, { - value: "dashboards", - title: "Dashboards: Remove reference to hardcoded users", + value: 'dashboards', + title: 'Dashboards: Remove reference to hardcoded users', }, { - value: "destructivechanges", - title: "DestructiveChanges.xml: Remove source files mentioned in destructiveChanges.xml", + value: 'destructivechanges', + title: 'DestructiveChanges.xml: Remove source files mentioned in destructiveChanges.xml', }, { - value: "flowPositions", + value: 'flowPositions', title: `Flows: Replace all positions in AutoLayout Flows by 0 to simplify conflicts management`, - command: "sfdx hardis:project:clean:flowpositions", + command: 'sf hardis:project:clean:flowpositions', + }, + { + value: 'sensitiveMetadatas', + title: `Remove sensitive metadata content from sources (ex: Certificates)`, + command: 'sf hardis:project:clean:sensitive-metadatas', }, { - value: "listViewsMine", + value: 'listViewsMine', title: `ListViews: Convert scope "Everything" into scope "Mine" on ListViews`, - 
command: "sfdx hardis:project:clean:listviews", + command: 'sf hardis:project:clean:listviews', }, { - value: "minimizeProfiles", - title: "Profiles: Remove profile attributes that exists on permission sets", - command: "sfdx hardis:project:clean:minimizeprofiles", + value: 'minimizeProfiles', + title: 'Profiles: Remove profile attributes that exists on permission sets', + command: 'sf hardis:project:clean:minimizeprofiles', }, { - value: "caseentitlement", - title: "References to Entitlement Management items", + value: 'caseentitlement', + title: 'References to Entitlement Management items', }, { - value: "datadotcom", - title: "References to Data.com items. https://help.salesforce.com/articleView?id=000320795&type=1&mode=1", + value: 'datadotcom', + title: 'References to Data.com items. https://help.salesforce.com/articleView?id=000320795&type=1&mode=1', }, { - value: "entitlement", - title: "References to Entitlement object", + value: 'entitlement', + title: 'References to Entitlement object', }, { - value: "localfields", - title: "References to Local Fields items. https://help.salesforce.com/articleView?id=sf.admin_local_name_fields.htm&type=5", + value: 'localfields', + title: + 'References to Local Fields items. 
https://help.salesforce.com/articleView?id=sf.admin_local_name_fields.htm&type=5', }, { - value: "productrequest", - title: "References to ProductRequest object", + value: 'productrequest', + title: 'References to ProductRequest object', }, { - value: "systemDebug", - title: "Remove System.debug from sources", - command: "sfdx hardis:project:clean:systemdebug", + value: 'systemDebug', + title: 'Remove System.debug from sources', + command: 'sf hardis:project:clean:systemdebug', }, { - value: "v60", - title: "Make metadata compliant with v60", + value: 'v60', + title: 'Make metadata compliant with v60', }, ]; - protected configFile: string; + protected configFile: string | null; protected deleteItems: any = {}; public async run(): Promise { - this.debugMode = this.flags.debug || false; - this.cleaningTypes = this.flags.type ? [this.flags.type] : []; - this.configFile = this.flags.config || null; - const config = await getConfig("project"); + const { flags } = await this.parse(CleanReferences); + this.debugMode = flags.debug || false; + this.cleaningTypes = flags.type ? 
[flags.type] : []; + this.configFile = flags.config || null; + const config = await getConfig('project'); // Config file sent by user if (this.configFile != null) { this.cleaningTypes = [this.configFile.trim()]; } else { // Read list of cleanings to perform in references - if (this.cleaningTypes.length > 0 && this.cleaningTypes[0] === "all") { + if (this.cleaningTypes.length > 0 && this.cleaningTypes[0] === 'all') { this.cleaningTypes = config.autoCleanTypes || []; } // Prompt user cleanings to perform if (!isCI && this.cleaningTypes.length === 0) { const typesResponse = await prompts({ - type: "multiselect", - name: "value", - message: c.cyanBright("What references do you want to clean from your SFDX project sources ?"), + type: 'multiselect', + name: 'value', + message: c.cyanBright('What references do you want to clean from your SFDX project sources ?'), + description: 'Select which types of reference cleaning to perform on your project', choices: this.allCleaningTypes, }); this.cleaningTypes = typesResponse.value; @@ -164,16 +204,19 @@ export default class CleanReferences extends SfdxCommand { // Prompt user to save choice in configuration const autoCleanTypes = config.autoCleanTypes || []; const toAdd = this.cleaningTypes.filter((type) => !autoCleanTypes.includes(type)); - if (toAdd.length > 0 && !isCI && this.flags.type !== "all") { + if (toAdd.length > 0 && !isCI && flags.type !== 'all') { const saveResponse = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', default: true, - message: c.cyanBright("Do you want to save this action in your project configuration, so it is executed at each Work Save ?"), + message: c.cyanBright( + 'Do you want to save this action in your project configuration, so it is executed at each Work Save ?' 
+ ), + description: 'Choose whether to automatically apply these cleaning types during future work saves', }); if (saveResponse.value === true) { autoCleanTypes.push(...this.cleaningTypes); - await setConfig("project", { + await setConfig('project', { autoCleanTypes: [...new Set(autoCleanTypes)], }); } @@ -181,13 +224,15 @@ export default class CleanReferences extends SfdxCommand { // Process cleaning for (const cleaningType of this.cleaningTypes) { - const cleaningTypeObj = this.allCleaningTypes.filter((cleaningTypeObj) => cleaningTypeObj.value === cleaningType)[0]; + const cleaningTypeObj = this.allCleaningTypes.filter( + (cleaningTypeObj) => cleaningTypeObj.value === cleaningType + )[0]; if (cleaningTypeObj?.command) { let command = cleaningTypeObj?.command; - if (this.argv.indexOf("--websocket") > -1) { - command += ` --websocket ${this.argv[this.argv.indexOf("--websocket") + 1]}`; + if (this.argv.indexOf('--websocket') > -1) { + command += ` --websocket ${this.argv[this.argv.indexOf('--websocket') + 1]}`; } - uxLog(this, c.cyan(`Run cleaning command ${c.bold(cleaningType)} (${cleaningTypeObj.title}) ...`)); + uxLog("action", this, c.cyan(`Run cleaning command ${c.bold(cleaningType)} (${cleaningTypeObj.title}) ...`)); // Command based cleaning await execCommand(command, this, { fail: true, @@ -196,27 +241,24 @@ export default class CleanReferences extends SfdxCommand { }); } else { // Template based cleaning - uxLog(this, c.cyan(`Apply cleaning of references to ${c.bold(cleaningType)} (${cleaningTypeObj.title})...`)); + uxLog("action", this, c.cyan(`Apply cleaning of references to ${c.bold(cleaningType)} (${cleaningTypeObj.title})...`)); const filterConfigFile = await this.getFilterConfigFile(cleaningType); - const cleanCommand = - "sfdx essentials:metadata:filter-xml-content" + - ` -c ${filterConfigFile}` + - ` --inputfolder ./force-app/main/default` + - ` --outputfolder ./force-app/main/default` + - " --noinsight"; - await execCommand(cleanCommand, this, { - 
fail: true, - output: false, - debug: this.debugMode, - }); + const packageDirectories = this.project?.getPackageDirectories() || []; + for (const packageDirectory of packageDirectories) { + await FilterXmlContent.run( + ['-c', filterConfigFile, '--inputfolder', packageDirectory.path, '--outputfolder', packageDirectory.path], + this.config + ); + } } } // Clean package.xml file from deleted items - uxLog(this, c.grey(`Cleaning package.xml files...`)); - const patternPackageXml = "**/manifest/**/package*.xml"; + uxLog("log", this, c.grey(`Cleaning package.xml & files from deleted items...`)); + const patternPackageXml = '**/manifest/**/package*.xml'; const packageXmlFiles = await glob(patternPackageXml, { cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS }); for (const packageXmlFile of packageXmlFiles) { const packageXmlContent = await parsePackageXmlFile(packageXmlFile); @@ -224,50 +266,57 @@ export default class CleanReferences extends SfdxCommand { const newPackageXmlContent = removeObjectPropertyLists(packageXmlContent, this.deleteItems); if (packageXmlContentStr !== JSON.stringify(newPackageXmlContent)) { await writePackageXmlFile(packageXmlFile, newPackageXmlContent); - uxLog(this, c.grey("-- cleaned elements from " + packageXmlFile)); + uxLog("log", this, c.grey('-- cleaned elements from ' + packageXmlFile)); } } // Delete files when necessary (in parallel) - uxLog(this, c.grey(`Removing obsolete files...`)); + uxLog("log", this, c.grey(`Removing obsolete files...`)); await Promise.all( Object.keys(this.deleteItems).map(async (type) => { await this.manageDeleteRelatedFiles(type); - }), + }) ); - uxLog(this, c.green(`Cleaning complete`)); + uxLog("success", this, c.green(`Cleaning complete`)); // Return an object to be displayed with --json - return { outputString: "Cleaned references from sfdx project" }; + return { outputString: 'Cleaned references from sfdx project' }; } private async getFilterConfigFile(cleaningType) { - const templateFile = 
path.join(path.join(PACKAGE_ROOT_DIR, "defaults/clean", "template.txt")); + const templateFile = path.join(path.join(PACKAGE_ROOT_DIR, 'defaults/clean', 'template.txt')); // Read and complete cleaning template - let templateContent = await fs.readFile(templateFile, "utf8"); - if (cleaningType === "destructivechanges" || cleaningType.endsWith(".xml")) { + let templateContent = await fs.readFile(templateFile, 'utf8'); + if (cleaningType === 'destructivechanges' || cleaningType.endsWith('.xml')) { // destructive changes file - const destructiveChangesFile = cleaningType.endsWith(".xml") ? cleaningType : "./manifest/destructiveChanges.xml"; + const destructiveChangesFile = cleaningType.endsWith('.xml') ? cleaningType : './manifest/destructiveChanges.xml'; const destructiveChanges = await parseXmlFile(destructiveChangesFile); for (const type of destructiveChanges.Package.types || []) { const members = type.members; - templateContent = templateContent.replace(new RegExp(`{{ ${type.name[0]} }}`, "g"), JSON.stringify(members, null, 2)); + templateContent = templateContent.replace( + new RegExp(`{{ ${type.name[0]} }}`, 'g'), + JSON.stringify(members, null, 2) + ); this.deleteItems[type.name[0]] = (this.deleteItems[type.name[0]] || []).concat(members); } } else { // Predefined destructive items file - const filterConfigFileConfigPath = cleaningType.endsWith(".json") + const filterConfigFileConfigPath = cleaningType.endsWith('.json') ? 
cleaningType - : path.join(path.join(PACKAGE_ROOT_DIR, "defaults/clean", cleaningType + ".json")); - const filterConfigFileConfig = JSON.parse(await fs.readFile(filterConfigFileConfigPath, "utf8")); + : path.join(path.join(PACKAGE_ROOT_DIR, 'defaults/clean', cleaningType + '.json')); + const filterConfigFileConfig = JSON.parse(await fs.readFile(filterConfigFileConfigPath, 'utf8')); for (const type of Object.keys(filterConfigFileConfig.items)) { - templateContent = templateContent.replace(new RegExp(`{{ ${type} }}`, "g"), JSON.stringify(filterConfigFileConfig.items[type], null, 2)); + templateContent = templateContent.replace( + new RegExp(`{{ ${type} }}`, 'g'), + JSON.stringify(filterConfigFileConfig.items[type], null, 2) + ); this.deleteItems[type] = (this.deleteItems[type] || []).concat(filterConfigFileConfig.items[type]); } } // Create temporary file - templateContent = templateContent.replace(/{{ .* }}/gm, "[]"); - const tmpCleanFileName = cleaningType.endsWith(".xml") || cleaningType.endsWith(".json") ? path.basename(cleaningType) : cleaningType; + templateContent = templateContent.replace(/{{ .* }}/gm, '[]'); + const tmpCleanFileName = + cleaningType.endsWith('.xml') || cleaningType.endsWith('.json') ? 
path.basename(cleaningType) : cleaningType; const filterConfigFile = path.join(await createTempDir(), `clean_${tmpCleanFileName}.json`); await fs.writeFile(filterConfigFile, templateContent); return filterConfigFile; @@ -275,7 +324,7 @@ export default class CleanReferences extends SfdxCommand { private async manageDeleteRelatedFiles(type) { // Custom fields - if (type === "CustomField") { + if (type === 'CustomField') { for (const field of this.deleteItems[type] || []) { await this.manageDeleteCustomFieldRelatedFiles(field); } @@ -284,20 +333,21 @@ export default class CleanReferences extends SfdxCommand { private async manageDeleteCustomFieldRelatedFiles(field: string) { // Remove custom field and customTranslation - const [obj, fld] = field.split("."); - const patternField = `force-app/**/objects/${obj}/fields/${fld}.field-meta.xml`; - const patternTranslation = `force-app/**/objectTranslations/${obj}-*/${fld}.fieldTranslation-meta.xml`; + const [obj, fld] = field.split('.'); + const patternField = `**/objects/${obj}/fields/${fld}.field-meta.xml`; + const patternTranslation = `**/objectTranslations/${obj}-*/${fld}.fieldTranslation-meta.xml`; for (const pattern of [patternField, patternTranslation]) { - const matchFiles = await glob(pattern, { cwd: process.cwd() }); + const matchFiles = await glob(pattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const removeFile of matchFiles) { await fs.remove(removeFile); - uxLog(this, c.grey(`Removed file ${removeFile}`)); + uxLog("log", this, c.grey(`Removed file ${removeFile}`)); } } // Remove field in recordTypes - const patternRecordType = `/force-app/**/objects/${obj}/recordTypes/*.recordType-meta.xml`; + const patternRecordType = `**/objects/${obj}/recordTypes/*.recordType-meta.xml`; const matchFilesPattern = await glob(patternRecordType, { cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS }); for (const recordTypeFile of matchFilesPattern) { const recordType = await parseXmlFile(recordTypeFile); @@ 
-308,9 +358,9 @@ export default class CleanReferences extends SfdxCommand { if (updatedPicklistValues.length !== recordType.RecordType.picklistValues.length) { recordType.RecordType.picklistValues = updatedPicklistValues; await writeXmlFile(recordTypeFile, recordType); - uxLog(this, c.grey(`Cleaned file ${recordTypeFile} from ${obj}.${fld}`)); + uxLog("log", this, c.grey(`Cleaned file ${recordTypeFile} from ${obj}.${fld}`)); } } } } -} +} \ No newline at end of file diff --git a/src/commands/hardis/project/clean/retrievefolders.ts b/src/commands/hardis/project/clean/retrievefolders.ts index 0cc3f186e..f1ed18395 100644 --- a/src/commands/hardis/project/clean/retrievefolders.ts +++ b/src/commands/hardis/project/clean/retrievefolders.ts @@ -1,75 +1,86 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { execCommand, uxLog } from "../../../../common/utils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { execCommand, uxLog } from '../../../../common/utils/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CleanRetrieveFolders extends SfCommand { + public static title = 'Retrieve dashboards, documents and report folders in DX sources'; -export default class CleanRetrieveFolders extends SfdxCommand { - public static title = "Retrieve dashboards, documents and report folders in DX sources"; + public static description: string = ` +## Command Behavior - public static description = "Retrieve dashboards, documents and report folders in DX sources. Use -u ORGALIAS"; +**Retrieves specific folders of Dashboards, Documents, Email Templates, and Reports from a Salesforce org into your DX project sources.** - public static examples = ["$ sfdx hardis:project:clean:retrievefolders"]; +This command is designed to help developers and administrators synchronize their local Salesforce DX project with the latest versions of these folder-based metadata types. It's particularly useful for: - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +- **Selective Retrieval:** Instead of retrieving all dashboards or reports, it allows you to retrieve specific folders, which can be more efficient for targeted development or backup. +- **Maintaining Folder Structure:** Ensures that the folder structure of these metadata types is preserved in your local project. + +
+Technical explanations + +The command's technical implementation involves: + +- **Folder Iteration:** It defines a list of folder-based metadata types (\`dashboards\`, \`documents\`, \`email\`, \`reports\`). +- **File System Check:** For each type, it checks if the corresponding folder exists in \`force-app/main/default/\`. +- **Recursive Retrieval:** It iterates through subfolders within these main folders. For each subfolder, it constructs and executes a \`sf project retrieve start\` command. +- **Salesforce CLI Integration:** It uses \`sf project retrieve start -m :\` to retrieve the content of individual folders. This ensures that only the specified folder and its contents are retrieved. +- **Error Handling:** It includes basic error handling for the \`execCommand\` calls. +
+`; + + public static examples = ['$ sf hardis:project:clean:retrievefolders']; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; /* jscpd:ignore-end */ protected debugMode = false; protected deleteItems: any = {}; public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanRetrieveFolders); + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Retrieve dashboards, documents and report folders in DX sources`)); + uxLog("action", this, c.cyan(`Retrieve dashboards, documents and report folders in DX sources`)); - const rootSourcesFolder = path.join(process.cwd() + "/force-app/main/default"); + const rootSourcesFolder = path.join(process.cwd() + '/force-app/main/default'); const folderTypes = [ - { sourceType: "dashboards", mdType: "Dashboard" }, - { sourceType: 
"documents", mdType: "Document" }, - { sourceType: "email", mdType: "EmailTemplate" }, - { sourceType: "reports", mdType: "Report" }, + { sourceType: 'dashboards', mdType: 'Dashboard' }, + { sourceType: 'documents', mdType: 'Document' }, + { sourceType: 'email', mdType: 'EmailTemplate' }, + { sourceType: 'reports', mdType: 'Report' }, ]; // Iterate on types, and for each sub folder found, retrieve its SFDX source from org for (const folderType of folderTypes) { - const folderDir = rootSourcesFolder + "/" + folderType.sourceType; + const folderDir = rootSourcesFolder + '/' + folderType.sourceType; await this.manageRetrieveFolder(folderDir, folderType); } // Return an object to be displayed with --json - return { outputString: "Retrieved folders" }; + return { outputString: 'Retrieved folders' }; } private async manageRetrieveFolder(folderDir, folderType) { @@ -78,10 +89,10 @@ export default class CleanRetrieveFolders extends SfdxCommand { } const folderDirContent = await fs.readdir(folderDir); for (const subFolder of folderDirContent) { - const subFolderFull = folderDir + "/" + subFolder; + const subFolderFull = folderDir + '/' + subFolder; if (fs.lstatSync(subFolderFull).isDirectory()) { // Retrieve sub folder DX source - await execCommand(`sfdx force:source:retrieve -m ${folderType.mdType}:${subFolder}`, this, { + await execCommand(`sf project retrieve start -m ${folderType.mdType}:${subFolder}`, this, { fail: true, output: true, debug: this.debugMode, diff --git a/src/commands/hardis/project/clean/sensitive-metadatas.ts b/src/commands/hardis/project/clean/sensitive-metadatas.ts new file mode 100644 index 000000000..cfe2ccc7d --- /dev/null +++ b/src/commands/hardis/project/clean/sensitive-metadatas.ts @@ -0,0 +1,94 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { glob } from 'glob'; +import * 
as path from 'path'; +import fs from 'fs-extra'; +import { uxLog } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanSensitiveMetadatas extends SfCommand { + public static title = 'Clean Sensitive Metadatas'; + + public static description = `Sensitive data like credentials and certificates are not supposed to be stored in Git, to avoid security breaches. + +This command detects the related metadata and replaces their sensitive content by "HIDDEN_BY_SFDX_HARDIS" + +Can be automated at each **hardis:work:save** if **sensitiveMetadatas** is added in .sfdx-hardis.yml **autoCleanTypes** property + +Example in config/.sfdx-hardis.yml: + +\`\`\`yaml +autoCleanTypes: + - destructivechanges + - sensitiveMetadatas +\`\`\` +`; + + public static examples = ['$ sf hardis:project:clean:sensitive-metadatas']; + + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected folder: string; + protected debugMode = false; + + public async run(): Promise { + const { flags } = await this.parse(CleanSensitiveMetadatas); + this.folder = flags.folder || './force-app'; + this.debugMode = flags.debug || false; + + // Delete standard files when necessary + uxLog("action", this, c.cyan(`Looking for certificates...`)); + /* 
jscpd:ignore-end */ + const rootFolder = path.resolve(this.folder); + const findManagedPattern = rootFolder + `/**/*.crt`; + const matchingCerts = await glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); + let counter = 0; + for (const cert of matchingCerts) { + let certText = await fs.readFile(cert, 'utf8'); + if (certText.includes('BEGIN CERTIFICATE')) { + certText = `CERTIFICATE HIDDEN BY SFDX-HARDIS. + +Certificates are not supposed to be stored in Git Repositories, please: + +- Make sure they are never overwritten thanks to package-no-overwrite.Xml +- Manually upload them in target orgs when necessary +` + await fs.writeFile(cert, certText); + counter++; + uxLog("log", this, c.grey(`Replaced certificate content of ${cert}`)); + } + } + + // Summary + const msg = `Updated ${c.green(c.bold(counter))} certificates to hide their content`; + uxLog("action", this, c.cyan(msg)); + // Return an object to be displayed with --json + return { outputString: msg }; + } +} diff --git a/src/commands/hardis/project/clean/standarditems.ts b/src/commands/hardis/project/clean/standarditems.ts index c5ef64185..e4741188a 100644 --- a/src/commands/hardis/project/clean/standarditems.ts +++ b/src/commands/hardis/project/clean/standarditems.ts @@ -1,95 +1,113 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import { 
GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class CleanStandardItems extends SfCommand { + public static title = 'Clean retrieved standard items in dx sources'; -export default class CleanStandardItems extends SfdxCommand { - public static title = "Clean retrieved standard items in dx sources"; + public static description: string = ` +## Command Behavior - public static description = "Remove unwanted standard items within sfdx project sources"; +**Removes unwanted standard Salesforce items from your Salesforce DX project sources.** - public static examples = ["$ sfdx hardis:project:clean:standarditems"]; +This command helps maintain a clean and focused Salesforce codebase by deleting metadata files that represent standard Salesforce objects or fields, especially when they are retrieved but not intended to be managed in your version control system. This is useful for reducing repository size and avoiding conflicts with standard Salesforce metadata. - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +Key functionalities: + +- **Standard Object Cleaning:** Scans for standard objects (those without a \`__c\` suffix) within your \`force-app/main/default/objects\` folder. +- **Conditional Folder Deletion:** If a standard object folder contains no custom fields (fields with a \`__c\` suffix), the entire folder and its associated sharing rules (\`.sharingRules-meta.xml\`) are removed. 
+- **Standard Field Deletion:** If a standard object folder *does* contain custom fields, only the standard fields within that object are removed, preserving your custom metadata. + +
+Technical explanations + +The command's technical implementation involves: + +- **File System Traversal:** It starts by listing the contents of the \`force-app/main/default/objects\` directory. +- **Standard Object Identification:** It iterates through each directory within \`objects\` and identifies standard objects by checking if their name does not contain \`__\` (the custom object suffix). +- **Custom Field Detection:** For each standard object, it uses \`glob\` to search for custom fields (\`*__*.field-meta.xml\`) within its \`fields\` subdirectory. +- **Conditional Removal:** + - If no custom fields are found, it removes the entire object directory and any corresponding sharing rules file using \`fs.remove\`. + - If custom fields are found, it then uses \`glob\` again to find all standard fields (\`*.field-meta.xml\` without \`__\`) within the object's \`fields\` directory and removes only those standard field files. +- **Logging:** Provides clear messages about which folders and files are being removed or kept. +
+`; + + public static examples = ['$ sf hardis:project:clean:standarditems']; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected debugMode = false; protected deleteItems: any = {}; public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanStandardItems); + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Removing unwanted standard dx source files...`)); + uxLog("action", this, c.cyan(`Removing unwanted standard dx source files...`)); /* jscpd:ignore-end */ - const sourceRootFolder = path.join(process.cwd() + "/force-app/main/default"); - const objectsFolder = path.join(sourceRootFolder + "/objects"); + const sourceRootFolder = path.join(process.cwd() + '/force-app/main/default'); + const objectsFolder = path.join(sourceRootFolder + '/objects'); const objectsFolderContent = await fs.readdir(objectsFolder); for (const objectDirName of objectsFolderContent) { - const objectDir = objectsFolder + "/" + objectDirName; 
+ const objectDir = objectsFolder + '/' + objectDirName; // Process only standard objects - if (fs.lstatSync(objectDir).isDirectory() && !objectDir.includes("__")) { + if (fs.lstatSync(objectDir).isDirectory() && !objectDir.includes('__')) { const findCustomFieldsPattern = `${objectDir}/fields/*__*`; - const matchingCustomFiles = await glob(findCustomFieldsPattern, { cwd: process.cwd() }); + const matchingCustomFiles = await glob(findCustomFieldsPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); if (matchingCustomFiles.length === 0) { // Remove the whole folder await fs.remove(objectDir); - uxLog(this, c.cyan(`Removed folder ${c.yellow(objectDir)}`)); - const sharingRuleFile = path.join(sourceRootFolder, "sharingRules", objectDirName + ".sharingRules-meta.xml"); + uxLog("action", this, c.cyan(`Removed folder ${c.yellow(objectDir)}`)); + const sharingRuleFile = path.join(sourceRootFolder, 'sharingRules', objectDirName + '.sharingRules-meta.xml'); if (fs.existsSync(sharingRuleFile)) { // Remove sharingRule if existing await fs.remove(sharingRuleFile); - uxLog(this, c.cyan(`Removed sharing rule ${c.yellow(sharingRuleFile)}`)); + uxLog("action", this, c.cyan(`Removed sharing rule ${c.yellow(sharingRuleFile)}`)); } } else { // Remove only standard fields const findAllFieldsPattern = `${objectDir}/fields/*.field-meta.xml`; - const matchingAllFields = await glob(findAllFieldsPattern, { cwd: process.cwd() }); + const matchingAllFields = await glob(findAllFieldsPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const field of matchingAllFields) { - if (!field.includes("__")) { + if (!field.includes('__')) { await fs.remove(field); - uxLog(this, c.cyan(` - removed standard field ${c.yellow(field)}`)); + uxLog("action", this, c.cyan(` - removed standard field ${c.yellow(field)}`)); } } - uxLog(this, c.cyan(`Keep folder ${c.green(objectDir)} because of custom fields found`)); + uxLog("action", this, c.cyan(`Keep folder ${c.green(objectDir)} 
because of custom fields found`)); } } } // Return an object to be displayed with --json - return { outputString: "Cleaned standard items from sfdx project" }; + return { outputString: 'Cleaned standard items from sfdx project' }; } } diff --git a/src/commands/hardis/project/clean/systemdebug.ts b/src/commands/hardis/project/clean/systemdebug.ts index 854e2d25a..fa9a11ab8 100644 --- a/src/commands/hardis/project/clean/systemdebug.ts +++ b/src/commands/hardis/project/clean/systemdebug.ts @@ -1,78 +1,101 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { glob } from "glob"; -import * as path from "path"; -import { uxLog } from "../../../../common/utils"; -import * as fs from "fs-extra"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class CleanSystemDebug extends SfdxCommand { - public static title = "Clean System debug"; - - public static description = "Clean System.debug() lines in APEX Code (classes and triggers)"; - - public static examples = ["$ sfdx hardis:project:clean:systemdebug"]; - - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { glob } from 'glob'; +import * as path from 'path'; +import { uxLog } from '../../../../common/utils/index.js'; +import fs from 'fs-extra'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanSystemDebug extends SfCommand { + public static title = 'Clean System debug'; + + public static description: string = ` +## Command Behavior + +**Removes or comments out \`System.debug()\` statements from Apex classes and triggers in your Salesforce DX project.** + +This command helps maintain clean and optimized Apex code by eliminating debug statements that are often left in production code. While \`System.debug()\` is invaluable during development, it can impact performance and expose sensitive information if left in deployed code. + +Key functionalities: + +- **Targeted File Scan:** Scans all Apex class (.cls) and trigger (.trigger) files within the specified root folder (defaults to \`force-app\`). +- **Conditional Action:** + - **Comment Out (default):** By default, it comments out \`System.debug()\` lines by prepending // to them. 
+ - **Delete (\`--delete\` flag):** If the \`--delete\` flag is used, it completely removes the lines containing \`System.debug()\`. +- **Exclusion:** Lines containing \`NOPMD\` are ignored, allowing developers to intentionally keep specific debug statements. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses \`glob\` to find all Apex class and trigger files. +- **Content Reading:** Reads the content of each Apex file line by line. +- **Pattern Matching:** Checks each line for the presence of \`System.debug\` (case-insensitive). +- **Line Modification:** + - If \`System.debug\` is found and the \`--delete\` flag is not used, it modifies the line to comment out the debug statement. + - If \`System.debug\` is found and the \`--delete\` flag is used, it removes the line entirely. +- **File Writing:** If any changes are made to a file, the modified content is written back to the file using \`fs.writeFile\`. +- **Logging:** Provides a summary of how many files were cleaned. +
+`; + + public static examples = ['$ sf hardis:project:clean:systemdebug']; + + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - delete: flags.boolean({ - char: "d", + delete: Flags.boolean({ + char: 'd', default: false, - description: "Delete lines with System.debug", + description: 'Delete lines with System.debug', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; protected del = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.del = this.flags.delete || false; + const { flags } = await this.parse(CleanSystemDebug); + this.folder = flags.folder || './force-app'; + this.del = flags.delete || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Comment or delete System.debug line in apex classes and triggers`)); + uxLog("action", this, c.cyan(`Comment or delete System.debug line in apex classes and triggers`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const findManagedPattern = rootFolder + `/**/*.{cls,trigger}`; - const matchingFiles = await glob(findManagedPattern, { cwd: process.cwd() }); + const matchingFiles = await 
glob(findManagedPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); let countFiles = 0; for (const apexFile of matchingFiles) { - const fileText = await fs.readFile(apexFile, "utf8"); - const fileLines = fileText.split("\n"); + const fileText = await fs.readFile(apexFile, 'utf8'); + const fileLines = fileText.split('\n'); let counter = 0; let writeF = false; for (const line of fileLines) { - if ((line.includes("System.debug") || line.includes("system.debug")) && !line.includes("NOPMD")) { - if (!this.del && line.trim().substring(0, 2) != "//") { - fileLines[counter] = line.replace("System.debug", "// System.debug").replace("system.debug", "// system.debug"); + if ((line.includes('System.debug') || line.includes('system.debug')) && !line.includes('NOPMD')) { + if (!this.del && line.trim().substring(0, 2) != '//') { + fileLines[counter] = line + .replace('System.debug', '// System.debug') + .replace('system.debug', '// system.debug'); writeF = true; } else if (this.del) { delete fileLines[counter]; @@ -82,15 +105,15 @@ export default class CleanSystemDebug extends SfdxCommand { counter++; } if (writeF) { - const joinLines = fileLines.join("\n"); - await fs.writeFile(apexFile, joinLines, "utf8"); + const joinLines = fileLines.join('\n'); + await fs.writeFile(apexFile, joinLines, 'utf8'); countFiles++; } } // Summary const msg = `Cleaned ${c.green(c.bold(countFiles))} class(es) and trigger(s)`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg }; } diff --git a/src/commands/hardis/project/clean/unlockedpackages.ts b/src/commands/hardis/project/clean/unlockedpackages.ts new file mode 100644 index 000000000..ba292890e --- /dev/null +++ b/src/commands/hardis/project/clean/unlockedpackages.ts @@ -0,0 +1,152 @@ +/* jscpd:ignore-start */ +import { flags, SfdxCommand } from "@salesforce/command"; +import { Connection, Messages } from "@salesforce/core"; +import { Connection, 
Messages } from "@salesforce/core";
+import { AnyJson } from "@salesforce/ts-types";
+import * as c from "chalk";
+import { uxLog } from "../../../../common/utils";
+import { soqlQueryTooling, describeGlobalTooling, toolingRequest } from "../../../../common/utils/apiUtils";
+import { prompts } from "../../../../common/utils/prompts";
+
+// Initialize Messages with the current plugin directory
+Messages.importMessagesDirectory(__dirname);
+
+// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core,
+// or any library that is using the messages framework can also be loaded this way.
+const messages = Messages.loadMessages("sfdx-hardis", "org");
+
+export default class unlockedpackages extends SfdxCommand {
+  public static title = "Clean installed unlocked packages";
+
+  public static description = `Clean installed unlocked packages, such as those installed from unofficialSF`;
+
+  public static examples = ["$ sfdx hardis:project:clean:unlockedpackages"];
+
+  protected static flagsConfig = {
+    path: flags.string({
+      char: "p",
+      default: process.cwd(),
+      description: "Root folder",
+    }),
+    debug: flags.boolean({
+      char: "d",
+      default: false,
+      description: messages.getMessage("debugMode"),
+    }),
+    websocket: flags.string({
+      description: messages.getMessage("websocket"),
+    }),
+    skipauth: flags.boolean({
+      description: "Skip authentication check when a default username is required",
+    }),
+  };
+
+  // Comment this out if your command does not require an org username
+  protected static requiresUsername = true;
+
+  // Comment this out if your command does not support a hub org username
+  protected static requiresDevhubUsername = false;
+
+  // Set this to true if your command requires a project workspace; 'requiresProject' is false by default
+  protected static requiresProject = true;
+
+  protected pathToBrowse: string;
+
protected debugMode = false;
+
+  public async run(): Promise<AnyJson> {
+    this.pathToBrowse = this.flags.path || process.cwd();
+    this.debugMode = this.flags.debug || false;
+
+    /* jscpd:ignore-end */
+
+    // List available unlocked packages in org
+    const pkgsRequest = "SELECT SubscriberPackageId, SubscriberPackage.NamespacePrefix, SubscriberPackage.Name, SubscriberPackageVersionId FROM InstalledSubscriberPackage ORDER BY SubscriberPackage.NamespacePrefix";
+    const pkgsResult = await soqlQueryTooling(pkgsRequest, this.org.getConnection());
+    const choices = pkgsResult.records
+      .filter(pkg => pkg.SubscriberPackage.NamespacePrefix == null)
+      .map((pkg) => ({
+        title: pkg.SubscriberPackage.Name,
+        value: pkg.SubscriberPackageId,
+        version: pkg.SubscriberPackageVersionId
+      })
+      );
+
+    // Get All Org SObject with prefix key
+    const describeObjResult = await describeGlobalTooling(this.org.getConnection());
+    const orgPrefixKey = describeObjResult.sobjects.reduce((obj, item) => ({
+      ...obj,
+      [item.keyPrefix]: item.name
+    }), {});
+
+    //Prompt which package to clean up
+    const promptUlpkgToClean = await prompts([
+      {
+        type: "select",
+        name: "packageId",
+        message: "Please select the package to clean out",
+        choices: choices
+      }
+    ])
+
+    const chosenPackage = choices.filter(id => id.value == promptUlpkgToClean.packageId)[0]
+
+    // Tooling query specific package
+    const ulpkgQuery = `SELECT SubjectID, SubjectKeyPrefix FROM Package2Member WHERE SubscriberPackageId='${promptUlpkgToClean.packageId}'`
+    const ulpkgQueryResult = await soqlQueryTooling(ulpkgQuery, this.org.getConnection());
+
+    //create array of package members, looking up object name from orgPrefixKey
+    const ulpkgMembers = ulpkgQueryResult.records.map(member => ({
+      SubjectId: member.SubjectId,
+      SubjectKeyPrefix: member.SubjectKeyPrefix,
+      ObjectName: orgPrefixKey[member.SubjectKeyPrefix]
+
})).filter(member => member.ObjectName !== undefined);
+
+    //fetch metadata for package members
+    const ulpkgMeta = await Promise.all(ulpkgMembers.map(async (member) => {
+      const toolingQuery: [string, Connection, Record<string, unknown>] = [
+        `sobjects/${member.ObjectName}/${member.SubjectId}`,
+        this.org.getConnection(),
+        {}
+      ]
+      const returnResponse: Record<string, unknown> = await toolingRequest(...toolingQuery)
+      return {
+        name: returnResponse.Name || returnResponse.DeveloperName,
+        fullName: returnResponse.FullName
+      }
+    }));
+
+    console.log(ulpkgMeta)
+
+    // Create json file
+
+    // Do Clean
+
+    // Summary
+    const msg = `Cleaned ${c.green(c.bold(chosenPackage.title))}.`;
+    uxLog(this, c.cyan(msg));
+    // Return an object to be displayed with --json
+    return { outputString: msg };
+  }
+}
diff --git a/src/commands/hardis/project/clean/xml.ts b/src/commands/hardis/project/clean/xml.ts
index 61bf85900..c037bca9a 100644
--- a/src/commands/hardis/project/clean/xml.ts
+++ b/src/commands/hardis/project/clean/xml.ts
@@ -1,28 +1,25 @@
 /* jscpd:ignore-start */
-import { flags, SfdxCommand } from "@salesforce/command";
-import { Messages, SfdxError } from "@salesforce/core";
-import { AnyJson } from "@salesforce/ts-types";
-import * as c from "chalk";
-import * as fs from "fs-extra";
-import { glob } from "glob";
-import * as path from "path";
-import * as sortArray from "sort-array";
-import * as xmldom
from "@xmldom/xmldom"; -import * as xpath from "xpath"; -import { isCI, uxLog } from "../../../../common/utils"; -import { prompts } from "../../../../common/utils/prompts"; -import { writeXmlFileFormatted } from "../../../../common/utils/xmlUtils"; -import { getConfig, setConfig } from "../../../../config"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class CleanXml extends SfdxCommand { - public static title = "Clean retrieved empty items in dx sources"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import sortArray from 'sort-array'; +import * as xmldom from '@xmldom/xmldom'; +import * as xpath from 'xpath'; +import { isCI, uxLog } from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { writeXmlFileFormatted } from '../../../../common/utils/xmlUtils.js'; +import { getConfig, setConfig } from '../../../../config/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class CleanXml extends SfCommand { + public static title = 'Clean retrieved empty items in dx sources'; public static description = `Remove XML elements using Glob patterns and XPath expressions @@ -39,68 +36,64 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea `; public 
static examples = [ - "$ sfdx hardis:project:clean:xml", - `$ sfdx hardis:project:clean:xml --globpattern "/**/*.flexipage-meta.xml" --xpath "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]"`, + '$ sf hardis:project:clean:xml', + `$ sf hardis:project:clean:xml --globpattern "/**/*.flexipage-meta.xml" --xpath "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]"`, ]; - protected static flagsConfig = { - folder: flags.string({ - char: "f", - default: "force-app", - description: "Root folder", + public static flags: any = { + folder: Flags.string({ + char: 'f', + default: 'force-app', + description: 'Root folder', }), - globpattern: flags.string({ - char: "p", - description: "Glob pattern to find files to clean. Ex: /**/*.flexipage-meta.xml", - dependsOn: ["xpath"], + globpattern: Flags.string({ + char: 'p', + description: 'Glob pattern to find files to clean. Ex: /**/*.flexipage-meta.xml', + dependsOn: ['xpath'], }), - xpath: flags.string({ - char: "x", - description: "XPath to use to detect the elements to remove. Ex: //ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]", - dependsOn: ["globpattern"], + xpath: Flags.string({ + char: 'x', + description: + "XPath to use to detect the elements to remove. 
Ex: //ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]", + dependsOn: ['globpattern'], }), - namespace: flags.string({ - char: "n", - default: "http://soap.sforce.com/2006/04/metadata", - description: "XML Namespace to use", + namespace: Flags.string({ + char: 'n', + default: 'http://soap.sforce.com/2006/04/metadata', + description: 'XML Namespace to use', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected folder: string; - protected globPattern: string; + protected globPattern: string | undefined; protected namespace: string; - protected xpath: string; + protected xpath: string | undefined; protected debugMode = false; public async run(): Promise { - this.folder = this.flags.folder || "./force-app"; - this.globPattern = this.flags.globpattern; - this.xpath = this.flags.xpath; - this.namespace = this.flags.namespace || "http://soap.sforce.com/2006/04/metadata"; - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(CleanXml); + this.folder = flags.folder || './force-app'; 
+ this.globPattern = flags.globpattern; + this.xpath = flags.xpath; + this.namespace = flags.namespace || 'http://soap.sforce.com/2006/04/metadata'; + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Clean XML elements matching patterns`)); + uxLog("log", this, c.grey(`Clean XML elements matching patterns`)); /* jscpd:ignore-end */ const rootFolder = path.resolve(this.folder); const cleanXmlPatterns = await this.buildCleanXmlPatterns(); @@ -109,18 +102,18 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea // iterate on removePatterns for (const cleanXmlPattern of cleanXmlPatterns) { const findPattern = rootFolder + cleanXmlPattern.globPattern; - const matchingXmlFiles = await glob(findPattern, { cwd: process.cwd() }); + const matchingXmlFiles = await glob(findPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); // Iterate on matching files for (const xmlFile of matchingXmlFiles) { let updated = false; - const xml = await fs.readFile(xmlFile, "utf8"); - const doc = new xmldom.DOMParser().parseFromString(xml); + const xml = await fs.readFile(xmlFile, 'utf8'); + const doc = new xmldom.DOMParser().parseFromString(xml, 'text/xml'); // Iterate on xpaths for (const xpathItem of cleanXmlPattern.xpaths) { - const nodes = xpathSelect(xpathItem, doc); - for (const node of nodes) { + const nodes = xpathSelect(xpathItem, doc as any); + for (const node of nodes as Node[]) { await this.removeXPath(xpathItem, doc, node); - uxLog(this, c.grey(`Removed xpath ${xpathItem} from ${xmlFile}`)); + uxLog("log", this, c.grey(`Removed xpath ${xpathItem} from ${xmlFile}`)); updated = true; counter++; } @@ -134,7 +127,7 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea // Summary const msg = `Updated ${c.green(c.bold(counter))} XML files`; - uxLog(this, c.cyan(msg)); + uxLog("log", this, c.grey(msg)); // Propose to add in permanent configuration if 
(this.globPattern && this.xpath) { await this.manageAddToPermanentConfig(this.globPattern, this.xpath); @@ -146,7 +139,7 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea public async buildCleanXmlPatterns() { // Input parameters if (this.globPattern && this.xpath) { - uxLog(this, c.cyan("Using configuration from input arguments...")); + uxLog("log", this, c.grey('Using configuration from input arguments...')); return [ { globPattern: this.globPattern, @@ -155,30 +148,34 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea ]; } // Stored config - uxLog(this, c.cyan(`Using configuration from property ${c.bold("cleanXmlPatterns")} in .sfdx-hardis.yml config file...`)); - const config = await getConfig("branch"); + uxLog( + "log", + this, + c.grey(`Using configuration from property ${c.bold('cleanXmlPatterns')} in .sfdx-hardis.yml config file...`) + ); + const config = await getConfig('branch'); return config.cleanXmlPatterns || []; } public async removeXPath(xPathItem, doc, node) { const parentNodeName = this.findRemoveParentNodeName(xPathItem); const parentNode = this.findParentNode(node, parentNodeName); - if (parentNode) { - doc.removeChild(parentNode); + if (parentNode && parentNode.parentNode) { + parentNode.parentNode.removeChild(parentNode); } } public findRemoveParentNodeName(xpathItem: string) { - const splits = xpathItem.split("//ns:").filter((str) => str !== ""); + const splits = xpathItem.split('//ns:').filter((str) => str !== ''); if (splits[0]) { return splits[0]; } - throw new SfdxError(`[sfdx-hardis] xpath should start with //ns:PARENT-TAG-NAME//ns:`); + throw new SfError(`[sfdx-hardis] xpath should start with //ns:PARENT-TAG-NAME//ns:`); } public findParentNode(node: any, parentNodeName: string) { if (node == null) { - throw new SfdxError(`[sfdx-hardis] Parent node named ${parentNodeName} not found`); + throw new SfError(`[sfdx-hardis] Parent node named ${parentNodeName} not found`); } 
if (node.localName === parentNodeName) { return node; @@ -189,16 +186,21 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea // Propose user to perform such cleaning at each future hardis:work:save command public async manageAddToPermanentConfig(globPattern: string, xpath: string) { if (!isCI) { - const config = await getConfig("project"); + const config = await getConfig('project'); let cleanXmlPatterns = config.cleanXmlPatterns || []; - const alreadyDefined = cleanXmlPatterns.filter((item: any) => item.globPattern === globPattern && item.xpaths.includes(xpath)); + const alreadyDefined = cleanXmlPatterns.filter( + (item: any) => item.globPattern === globPattern && item.xpaths.includes(xpath) + ); if (alreadyDefined.length > 0) { return; } // prompt user const addConfigRes = await prompts({ - type: "confirm", - message: c.cyanBright(`Do you want to ALWAYS apply removal of xpath ${xpath} from files of pattern ${globPattern} ?`), + type: 'confirm', + message: c.cyanBright( + `Do you want to ALWAYS apply removal of xpath ${xpath} from files of pattern ${globPattern} ?` + ), + description: 'Choose whether to save this xpath removal as a permanent cleaning rule', }); if (addConfigRes.value === true) { let updated = false; @@ -220,10 +222,10 @@ Note: If globpattern and xpath are not sent, elements defined in property **clea } // Update config with sorted new value cleanXmlPatterns = sortArray(cleanXmlPatterns, { - by: ["globPattern"], - order: ["asc"], + by: ['globPattern'], + order: ['asc'], }); - await setConfig("project", { cleanXmlPatterns: cleanXmlPatterns }); + await setConfig('project', { cleanXmlPatterns: cleanXmlPatterns }); } } } diff --git a/src/commands/hardis/project/configure/auth.ts b/src/commands/hardis/project/configure/auth.ts index 38c799674..b2764373b 100644 --- a/src/commands/hardis/project/configure/auth.ts +++ b/src/commands/hardis/project/configure/auth.ts @@ -1,156 +1,262 @@ /* jscpd:ignore-start */ -import { 
flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execSfdxJson, generateSSLCertificate, promptInstanceUrl, uxLog } from "../../../../common/utils"; -import { getOrgAliasUsername, promptOrg } from "../../../../common/utils/orgUtils"; -import { prompts } from "../../../../common/utils/prompts"; -import { checkConfig, getConfig, setConfig, setInConfigFile } from "../../../../config"; -import { WebSocketClient } from "../../../../common/websocketClient"; +import { + SfCommand, + Flags, + optionalOrgFlagWithDeprecations, + optionalHubFlagWithDeprecations, +} from '@salesforce/sf-plugins-core'; +import { fs, Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as yaml from 'js-yaml'; +import { execSfdxJson, generateSSLCertificate, git, promptInstanceUrl, uxLog } from '../../../../common/utils/index.js'; +import { getOrgAliasUsername, promptOrg } from '../../../../common/utils/orgUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { checkConfig, getConfig, setConfig, setInConfigFile } from '../../../../config/index.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ConfigureAuth extends SfCommand { + public static title = 'Configure authentication'; -export default class ConfigureAuth extends SfdxCommand { - public static title = "Configure authentication"; + public static description: string = ` +## Command Behavior - public static description = "Configure authentication from git branch to target org"; +**Configures authentication between a Git branch and a target Salesforce org for CI/CD deployments.** - public static examples = ["$ sfdx hardis:project:configure:auth"]; +This command facilitates the setup of automated CI/CD pipelines, enabling seamless deployments from specific Git branches to designated Salesforce orgs. It supports both standard Salesforce orgs and Dev Hub configurations, catering to various enterprise deployment workflows. + +Key functionalities include: + +- **Org Selection/Login:** Guides the user to select an existing Salesforce org or log in to a new one. +- **Git Branch Association:** Allows associating a specific Git branch with the chosen Salesforce org. +- **Merge Target Definition:** Enables defining target Git branches into which the configured branch can merge, ensuring controlled deployment flows. +- **Salesforce Username Configuration:** Prompts for the Salesforce username to be used by the CI server for deployments. +- **SSL Certificate Generation:** Automatically generates an SSL certificate for secure authentication. + +
+Technical explanations + +The command's implementation involves several key technical aspects: + +- **SF CLI Integration:** Utilizes +@salesforce/sf-plugins-core + for command structure and flag parsing. +- **Interactive Prompts:** Employs the +prompts + library for interactive user input, guiding the configuration process. +- **Git Integration:** Interacts with Git to retrieve branch information using +\`git().branch(["--list", "-r"])\` +. +- **Configuration Management:** Leverages internal utilities (\`checkConfig\`, \`getConfig\`, \`setConfig\`, \`setInConfigFile\`) to read from and write to project-specific configuration files (e.g., \`.sfdx-hardis..yml\`). +- **Salesforce CLI Execution:** Executes Salesforce CLI commands programmatically via \`execSfdxJson\` for org interactions. +- **SSL Certificate Generation:** Calls \`generateSSLCertificate\` to create necessary SSL certificates for JWT-based authentication. +- **WebSocket Communication:** Uses \`WebSocketClient\` for potential communication with external tools or processes, such as restarting the command in VS Code. +- **Dependency Check:** Ensures the presence of \`openssl\` on the system, which is required for SSL certificate generation. 
+`; + + public static examples = ['$ sf hardis:project:configure:auth']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - devhub: flags.boolean({ - char: "b", + public static flags: any = { + devhub: Flags.boolean({ + char: 'b', default: false, - description: "Configure project DevHub", + description: 'Configure project DevHub', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': optionalOrgFlagWithDeprecations, + 'target-dev-hub': optionalHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static supportsUsername = true; - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static supportsDevhubUsername = true; - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; - protected static requiresDependencies = ["openssl"]; + protected static requiresDependencies = ['openssl']; /* jscpd:ignore-end */ public async run(): Promise { - const devHub = this.flags.devhub || false; + const { flags } = await this.parse(ConfigureAuth); + const devHub = flags.devhub || false; + + uxLog("action", this, c.cyan(`This command will configure the authentication between a git branch and ${devHub ? 
"Dev Hub" : "a Salesforce org"}.`)); // Ask user to login to org - const prevUserName = devHub ? this.hubOrg?.getUsername() : this.org?.getUsername(); - /*uxLog(this, c.cyan("Please select org login into the org you want to configure the SFDX Authentication")); - await this.config.runHook("auth", { - checkAuth: true, - Command: this, - alias: "CONFIGURE_CI", - devHub, - }); */ + const prevUserName = devHub ? flags['target-dev-hub']?.getUsername() : flags['target-org']?.getUsername(); await promptOrg(this, { setDefault: true, devHub: devHub, - promptMessage: "Please select org login into the org you want to configure the SFDX Authentication", + promptMessage: `Please select or login into ${devHub ? "your Dev Hub org" : "the org you want to configure the SF CLI Authentication"}`, + defaultOrgUsername: devHub ? flags['target-dev-hub']?.getUsername() : flags['target-org']?.getUsername(), }); await checkConfig(this); // Check if the user has changed. If yes, ask to run the command again - const configGetRes = await execSfdxJson("sfdx config:get " + (devHub ? "defaultdevhubusername" : "defaultusername"), this, { + uxLog("action", this, c.cyan(`Checking if the org username has changed from ${c.bold(prevUserName)}...`)); + const configGetRes = await execSfdxJson('sf config get ' + (devHub ? 'target-dev-hub' : 'target-org'), this, { output: false, fail: false, }); - let newUsername = configGetRes?.result[0]?.value || ""; + let newUsername = configGetRes?.result[0]?.value || ''; newUsername = (await getOrgAliasUsername(newUsername)) || newUsername; if (prevUserName !== newUsername) { // Restart command so the org is selected as default org (will help to select profiles) - const infoMsg = "Default org changed. 
Please restart the same command if VsCode does not do that automatically for you :)"; - uxLog(this, c.yellow(infoMsg)); - const currentCommand = "sfdx " + this.id + " " + this.argv.join(" "); - WebSocketClient.sendMessage({ - event: "runSfdxHardisCommand", - sfdxHardisCommand: currentCommand, - }); + const infoMsg = + 'Default org changed. Please restart the same command if VsCode does not do that automatically for you :)'; + uxLog("warning", this, c.yellow(infoMsg)); + const currentCommand = 'sf ' + this.id + ' ' + this.argv.join(' '); + WebSocketClient.sendRunSfdxHardisCommandMessage(currentCommand); return { outputString: infoMsg }; } - const config = await getConfig("project"); + const config = await getConfig('project'); // Get branch name to configure if not Dev Hub - let branchName = ""; - let instanceUrl = "https://login.salesforce.com"; + let branchName = ''; + let instanceUrl = 'https://login.salesforce.com'; + const branches = await git().branch(["--list", "-r"]); if (!devHub) { const branchResponse = await prompts({ - type: "text", - name: "value", - message: c.cyanBright("What is the name of the git branch you want to configure ? Examples: developpement,recette,production"), + type: 'select', + name: 'value', + message: c.cyanBright( + 'What is the name of the git branch you want to configure Automated CI/CD deployments from ? (Ex: integration,uat,preprod,main)' + ), + choices: branches.all.map((branch: string) => { + return { + title: branch.replace('origin/', ''), + value: branch.replace('origin/', ''), + }; + }), + description: 'Enter the git branch name for this org configuration', + placeholder: 'Select the git branch name', }); - branchName = branchResponse.value.replace(/\s/g, "-"); + branchName = branchResponse.value.replace(/\s/g, '-'); /* if (["main", "master"].includes(branchName)) { - throw new SfdxError("You can not use main or master as deployment branch name. 
Maybe you want to use production ?"); + throw new SfError("You can not use main or master as deployment branch name. Maybe you want to use production ?"); } */ - instanceUrl = await promptInstanceUrl(["login", "test"], `${branchName} related org`, { - instanceUrl: devHub ? this.hubOrg.getConnection().instanceUrl : this.org.getConnection().instanceUrl, + } + + instanceUrl = await promptInstanceUrl( + devHub ? ["login"] : ['login', "test"], + devHub ? "Dev Hub org" : `${branchName} related org`, { + instanceUrl: devHub + ? flags['target-dev-hub']?.getConnection()?.instanceUrl || "" + : flags['target-org']?.getConnection()?.instanceUrl || "", + }); + + // Request merge targets + if (!devHub) { + let initialMergeTargets: string[] = []; + const branchConfigFile = `./config/branches/.sfdx-hardis.${branchName}.yml`; + if (fs.existsSync(branchConfigFile)) { + const branchConfig: any = yaml.load(fs.readFileSync(branchConfigFile, 'utf8')); + if (branchConfig && branchConfig.mergeTargets) { + initialMergeTargets = branchConfig.mergeTargets; + } + } + const mergeTargetsResponse = await prompts({ + type: 'multiselect', + name: 'value', + message: c.cyanBright( + `What are the target git branches that ${branchName} will be able to merge in ? (Ex: for integration, the target will be uat)` + ), + choices: branches.all.map((branch: string) => { + return { + title: branch.replace('origin/', ''), + value: branch.replace('origin/', ''), + }; + }), + initial: initialMergeTargets, + description: 'Select the git branches that this branch will be able to merge in', + placeholder: 'Select the target git branches', }); + const mergeTargets = mergeTargetsResponse.value.map((branch: string) => branch.replace(/\s/g, '-')); + // Update config file + await setInConfigFile( + [], + { + mergeTargets: mergeTargets, + }, + branchConfigFile + ); } + // Request username const usernameResponse = await prompts({ - type: "text", - name: "value", - initial: (devHub ? 
this.hubOrg.getUsername() : this.org.getUsername()) || "", + type: 'text', + name: 'value', + initial: (devHub ? flags['target-dev-hub']?.getUsername() || "" : flags['target-org'].getUsername() || "") || '', message: c.cyanBright( - `What is the Salesforce username that will be ${ - devHub ? "used as Dev Hub" : "used for deployments by CI server" - } ? Example: admin.sfdx@myclient.com`, + `What is the Salesforce username that will be ${devHub ? 'used as Dev Hub' : 'used for deployments by CI server' + } ? Example: admin.sfdx@myclient.com` ), + description: 'Enter the Salesforce username for this configuration', + placeholder: 'Ex: admin.sfdx@myclient.com', }); if (devHub) { - await setConfig("project", { + if (!config.devHubAlias || config.devHubAlias === '') { + const devHubAliasResponse = await prompts({ + type: 'text', + name: 'value', + message: c.cyanBright('What is the alias you want to set for your Dev Hub ?'), + description: 'Enter the alias for your Dev Hub', + initial: config.projectName ? 'DevHub_' + config.projectName : 'DevHub', + placeholder: 'Ex: MyCompany_DevHub', + }); + config.devHubAlias = devHubAliasResponse.value; + await setConfig('project', { + devHubAlias: config.devHubAlias, + }); + } + const configFile = await setConfig('project', { + devHubInstanceUrl: instanceUrl, devHubUsername: usernameResponse.value, }); + WebSocketClient.sendReportFileMessage(configFile!, 'Updated project config file', 'report'); } else { // Update config file + const branchConfigFile = `./config/branches/.sfdx-hardis.${branchName}.yml`; await setInConfigFile( [], { targetUsername: usernameResponse.value, instanceUrl, }, - `./config/branches/.sfdx-hardis.${branchName}.yml`, + branchConfigFile ); + WebSocketClient.sendReportFileMessage(branchConfigFile, `Updated ${branchName} config file`, 'report'); } + WebSocketClient.sendRefreshPipelineMessage(); + // Generate SSL certificate (requires openssl to be installed on computer) - const certFolder = devHub ? 
"./config/.jwt" : "./config/branches/.jwt"; + const certFolder = devHub ? './config/.jwt' : './config/branches/.jwt'; + const certName = devHub ? config.devHubAlias : branchName; - const orgConn = devHub ? this.hubOrg?.getConnection() : this.org?.getConnection(); + const orgConn = devHub ? flags['target-dev-hub']?.getConnection() : flags['target-org']?.getConnection(); const sslGenOptions = { - targetUsername: devHub ? this.hubOrg?.getUsername() : this.org?.getUsername(), + targetUsername: devHub ? flags['target-dev-hub']?.getUsername() : flags['target-org']?.getUsername(), }; await generateSSLCertificate(certName, certFolder, this, orgConn, sslGenOptions); + + uxLog("action", this, c.green(`Branch ${devHub ? '(DevHub)' : branchName} successfully configured for authentication!`)); + uxLog("warning", this, c.yellow('Make sure you have set the environment variables in your CI/CD platform')); + uxLog("warning", this, c.yellow('Don\'t forget to commit the sfdx-hardis config file and the encrypted certificate key in git!')); + // Return an object to be displayed with --json - return { outputString: "Configured branch for authentication" }; + return { outputString: 'Configured branch for authentication' }; } } diff --git a/src/commands/hardis/project/convert/profilestopermsets.ts b/src/commands/hardis/project/convert/profilestopermsets.ts index 9bcb6d0a9..0b8e846b6 100644 --- a/src/commands/hardis/project/convert/profilestopermsets.ts +++ b/src/commands/hardis/project/convert/profilestopermsets.ts @@ -1,73 +1,89 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson
} from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { execCommand, uxLog } from '../../../../common/utils/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ConvertProfilesToPermSets extends SfCommand { + public static title = 'Convert Profiles into Permission Sets'; -export default class ConvertProfilesToPermSets extends SfdxCommand { - public static title = "Convert Profiles into Permission Sets"; + public static description: string = ` +## Command Behavior - public static description = "Creates permission sets from existing profiles, with id PS_PROFILENAME"; +**Converts existing Salesforce Profiles into Permission Sets, facilitating a more granular and recommended security model.** - public static examples = ["$ sfdx hardis:project:convert:profilestopermsets"]; +This command helps in migrating permissions from Profiles to Permission Sets, which is a best practice for managing user access in Salesforce. It creates a new Permission Set for each specified Profile, adopting a naming convention of \`PS_PROFILENAME\`. - protected static flagsConfig = { - except: flags.array({ - char: "e", +Key functionalities: + +- **Profile to Permission Set Conversion:** Automatically extracts permissions from a Profile and creates a corresponding Permission Set. +- **Naming Convention:** New Permission Sets are named with a \`PS_\` prefix followed by the Profile name (e.g., \`PS_Standard_User\`). 
+- **Exclusion Filter:** Allows you to exclude specific Profiles from the conversion process using the \`--except\` flag. + +
+Technical explanations + +The command's technical implementation involves: + +- **External Plugin Integration:** It relies on the \`shane-sfdx-plugins\` (specifically the \`sf shane:profile:convert\` command) to perform the actual conversion. +- **File System Scan:** It reads the contents of the \`force-app/main/default/profiles\` directory to identify all available Profile metadata files. +- **Command Execution:** For each identified Profile (that is not excluded), it constructs and executes the \`sf shane:profile:convert\` command with the appropriate Profile name and desired Permission Set name. +- **Error Handling:** Includes basic error handling for the external command execution. +
+`; + + public static examples = ['$ sf hardis:project:convert:profilestopermsets']; + + public static flags: any = { + except: Flags.string({ + char: 'e', default: [], - description: "List of filters", + description: 'List of filters', + multiple: true, }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; /* jscpd:ignore-end */ // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["shane-sfdx-plugins"]; + protected static requiresSfdxPlugins = ['shane-sfdx-plugins']; public async run(): Promise { - const except = this.flags.except || []; + const { flags } = await this.parse(ConvertProfilesToPermSets); + const except = flags.except || []; - uxLog(this, c.cyan("This command will convert profiles into permission sets")); + uxLog("action", this, c.cyan('This command will convert profiles into permission sets')); - const sourceRootFolder = path.join(process.cwd() + "/force-app/main/default"); - const profilesFolder = path.join(sourceRootFolder, "profiles"); + const 
sourceRootFolder = path.join(process.cwd() + '/force-app/main/default'); + const profilesFolder = path.join(sourceRootFolder, 'profiles'); const objectsFolderContent = await fs.readdir(profilesFolder); for (const profileFile of objectsFolderContent) { - if (profileFile.includes(".profile-meta.xml")) { - const profileName = path.basename(profileFile).replace(".profile-meta.xml", ""); + if (profileFile.includes('.profile-meta.xml')) { + const profileName = path.basename(profileFile).replace('.profile-meta.xml', ''); if (except.filter((str) => profileName.toLowerCase().includes(str)).length > 0) { continue; } - const psName = "PS_" + profileName.split(" ").join("_"); - uxLog(this, c.cyan(`Generating Permission set ${c.green(psName)} from profile ${c.green(profileName)}`)); - const convertCommand = "sfdx shane:profile:convert" + ` -p "${profileName}"` + ` -n "${psName}"` + " -e"; + const psName = 'PS_' + profileName.split(' ').join('_'); + uxLog("action", this, c.cyan(`Generating Permission set ${c.green(psName)} from profile ${c.green(profileName)}`)); + const convertCommand = 'sf shane:profile:convert' + ` -p "${profileName}"` + ` -n "${psName}"` + ' -e'; await execCommand(convertCommand, this, { fail: true, output: true }); } } diff --git a/src/commands/hardis/project/create.ts b/src/commands/hardis/project/create.ts index 894c8c43c..4d7af41df 100644 --- a/src/commands/hardis/project/create.ts +++ b/src/commands/hardis/project/create.ts @@ -1,84 +1,77 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { ensureGitRepository, execCommand, uxLog } from "../../../common/utils"; -import { prompts } from "../../../common/utils/prompts"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { getConfig, setConfig } from "../../../config"; -import { WebSocketClient } from 
"../../../common/websocketClient"; -import { isSfdxProject } from "../../../common/utils/projectUtils"; -import { PACKAGE_ROOT_DIR } from "../../../settings"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class ProjectCreate extends SfdxCommand { - public static title = "Login"; - - public static description = "Create a new SFDX Project"; - - public static examples = ["$ sfdx hardis:project:create"]; - - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { ensureGitRepository, execCommand, uxLog } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { CONSTANTS, getConfig, promptForProjectName, setConfig } from '../../../config/index.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { isSfdxProject } from '../../../common/utils/projectUtils.js'; +import { PACKAGE_ROOT_DIR } from '../../../settings.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class ProjectCreate extends SfCommand { + public static title = 'Login'; + + public static description = 'Create a new SFDX Project'; + + public static examples = ['$ sf hardis:project:create']; + + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: 
messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debugMode || false; + const { flags } = await this.parse(ProjectCreate); + this.debugMode = flags.debug || false; // Check git repo await ensureGitRepository({ clone: true }); const devHubPrompt = await prompts({ - name: "orgType", - type: "select", - message: "To perform implementation, will your project use scratch org or source tracked sandboxes only ?", + name: 'orgType', + type: 'select', + message: 'To perform implementation, will your project use scratch org or source tracked sandboxes only ?', + description: 'Choose the type of development orgs your project will use', + placeholder: 'Select org type', choices: [ { - title: "Scratch orgs only", - value: "scratch", + title: 'Scratch orgs only', + value: 'scratch', }, { - title: "Source tracked sandboxes only", - value: "sandbox", + title: 'Source tracked sandboxes only', + value: 'sandbox', }, { - title: "Source tracked sandboxes and scratch orgs", - value: "sandboxAndScratch", + title: 'Source tracked sandboxes and scratch orgs', + value: 'sandboxAndScratch', }, 
], }); - if (["scratch", "sandboxAndScratch"].includes(devHubPrompt.orgType)) { + if (['scratch', 'sandboxAndScratch'].includes(devHubPrompt.orgType)) { // Connect to DevHub - await this.config.runHook("auth", { + await this.config.runHook('auth', { checkAuth: true, Command: this, devHub: true, @@ -86,21 +79,18 @@ export default class ProjectCreate extends SfdxCommand { }); } // Project name - let config = await getConfig("project"); + let config = await getConfig('project'); let projectName = config.projectName; + let setProjectName = false; if (projectName == null) { // User prompts - const projectRes = await prompts({ - type: "text", - name: "projectName", - message: "What is the name of your project ? (example: MyClient)", - }); - projectName = projectRes.projectName.toLowerCase().replace(" ", "_"); + projectName = await promptForProjectName(); + setProjectName = true; } // Create sfdx project only if not existing if (!isSfdxProject()) { - const createCommand = "sfdx force:project:create" + ` --projectname "${projectName}"` + " --manifest"; + const createCommand = 'sf project generate' + ` --name "${projectName}"` + ' --manifest'; await execCommand(createCommand, this, { output: true, fail: true, @@ -114,36 +104,52 @@ export default class ProjectCreate extends SfdxCommand { await fs.rm(path.join(process.cwd(), projectName), { recursive: true }); } // Copy default project files - uxLog(this, "Copying default files..."); - await fs.copy(path.join(PACKAGE_ROOT_DIR, "defaults/ci", "."), process.cwd(), { overwrite: false }); + uxLog("action", this, 'Copying default files...'); + await fs.copy(path.join(PACKAGE_ROOT_DIR, 'defaults/ci', '.'), process.cwd(), { overwrite: false }); + + if (setProjectName) { + await setConfig('project', { projectName: projectName }); + } - config = await getConfig("project"); + config = await getConfig('project'); if (config.developmentBranch == null) { // User prompts const devBranchRes = await prompts({ - type: "text", - name: 
"devBranch", + type: 'text', + name: 'devBranch', message: - "What is the name of your default development branch ? (Examples: if you manage RUN and BUILD, it can be integration. If you manage RUN only, it can be preprod)", - initial: "integration", + 'What is the name of your default development branch ? (Examples: if you manage RUN and BUILD, it can be integration. If you manage RUN only, it can be preprod)', + initial: 'integration', + description: 'Enter the name of your main development branch', + placeholder: 'Ex: integration', }); - await setConfig("project", { developmentBranch: devBranchRes.devBranch }); + await setConfig('project', { developmentBranch: devBranchRes.devBranch }); } - await setConfig("project", { autoCleanTypes: ["destructivechanges"] }); - + // Initialize autoCleanTypes + const defaultAutoCleanTypes = [ + 'destructivechanges', + 'flowPositions', + 'minimizeProfiles']; + await setConfig('project', { + autoCleanTypes: defaultAutoCleanTypes + }); + uxLog("warning", this, c.yellow(`autoCleanTypes ${defaultAutoCleanTypes.join(",")} has been activated on the new project.`)); + uxLog("warning", this, c.bold(c.yellow(`If you install CI/CD on an existing org with many rights in Profiles, you might remove "minimizeProfiles" from .sfdx-hardis.yml autoCleanTypes property `))); // Message instructions uxLog( + "action", this, c.cyan( - "SFDX Project has been created. You can continue the steps in documentation at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-home/", - ), + `SFDX Project has been created. 
You can continue the steps in documentation at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-home/` + ) ); // Trigger commands refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshCommands" }); + WebSocketClient.sendRefreshCommandsMessage(); // Return an object to be displayed with --json - return { outputString: "Created SFDX Project" }; + return { outputString: 'Created SFDX Project' }; } + } diff --git a/src/commands/hardis/project/deploy/notify.ts b/src/commands/hardis/project/deploy/notify.ts new file mode 100644 index 000000000..ddb99c135 --- /dev/null +++ b/src/commands/hardis/project/deploy/notify.ts @@ -0,0 +1,176 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { CONSTANTS } from '../../../../config/index.js'; +import { buildCheckDeployCommitSummary, handlePostDeploymentNotifications } from '../../../../common/utils/gitUtils.js'; +import { GitProvider, PullRequestData } from '../../../../common/gitProvider/index.js'; +import c from "chalk" +import { uxLog } from '../../../../common/utils/index.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DeployNotify extends SfCommand { + public static title = 'Deployment Notifications'; + + public static description = `Post notifications related to: + +- **Deployment simulation** _(use with --check-only)_ + +- **Deployment process** _(to call only if your deployment is successful)_ + +### Integrations + +According to the [integrations you configured](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integrations-home/), notifications can contain deployment information and [Flow Visual Git 
Diff](${CONSTANTS.DOC_URL_ROOT}/salesforce-deployment-assistant-home/#flow-visual-git-diff) + + - GitHub, Gitlab, Azure DevOps, Bitbucket comments on Pull Requests (including Flows Visual Git Diff) + + - Slack, Microsoft Teams, Email deployment summary after a successful deployment + + - JIRA tags and comments on tickets that just has been deployed + +![](${CONSTANTS.DOC_URL_ROOT}/assets/images/screenshot-jira-gitlab.jpg) + +![](${CONSTANTS.DOC_URL_ROOT}/assets/images/screenshot-jira-slack.jpg) + +### Flows Visual Git Diff + +- Visually show you the differences on a diagram + +- Display the update details without having to open any XML ! + +🟩 = added + +🟥 = removed + +🟧 = updated + +![](${CONSTANTS.DOC_URL_ROOT}/assets/images/flow-visual-git-diff.jpg) + +![](${CONSTANTS.DOC_URL_ROOT}/assets/images/flow-visual-git-diff-2.jpg) + +### In custom CI/CD workflow + +Example of usage in a custom CI/CD pipeline: + +\`\`\`bash +# Disable exit-on-error temporarily +set +e + +# Run the deploy command +sf project deploy start [....] +RET_CODE=$? + +# Re-enable exit-on-error +set -e + +# Determine MYSTATUS based on return code +if [ $RET_CODE -eq 0 ]; then + MYSTATUS="valid" +else + MYSTATUS="invalid" +fi + +# Run the notify command with MYSTATUS +sf hardis:project:deploy:notify --check-only --deploy-status "$MYSTATUS" +\`\`\` + +### Other usages + +This command is for custom SF Cli pipelines, if you are a sfdx-hardis user, it is already embedded in sf hardis:deploy:smart. 
+ +You can also use [sfdx-hardis wrapper commands of SF deployment commands](${CONSTANTS.DOC_URL_ROOT}/salesforce-deployment-assistant-setup/#using-custom-cicd-pipeline) +` + + public static examples = [ + '$ sf hardis:project:deploy:notify --check-only --deploy-status valid --message "This deployment check is valid\\n\\nYahooo !!"', + '$ sf hardis:project:deploy:notify --check-only --deploy-status invalid --message "This deployment check has failed !\\n\\Oh no !!"', + '$ sf hardis:project:deploy:notify --deploy-status valid --message "This deployment has been processed !\\n\\nYahooo !!"' + ]; + + public static flags: any = { + "check-only": Flags.boolean({ + char: 'c', + default: false, + description: `Use this option to send notifications from a Deployment simulation job`, + }), + "deploy-status": Flags.string({ + char: 's', + options: ["valid", "invalid", "unknown"], + default: "unknown", + description: `Send success, failure or unknown (default) to indicate if the deployment or deployment simulation is in success or not`, + }), + message: Flags.string({ + char: "m", + default: "", + description: "Custom message that you want to be added in notifications (string or markdown format)" + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + /* jscpd:ignore-end */ + + protected checkOnly = false; + protected message = ""; + protected debugMode = false; + protected deployStatus: "valid" | "invalid" | "unknown" = "unknown" + + public async run(): Promise { + const { flags } = await this.parse(DeployNotify); + 
this.checkOnly = flags["check-only"] === true ? true : false; + this.deployStatus = flags["deploy-status"] || "unknown"; + this.message = flags.message || ""; + this.debugMode = flags.debug || false; + await setConnectionVariables(flags['target-org']?.getConnection(), true); + + // Deployment check mode + if (this.checkOnly) { + uxLog("action", this, c.cyan("Handling Pull Request comments for a deployment check job...")); + // Add deployment info + await buildCheckDeployCommitSummary(); + const prData: Partial = { + messageKey: "deployment", + title: + (this.checkOnly && this.deployStatus === "valid") ? "✅ Deployment check success" : + (!this.checkOnly && this.deployStatus === "valid") ? "✅ Deployment success" : + (this.checkOnly && this.deployStatus === "invalid") ? "❌ Deployment check failure" : + (!this.checkOnly && this.deployStatus === "invalid") ? "❌ Deployment failure" : + (this.checkOnly && this.deployStatus === "unknown") ? "🤷 Deployment check status unknown" : + "🤷 Deployment status unknown", + deployErrorsMarkdownBody: this.message, + status: this.deployStatus === "valid" ? "valid" : this.deployStatus === "invalid" ? 
"invalid" : "tovalidate", + }; + globalThis.pullRequestData = Object.assign(globalThis.pullRequestData || {}, prData); + // Post comments :) + await GitProvider.managePostPullRequestComment(); + } + + // Post notifications after successful deployment + else if (this.checkOnly === false && this.deployStatus === "valid") { + await handlePostDeploymentNotifications(flags, flags["target-org"].getUsername(), false, false, this.debugMode, this.message); + } + // Fallback + else { + uxLog("warning", this, c.yellow("No notification has been sent")); + uxLog("warning", this, c.yellow("- Pull Request comments are sent if --check-only is true")); + uxLog("warning", this, c.yellow("- Slack / Teams / Email / JIRA messages are sent only if --check-only is false and --deploy-status is valid")); + } + + return { message: "Processed notifications" } + } +} \ No newline at end of file diff --git a/src/commands/hardis/project/deploy/quick.ts b/src/commands/hardis/project/deploy/quick.ts new file mode 100644 index 000000000..4ab539f54 --- /dev/null +++ b/src/commands/hardis/project/deploy/quick.ts @@ -0,0 +1,128 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { AnyJson } from "@salesforce/ts-types"; +import { wrapSfdxCoreCommand } from "../../../../common/utils/wrapUtils.js"; +import { checkDeploymentOrgCoverage, executePrePostCommands, extractOrgCoverageFromLog } from '../../../../common/utils/deployUtils.js'; +import { GitProvider } from '../../../../common/gitProvider/index.js'; +import { handlePostDeploymentNotifications } from '../../../../common/utils/gitUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +export default class ProjectDeployStart extends SfCommand { + public static description = `sfdx-hardis wrapper for **sf project deploy quick** that displays tips to solve deployment errors. 
+ +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_quick_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +\`\`\`yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... 
+ skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +\`\`\` +`; + + public static aliases = [ + "hardis:deploy:quick" + ] + + public static flags: any = { + "api-version": Flags.integer({ + char: "a", + description: "api-version", + }), + async: Flags.boolean({ + description: "async", + exclusive: ["wait"], + }), + "target-org": Flags.requiredOrg(), + tests: Flags.string({ + description: "tests", + }), + "--job-id": Flags.string({ + char: "i", + description: "job-id", + }), + "--use-most-recent": Flags.boolean({ + char: "r", + description: "use-most-recent", + }), + wait: Flags.integer({ + char: "w", + default: 33, + min: 1, + description: "wait", + exclusive: ["async"], + }), + debug: Flags.boolean({ + default: false, + description: "debug", + }), + }; + + public static requiresProject = true; + + public async run(): Promise { + const { flags } = await this.parse(ProjectDeployStart); + const conn = flags["target-org"].getConnection(); + await setConnectionVariables(flags['target-org']?.getConnection(), true); + // Run pre deployment commands if defined + await executePrePostCommands('commandsPreDeploy', { success: true, checkOnly: false, conn: conn }); + const result = await wrapSfdxCoreCommand("sf project deploy start", this.argv, this, flags.debug); + // Check org coverage if requested + if (flags['coverage-formatters'] && result.stdout) { + const orgCoveragePercent = await extractOrgCoverageFromLog(result.stdout + result.stderr || ''); + const checkOnly = false; + if (orgCoveragePercent) { + try { + await checkDeploymentOrgCoverage(Number(orgCoveragePercent), { check: checkOnly }); + } catch (errCoverage) { + await GitProvider.managePostPullRequestComment(); + throw errCoverage; + } + } + } + // Run post deployment commands if defined + await executePrePostCommands('commandsPostDeploy', { success: process.exitCode === 0, checkOnly: false, conn: conn }); + // Post success deployment notifications + if (process.exitCode === 0) { + 
await handlePostDeploymentNotifications(flags, flags["target-org"].getUsername(), false, false, flags["debug"]); + } + return result; + } +} + +/* jscpd:ignore-end */ \ No newline at end of file diff --git a/src/commands/hardis/project/deploy/simulate.ts b/src/commands/hardis/project/deploy/simulate.ts new file mode 100644 index 000000000..1f453971d --- /dev/null +++ b/src/commands/hardis/project/deploy/simulate.ts @@ -0,0 +1,102 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { promptOrgUsernameDefault, setConnectionVariables } from '../../../../common/utils/orgUtils.js'; +import { wrapSfdxCoreCommand } from '../../../../common/utils/wrapUtils.js'; + + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class DeploySimulate extends SfCommand { + public static title = 'Simulate the deployment of a metadata in an org prompted to the user\nUsed by VsCode Extension'; + + public static description: string = ` +## Command Behavior + +**Simulates the deployment of Salesforce metadata to a target org, primarily used by the VS Code Extension for quick validation.** + +This command allows developers to perform a dry run of a metadata deployment without actually committing changes to the Salesforce org. This is incredibly useful for: + +- **Pre-Deployment Validation:** Identifying potential errors, warnings, or conflicts before a full deployment. +- **Troubleshooting:** Quickly testing metadata changes and debugging issues in a safe environment. +- **Local Development:** Validating changes to individual metadata components (e.g., a Permission Set) without needing to run a full CI/CD pipeline. 
+ +Key functionalities: + +- **Source Specification:** Takes a source file or directory (\`--source-dir\`) containing the metadata to be simulated. +- **Target Org Selection:** Prompts the user to select a Salesforce org for the simulation. This allows for flexible testing across different environments. +- **Dry Run Execution:** Executes the Salesforce CLI's \`sf project deploy start --dry-run\` command, which performs all validation steps but does not save any changes to the org. + +This command is primarily used by the VS Code Extension to provide immediate feedback to developers. + +
+Technical explanations + +The command's technical implementation involves: + +- **Interactive Org Prompt:** Uses \`promptOrgUsernameDefault\` to allow the user to select the target Salesforce org for the deployment simulation. +- **Salesforce CLI Integration:** It constructs and executes the \`sf project deploy start\` command with the \`--dry-run\` and \`--ignore-conflicts\` flags. The \`--source-dir\` and \`--target-org\` flags are dynamically populated based on user input. +- **\`wrapSfdxCoreCommand\`:** This utility is used to execute the Salesforce CLI command and capture its output. +- **Connection Variables:** Ensures Salesforce connection variables are set using \`setConnectionVariables\`. +
+`; + + public static examples = [ + '$ sf hardis:project:deploy:simulate --source-dir force-app/defaut/main/permissionset/PS_Admin.permissionset-meta.xml', + ]; + + // public static args = [{name: 'file'}]; + + public static flags: any = { + "source-dir": Flags.string({ + char: "f", + description: "Source file or directory to simulate the deployment", + multiple: true, + required: true + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + /* jscpd:ignore-end */ + + protected debugMode = false; + + public async run(): Promise { + const { flags } = await this.parse(DeploySimulate); + const sourceDirOrFile = flags["source-dir"]; + this.debugMode = flags.debug || false; + + // Prompt target org to user + const orgUsername = await promptOrgUsernameDefault(this, + flags['target-org'].getUsername(), + { devHub: false, setDefault: false, message: `Do you want to use org ${flags['target-org'].getConnection().instanceUrl} to simulate deployment of metadata ${sourceDirOrFile} ?`, quickOrgList: true }); + + await setConnectionVariables(flags['target-org']?.getConnection(), true); + + // Build command + const simulateDeployCommand = "sf project deploy start" + + ` --source-dir "${sourceDirOrFile}"` + + ` --target-org ${orgUsername}` + + ` --ignore-conflicts` + + ` --dry-run`; + + // Simulate deployment + const result = await wrapSfdxCoreCommand(simulateDeployCommand, [], this, flags.debug); + return result; + } +} diff --git a/src/commands/hardis/project/deploy/smart.ts b/src/commands/hardis/project/deploy/smart.ts new 
file mode 100644 index 000000000..e1dd7e031 --- /dev/null +++ b/src/commands/hardis/project/deploy/smart.ts @@ -0,0 +1,764 @@ +/* jscpd:ignore-start */ +/* +To test locally, you can call the command like that: + +Gitlab: CI=true CI_SFDX_HARDIS_GITLAB_TOKEN=XXX CI_PROJECT_ID=YYY CI_JOB_TOKEN=xxx NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --target-org nicolas.vuillamy@cloudity.com.demointeg + +Azure: CI=true SYSTEM_ACCESSTOKEN=XXX SYSTEM_COLLECTIONURI=https://dev.azure.com/MyAzureCollection/ BUILD_REPOSITORY_ID=XXX CI_JOB_TOKEN=xxx NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --target-org nicolas.vuillamy@cloudity.com.muuuurf + +- Before, you need to make a sf alias set myBranch=myUsername +- You can find CI_PROJECT_ID with https://gitlab.com/api/v4/projects?search=YOUR-REPO-NAME + +*/ + +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { MetadataUtils } from '../../../../common/metadata-utils/index.js'; +import { + createTempDir, + getCurrentGitBranch, + getLatestGitCommit, + isCI, + uxLog, +} from '../../../../common/utils/index.js'; +import { CONSTANTS, getConfig } from '../../../../config/index.js'; +import { smartDeploy, removePackageXmlContent, createEmptyPackageXml } from '../../../../common/utils/deployUtils.js'; +import { extendPackageFileWithDependencies } from '../../../../common/utils/deltaUtils.js'; +import { isProductionOrg, promptOrgUsernameDefault, setConnectionVariables } from '../../../../common/utils/orgUtils.js'; +import { getApexTestClasses } from '../../../../common/utils/classUtils.js'; +import { listMajorOrgs, restoreListViewMine } from '../../../../common/utils/orgConfigUtils.js'; +import { GitProvider } from '../../../../common/gitProvider/index.js'; +import { 
buildCheckDeployCommitSummary, callSfdxGitDelta, getGitDeltaScope, handlePostDeploymentNotifications } from '../../../../common/utils/gitUtils.js'; +import { parsePackageXmlFile } from '../../../../common/utils/xmlUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class SmartDeploy extends SfCommand { + public static title = 'Smart Deploy sfdx sources to org'; + + public static aliases = [ + "hardis:project:deploy:sources:dx" + ] + + public static description = `Smart deploy of SFDX sources to target org, with many useful options. + +In case of errors, [tips to fix them](${CONSTANTS.DOC_URL_ROOT}/deployTips/) will be included within the error messages. + +### Quick Deploy + +In case Pull Request comments are configured on the project, Quick Deploy will try to be used (equivalent to button Quick Deploy) + +If you do not want to use QuickDeploy, define variable \`SFDX_HARDIS_QUICK_DEPLOY=false\` + +- [GitHub Pull Requests comments config](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-github/) +- [Gitlab Merge requests notes config](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-gitlab/) +- [Azure Pull Requests comments config](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-azure/) + +### Delta deployments + +To activate delta deployments, define property \`useDeltaDeployment: true\` in \`config/.sfdx-hardis.yml\`. 
+ +This will activate delta deployments only between minor and major branches (major to major remains full deployment mode) + +If you want to force the delta deployment into major orgs (ex: preprod to prod), this is not recommended but you can use env variable ALWAYS_ENABLE_DELTA_DEPLOYMENT=true + +### Smart Deployments Tests + +Not all metadata updates can break test classes, use Smart Deployment Tests to skip running test classes if ALL the following conditions are met: + +- Delta deployment is activated and applicable to the source and target branches +- Delta deployed metadatas are all matching the list of **NOT_IMPACTING_METADATA_TYPES** (see below) +- Target org is not a production org + +Activate Smart Deployment tests with: + +- env variable \`USE_SMART_DEPLOYMENT_TESTS=true\` +- .sfdx-hardis.yml config property \`useSmartDeploymentTests: true\` + +Defaut list for **NOT_IMPACTING_METADATA_TYPES** (can be overridden with comma-separated list on env var NOT_IMPACTING_METADATA_TYPES) + +- Audience +- AuraDefinitionBundle +- Bot +- BotVersion +- ContentAsset +- CustomObjectTranslation +- CustomSite +- CustomTab +- Dashboard +- ExperienceBundle +- Flexipage +- GlobalValueSetTranslation +- Layout +- LightningComponentBundle +- NavigationMenu +- ReportType +- Report +- SiteDotCom +- StandardValueSetTranslation +- StaticResource +- Translations + +Note: if you want to disable Smart test classes for a PR, add **nosmart** in the text of the latest commit. + +### Dynamic deployment items / Overwrite management + +If necessary,you can define the following files (that supports wildcards *): + +- \`manifest/package-no-overwrite.xml\`: Every element defined in this file will be deployed only if it is not existing yet in the target org (can be useful with ListView for example, if the client wants to update them directly in production org). 
+ - Can be overridden for a branch using .sfdx-hardis.yml property **packageNoOverwritePath** or environment variable PACKAGE_NO_OVERWRITE_PATH (for example, define: \`packageNoOverwritePath: manifest/package-no-overwrite-main.xml\` in config file \`config/.sfdx-hardis.main.yml\`) +- \`manifest/packageXmlOnChange.xml\`: Every element defined in this file will not be deployed if it already has a similar definition in target org (can be useful for SharingRules for example) + +See [Overwrite management documentation](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-config-overwrite/) + +### Packages installation + +You can define a list of package to install during deployments using property \`installedPackages\` + +- If \`INSTALL_PACKAGES_DURING_CHECK_DEPLOY\` is defined as \`true\` (or \`installPackagesDuringCheckDeploy: true\` in \`.sfdx-hardis.yml\`), packages will be installed even if the command is called with \`--check\` mode +- You can automatically update this property by listing all packages installed on an org using command \`sf hardis:org:retrieve:packageconfig\` + +Example: + +\`\`\`yaml +installedPackages: + - Id: 0A35r0000009EtECAU + SubscriberPackageId: 033i0000000LVMYAA4 + SubscriberPackageName: Marketing Cloud + SubscriberPackageNamespace: et4ae5 + SubscriberPackageVersionId: 04t6S000000l11iQAA + SubscriberPackageVersionName: Marketing Cloud + SubscriberPackageVersionNumber: 236.0.0.2 + installOnScratchOrgs: true // true or false depending you want to install this package when creating a new scratch org + installDuringDeployments: true // set as true to install package during a deployment using sf hardis:project:deploy:smart + installationkey: xxxxxxxxxxxxxxxxxxxx // if the package has a password, write it in this property + - Id: 0A35r0000009F9CCAU + SubscriberPackageId: 033b0000000Pf2AAAS + SubscriberPackageName: Declarative Lookup Rollup Summaries Tool + SubscriberPackageNamespace: dlrs + SubscriberPackageVersionId: 04t5p000001BmLvAAK + 
SubscriberPackageVersionName: Release + SubscriberPackageVersionNumber: 2.15.0.9 + installOnScratchOrgs: true + installDuringDeployments: true +\`\`\` + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +\`\`\`yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: 
someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... + skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +\`\`\` + +### Pull Requests Custom Behaviors + +If some words are found **in the Pull Request description**, special behaviors will be applied + +| Word | Behavior | +| :--- | :--- | +| NO_DELTA | Even if delta deployments are activated, a deployment in mode **full** will be performed for this Pull Request | +| PURGE_FLOW_VERSIONS | After deployment, inactive and obsolete Flow Versions will be deleted (equivalent to command sf hardis:org:purge:flow)
**Caution: This will also purge active Flow Interviews !** | +| DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT | If a file manifest/destructiveChanges.xml is found, it will be executed in a separate step, after the deployment of the main package | + +> For example, define \`PURGE_FLOW_VERSIONS\` and \`DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT\` in your Pull Request comments if you want to delete fields that are used in an active flow. + +Note: it is also possible to define these behaviors as ENV variables: + +- For all deployments (example: \`PURGE_FLOW_VERSIONS=true\`) +- For a specific branch, by appending the target branch name (example: \`PURGE_FLOW_VERSIONS_UAT=true\`) + +### Deployment plan (deprecated) + +If you need to deploy in multiple steps, you can define a property \`deploymentPlan\` in \`.sfdx-hardis.yml\`. + +- If a file \`manifest/package.xml\` is found, it will be placed with order 0 in the deployment plan + +- If a file \`manifest/destructiveChanges.xml\` is found, it will be executed as --postdestructivechanges + +- If env var \`SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES\` is defined as \`false\` , split of package.xml will be applied + +Example: + +\`\`\`yaml +deploymentPlan: + packages: + - label: Deploy Flow-Workflow + packageXmlFile: manifest/splits/packageXmlFlowWorkflow.xml + order: 6 + - label: Deploy SharingRules - Case + packageXmlFile: manifest/splits/packageXmlSharingRulesCase.xml + order: 30 + waitAfter: 30 +\`\`\` + +### Automated fixes post deployments + +#### List view with scope Mine + +If you defined a property **listViewsToSetToMine** in your .sfdx-hardis.yml, related ListViews will be set to Mine ( see command <${CONSTANTS.DOC_URL_ROOT}/hardis/org/fix/listviewmine/> ) + +Example: + +\`\`\`yaml +listViewsToSetToMine: + - "Operation__c:MyCurrentOperations" + - "Operation__c:MyFinalizedOperations" + - "Opportunity:Default_Opportunity_Pipeline" + - "Opportunity:MyCurrentSubscriptions" + - "Opportunity:MySubscriptions" + - "Account:MyActivePartners" 
+\`\`\` + +Troubleshooting: if you need to fix ListViews with mine from an alpine-linux based docker image, use this workaround in your dockerfile: + +\`\`\`dockerfile +# Do not use puppeteer embedded chromium +RUN apk add --update --no-cache chromium +ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" +ENV CHROMIUM_PATH="/usr/bin/chromium-browser" +ENV PUPPETEER_EXECUTABLE_PATH="$\\{CHROMIUM_PATH}" // remove \\ before { +\`\`\` + +If you need to increase the deployment waiting time (sf project deploy start --wait arg), you can define env variable SFDX_DEPLOY_WAIT_MINUTES (default: 120) + +If you need notifications to be sent using the current Pull Request and not the one just merged ([see use case](https://github.com/hardisgroupcom/sfdx-hardis/issues/637#issuecomment-2230798904)), define env variable SFDX_HARDIS_DEPLOY_BEFORE_MERGE=true + +If you want to disable the calculation and display of Flow Visual Git Diff in Pull Request comments, define variable **SFDX_DISABLE_FLOW_DIFF=true** +`; + + public static examples = [ + '$ sf hardis:project:deploy:smart', + '$ sf hardis:project:deploy:smart --check', + '$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTests', + "$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTests --runtests '^(?!FLI|MyPrefix).*'", + '$ sf hardis:project:deploy:smart --check --testlevel RunRepositoryTestsExceptSeeAllData', + '$ sf hardis:project:deploy:smart', + '$ FORCE_TARGET_BRANCH=preprod NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org nicolas.vuillamy@myclient.com.preprod', + '$ SYSTEM_ACCESSTOKEN=xxxxxx SYSTEM_COLLECTIONURI=https://dev.azure.com/xxxxxxx/ SYSTEM_TEAMPROJECT="xxxxxxx" BUILD_REPOSITORY_ID=xxxxx SYSTEM_PULLREQUEST_PULLREQUESTID=1418 FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my.salesforce@org.com', + '$ 
CI_SFDX_HARDIS_BITBUCKET_TOKEN=xxxxxx BITBUCKET_WORKSPACE=sfdxhardis-demo BITBUCKET_REPO_SLUG=test BITBUCKET_BUILD_NUMBER=1 BITBUCKET_BRANCH=uat BITBUCKET_PR_ID=2 FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my-salesforce-org@client.com', + '$ GITHUB_TOKEN=xxxx GITHUB_REPOSITORY=my-user/my-repo FORCE_TARGET_BRANCH=uat NODE_OPTIONS=--inspect-brk sf hardis:project:deploy:smart --check --websocket localhost:2702 --skipauth --target-org my-salesforce-org@client.com' + ]; + + + public static flags: any = { + check: Flags.boolean({ + char: 'c', + default: false, + description: messages.getMessage('checkOnly'), + }), + testlevel: Flags.string({ + char: 'l', + options: [ + 'NoTestRun', + 'RunSpecifiedTests', + 'RunRepositoryTests', + 'RunRepositoryTestsExceptSeeAllData', + 'RunLocalTests', + 'RunAllTestsInOrg', + ], + description: messages.getMessage('testLevelExtended'), + }), + runtests: Flags.string({ + char: 'r', + description: `If testlevel=RunSpecifiedTests, please provide a list of classes. +If testlevel=RunRepositoryTests, can contain a regular expression to keep only class names matching it. 
If not set, will run all test classes found in the repo.`, + }), + packagexml: Flags.string({ + char: 'p', + description: 'Path to package.xml containing what you want to deploy in target org', + }), + delta: Flags.boolean({ + default: false, + description: 'Applies sfdx-git-delta to package.xml before other deployment processes', + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + 'target-org': requiredOrgFlagWithDeprecations, + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected checkOnly = false; + protected configInfo: any = {}; + protected testLevel; + protected testClasses; + protected smartDeployOptions: { + targetUsername: string; + conn: any; // Connection from Salesforce + testClasses: string; + postDestructiveChanges?: string; + preDestructiveChanges?: string; + delta?: boolean; + destructiveChangesAfterDeployment?: boolean; + extraCommands?: any[]; + }; + protected packageXmlFile: string; + protected delta = false; + protected debugMode = false; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(SmartDeploy); + this.configInfo = await getConfig('branch'); + this.checkOnly = flags.check || false; + const deltaFromArgs = flags.delta || false; + const packageXml = flags.packagexml || null; + this.debugMode = flags.debug || false; + const currentGitBranch = await getCurrentGitBranch(); + // Get target org + let targetUsername = flags['target-org'].getUsername(); + if (!isCI) { + uxLog("warning", this, c.yellow("Just to be sure, please select the org you want to use for this command :)")) + targetUsername = await 
promptOrgUsernameDefault(this, targetUsername, { devHub: false, setDefault: false, scratch: false }); + } + + await setConnectionVariables(flags['target-org']?.getConnection(), true); + + await this.initTestLevelAndTestClasses(flags); + + await this.handlePackages(targetUsername); + + // Compute commitsSummary and store it in globalThis.pullRequestData.commitsSummary + if (this.checkOnly) { + await buildCheckDeployCommitSummary() + } + + // Get package.xml & destructiveChanges.xml + await this.initPackageXmlAndDestructiveChanges(packageXml, targetUsername, flags); + + // Compute and apply delta if required + await this.handleDeltaDeployment(deltaFromArgs, targetUsername, currentGitBranch); + + // Set smart deploy options + await this.setAdditionalOptions(targetUsername); + + // Process deployment (or deployment check) + const { messages, quickDeploy, deployXmlCount } = await smartDeploy( + this.packageXmlFile, + this.checkOnly, + this.testLevel, + this.debugMode, + this, + this.smartDeployOptions + ); + + const deployExecuted = !this.checkOnly && deployXmlCount > 0 ? 
true : false; + + // Set ListViews to scope Mine if defined in .sfdx-hardis.yml + if (this.configInfo.listViewsToSetToMine && deployExecuted) { + await restoreListViewMine(this.configInfo.listViewsToSetToMine, flags['target-org'].getConnection(), { + debug: this.debugMode, + }); + } + + // Send notification of deployment success + if (deployExecuted) { + await handlePostDeploymentNotifications(flags, targetUsername, quickDeploy, this.delta, this.debugMode); + } + // Return result + return { orgId: flags['target-org'].getOrgId(), outputString: messages.join('\n') }; + } + + + private async setAdditionalOptions(targetUsername: string) { + const prInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + if (prInfo) { + this.smartDeployOptions.extraCommands = this.smartDeployOptions.extraCommands || []; + if (prInfo.customBehaviors?.purgeFlowVersions === true) { + this.smartDeployOptions.extraCommands.push({ + id: `PURGE_FLOW_VERSIONS`, + command: `sf hardis:org:purge:flow --no-prompt --delete-flow-interviews --target-org ${targetUsername}`, + label: 'Purge Flow Versions (added from PR config)', + skipIfError: true, + context: 'process-deployment-only', + }) + uxLog("action", this, c.cyan('[SmartDeploy] Purge Flow Versions command added to deployment options (from PR config)')); + } + if (prInfo.customBehaviors?.destructiveChangesAfterDeployment === true) { + if (this.smartDeployOptions.postDestructiveChanges) { + this.smartDeployOptions.destructiveChangesAfterDeployment = prInfo.customBehaviors?.destructiveChangesAfterDeployment; + const emptyPackageXml = await createEmptyPackageXml(); + const deployCommand = + `sf project deploy` + + ' start' + + ` --manifest "${emptyPackageXml}"` + + ' --ignore-warnings' + + ' --ignore-conflicts' + + ` --post-destructive-changes ${this.smartDeployOptions.postDestructiveChanges}` + + ` --target-org ${targetUsername}` + + ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || '120'}` + + (process.env.SFDX_DEPLOY_DEV_DEBUG ? 
' --dev-debug' : '') + + ` --json`; + this.smartDeployOptions.extraCommands.push({ + id: `DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT`, + command: deployCommand, + label: 'Destructive Changes After Deployment (added from PR config)', + skipIfError: true, + context: 'process-deployment-only', + }); + uxLog("action", this, c.cyan('[SmartDeploy] Destructive Changes After Deployment command added to deployment options (from PR config)')); + } + else { + uxLog("warning", this, c.yellow('[SmartDeploy] Destructive Changes After Deployment is set to true in PR config, but no postDestructiveChanges file found. Skipping this step.')); + } + } + } + } + + private async handleDeltaDeployment(deltaFromArgs: any, targetUsername: string, currentGitBranch: string | null) { + this.delta = false; + if ((deltaFromArgs === true || + process.env.USE_DELTA_DEPLOYMENT === 'true' || + this.configInfo.useDeltaDeployment === true) && + (await this.isDeltaAllowed()) === true) { + this.delta = true; + this.smartDeployOptions.delta = true; + // Define delta deployment depending on context + let fromCommit = 'HEAD^'; + let toCommit = 'HEAD'; + if (this.checkOnly) { + // In deployment check context + const prInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + const deltaScope = await getGitDeltaScope( + prInfo?.sourceBranch || currentGitBranch || "", + prInfo?.targetBranch || process.env.FORCE_TARGET_BRANCH || "" + ); + fromCommit = deltaScope.fromCommit; + toCommit = deltaScope?.toCommit?.hash || ''; + } + // call delta + uxLog("action", this, c.cyan('[DeltaDeployment] Generating git delta package.xml and destructiveChanges.xml ...')); + const tmpDir = await createTempDir(); + await callSfdxGitDelta(fromCommit, toCommit, tmpDir, { debug: this.debugMode }); + const packageXmlFileDeltaDeploy = path.join(tmpDir, 'package', 'packageDelta.xml'); + await fs.copy(this.packageXmlFile, packageXmlFileDeltaDeploy); + this.packageXmlFile = packageXmlFileDeltaDeploy; + + // Update package.xml + 
const diffPackageXml = path.join(tmpDir, 'package', 'package.xml'); + + // Extend delta with dependencies if required + if (process.env.USE_DELTA_DEPLOYMENT_WITH_DEPENDENCIES === 'true' || this.configInfo.useDeltaDeploymentWithDependencies === true) { + uxLog("action", this, c.cyan('[DeltaDeployment] Extending package.xml with dependencies ...')); + await extendPackageFileWithDependencies(diffPackageXml, this.packageXmlFile); + } + + await removePackageXmlContent(this.packageXmlFile, diffPackageXml, true, { + debugMode: this.debugMode, + keepEmptyTypes: false, + }); + + const deltaContent = await fs.readFile(this.packageXmlFile, 'utf8'); + uxLog("action", this, c.cyan('[DeltaDeployment] Final Delta package.xml to deploy:\n' + c.green(deltaContent))); + + const smartDeploymentTestsAllowed = await this.isSmartDeploymentTestsAllowed() + if (smartDeploymentTestsAllowed) { + uxLog("action", this, c.cyan("[SmartDeploymentTests] Smart Deployment tests activated: analyzing delta package content...")); + const deltaPackageContent = await parsePackageXmlFile(this.packageXmlFile); + const metadataTypesInDelta = Object.keys(deltaPackageContent); + const impactingMetadataTypesInDelta: string[] = [] + for (const metadataTypeInDelta of metadataTypesInDelta) { + if (!CONSTANTS.NOT_IMPACTING_METADATA_TYPES.includes(metadataTypeInDelta)) { + impactingMetadataTypesInDelta.push(metadataTypeInDelta); + } + } + if (impactingMetadataTypesInDelta.length === 0 && !(await isProductionOrg(targetUsername, {}))) { + uxLog("success", this, c.green("[SmartDeploymentTests] No Impacting metadata in delta package.xml: Skip test classes as the deployed items seem safe :)")); + this.testLevel = "NoTestRun"; + this.testClasses = ""; + } + else { + if (impactingMetadataTypesInDelta.length > 0) { + uxLog("warning", this, c.yellow(`[SmartDeploymentTests] Impacting metadata in delta package.xml (${impactingMetadataTypesInDelta.join(",")}): do not skip test classes.`)); + } else { + uxLog("warning", this, 
c.yellow("[SmartDeploymentTests] Production org as deployment target: do not skip test classes")); + } + } + + } + + // Update destructiveChanges.xml + if (this.smartDeployOptions.postDestructiveChanges) { + const destructiveXmlFileDeploy = path.join(tmpDir, 'destructiveChanges', 'destructiveChangesDelta.xml'); + await fs.copy(this.smartDeployOptions.postDestructiveChanges, destructiveXmlFileDeploy); + const diffDestructiveChangesXml = path.join(tmpDir, 'destructiveChanges', 'destructiveChanges.xml'); + await removePackageXmlContent(destructiveXmlFileDeploy, diffDestructiveChangesXml, true, { + debugMode: this.debugMode, + keepEmptyTypes: false, + }); + this.smartDeployOptions.postDestructiveChanges = destructiveXmlFileDeploy; + const deltaContentDelete = await fs.readFile(destructiveXmlFileDeploy, 'utf8'); + uxLog("action", this, c.cyan('[DeltaDeployment] Final Delta destructiveChanges.xml to delete:\n' + c.yellow(deltaContentDelete))); + } + } + } + + private async initPackageXmlAndDestructiveChanges(packageXml: any, targetUsername: any, flags) { + this.packageXmlFile = + packageXml || + process.env.PACKAGE_XML_TO_DEPLOY || + this.configInfo.packageXmlToDeploy || + fs.existsSync('./manifest/package.xml') + ? './manifest/package.xml' + : './config/package.xml'; + this.smartDeployOptions = { + targetUsername: targetUsername, + conn: flags['target-org']?.getConnection(), + testClasses: this.testClasses, + extraCommands: [] + }; + // Get destructiveChanges.xml and add it in options if existing + const postDestructiveChanges = process.env.PACKAGE_XML_TO_DELETE || + this.configInfo.packageXmlToDelete || + fs.existsSync('./manifest/destructiveChanges.xml') + ? 
'./manifest/destructiveChanges.xml' + : './config/destructiveChanges.xml'; + if (fs.existsSync(postDestructiveChanges)) { + this.smartDeployOptions.postDestructiveChanges = postDestructiveChanges; + } + + // Get preDestructiveChanges.xml and add it in options if existing + const preDestructiveChanges = process.env.PACKAGE_XML_TO_DELETE_PRE_DEPLOY || + this.configInfo.packageXmlToDeletePreDeploy || + fs.existsSync('./manifest/preDestructiveChanges.xml') + ? './manifest/preDestructiveChanges.xml' + : './config/preDestructiveChanges.xml'; + if (fs.existsSync(preDestructiveChanges)) { + this.smartDeployOptions.preDestructiveChanges = preDestructiveChanges; + } + } + + private async handlePackages(targetUsername: any) { + const packages = this.configInfo.installedPackages || []; + const missingPackages: any[] = []; + const installPackages = this.checkOnly === false || + process.env.INSTALL_PACKAGES_DURING_CHECK_DEPLOY === 'true' || + this.configInfo.installPackagesDuringCheckDeploy === true; + if (packages.length > 0 && installPackages) { + // Install packages only if we are in real deployment mode + await MetadataUtils.installPackagesOnOrg(packages, targetUsername, this, 'deploy'); + } else if (packages.length > 0 && this.checkOnly === true) { + // If check mode, warn if there are missing packages + const alreadyInstalled = await MetadataUtils.listInstalledPackages(targetUsername, this); + for (const package1 of packages) { + if (alreadyInstalled.filter( + (installedPackage: any) => package1.SubscriberPackageVersionId === installedPackage.SubscriberPackageVersionId + ).length === 0 && + package1.installDuringDeployments === true) { + missingPackages.push(package1); + } + } + } + + // Display missing packages message + if (missingPackages.length > 0) { + for (const package1 of missingPackages) { + uxLog( + "warning", + this, + c.yellow( + `You may need to install package ${c.bold(package1.SubscriberPackageName)} ${c.bold( + package1.SubscriberPackageVersionId + )} in 
target org to validate the deployment check` + ) + ); + } + uxLog("other", this, ''); + uxLog( + "warning", + this, + c.yellow( + c.italic( + `If you want deployment checks to automatically install packages, please define ${c.bold( + 'INSTALL_PACKAGES_DURING_CHECK_DEPLOY=true' + )} in ENV vars, or property ${c.bold('installPackagesDuringCheckDeploy: true')} in .sfdx-hardis.yml` + ) + ) + ); + } + } + + private async initTestLevelAndTestClasses(flags) { + const givenTestlevel = flags.testlevel || this.configInfo.testLevel || 'RunLocalTests'; + this.testClasses = flags.runtests || this.configInfo.runtests || ''; + + // Auto-detect all APEX test classes within project in order to run "dynamic" RunSpecifiedTests deployment + if (['RunRepositoryTests', 'RunRepositoryTestsExceptSeeAllData'].includes(givenTestlevel)) { + const testClassList = await getApexTestClasses( + this.testClasses, + givenTestlevel === 'RunRepositoryTestsExceptSeeAllData' + ); + if (Array.isArray(testClassList) && testClassList.length) { + flags.testlevel = 'RunSpecifiedTests'; + this.testClasses = testClassList.join(" "); + } else { + // Default back to RunLocalTests in case if repository has zero tests + flags.testlevel = 'RunLocalTests'; + this.testClasses = ''; + } + } + + this.testLevel = flags.testlevel || this.configInfo.testLevel || 'RunLocalTests'; + + // Test classes are only valid for RunSpecifiedTests + if (this.testLevel != 'RunSpecifiedTests') { + this.testClasses = ''; + } + } + + async isDeltaAllowed() { + const prInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + if (prInfo?.customBehaviors?.noDeltaDeployment === true) { + uxLog( + "warning", + this, + c.yellow(`[DeltaDeployment] Delta deployment has been disabled for this Pull Request`) + ); + return false; + } + if (process.env?.DISABLE_DELTA_DEPLOYMENT === 'true') { + uxLog( + "warning", + this, + c.yellow(`[DeltaDeployment] Delta deployment has been explicitly disabled with variable 
DISABLE_DELTA_DEPLOYMENT=true`) + ); + return false; + } + const latestCommit = await getLatestGitCommit(); + if (latestCommit && this.isNoDelta(latestCommit)) { + uxLog("warning", this, c.yellow(c.bold((`[DeltaDeployment] Latest commit contains string "nodelta" so disable delta for this time :)`)))); + return false; + } + if (this.checkOnly === false && !(process.env?.USE_DELTA_DEPLOYMENT_AFTER_MERGE === 'true')) { + uxLog( + "warning", + this, + c.yellow( + "[DeltaDeployment] We'll try to deploy using Quick Deployment feature. If not available, it's safer to use full deployment for a merge job." + ) + ); + uxLog( + "warning", + this, + c.yellow( + '[DeltaDeployment] If you want to use delta deployment anyway, define env variable USE_DELTA_DEPLOYMENT_AFTER_MERGE=true' + ) + ); + return false; + } + if (process.env?.ALWAYS_ENABLE_DELTA_DEPLOYMENT === 'true') { + uxLog( + "warning", + this, + c.yellow(`[DeltaDeployment] Delta deployment has been explicitly enabled with variable ALWAYS_ENABLE_DELTA_DEPLOYMENT=true`) + ); + uxLog( + "warning", + this, + c.yellow( + `[DeltaDeployment] It is not recommended to use delta deployments for merges between major branches, use this config at your own responsibility` + ) + ); + return true; + } + let currentBranch = await getCurrentGitBranch(); + let parentBranch = process.env.FORCE_TARGET_BRANCH || null; + if (prInfo) { + currentBranch = prInfo.sourceBranch; + parentBranch = prInfo.targetBranch; + } + const majorOrgs = await listMajorOrgs(); + uxLog("log", this, c.grey('Major orgs with auth configured:\n' + JSON.stringify(majorOrgs, null, 2))); + const currentBranchIsMajor = majorOrgs.some((majorOrg) => majorOrg.branchName === currentBranch); + const parentBranchIsMajor = majorOrgs.some((majorOrg) => majorOrg.branchName === parentBranch); + if (currentBranchIsMajor && (parentBranchIsMajor === true || parentBranch == null)) { + uxLog( + "warning", + this, + c.yellow( + `This is not safe to use delta between major branches 
(${c.bold(currentBranch)} to ${c.bold( + parentBranch + )}): using full deployment mode` + ) + ); + return false; + } + uxLog( + "action", + this, + c.cyan( + `[DeltaDeployment] Delta allowed between minor branch (${currentBranch}) and major branch (${parentBranch}): using delta deployment mode` + ) + ); + return true; + } + + isNoDelta(latestCommit) { + return latestCommit?.body?.trim().includes('nodelta') || latestCommit?.message?.trim().includes('nodelta') || + latestCommit?.body?.trim().includes('no delta') || latestCommit?.message?.trim().includes('no delta') + } + + async isSmartDeploymentTestsAllowed() { + if (process.env?.USE_SMART_DEPLOYMENT_TESTS === 'true' || this.configInfo?.useSmartDeploymentTests === true) { + const latestCommit = await getLatestGitCommit(); + if (latestCommit && this.isNoSmartDeploymentTests(latestCommit)) { + uxLog("warning", this, c.yellow(c.bold((`[SmartDeploymentTests] Latest commit contains string "nosmart" so disable smartDeploymentTests for this time :)`)))); + return false; + } + return true; + } + return false; + } + + isNoSmartDeploymentTests(latestCommit) { + return latestCommit?.body?.trim().includes('nosmart') || latestCommit?.message?.trim().includes('nosmart') || + latestCommit?.body?.trim().includes('no smart') || latestCommit?.message?.trim().includes('no smart') + } +} diff --git a/src/commands/hardis/project/deploy/sources/dx.ts b/src/commands/hardis/project/deploy/sources/dx.ts deleted file mode 100644 index 7bc6ae6f0..000000000 --- a/src/commands/hardis/project/deploy/sources/dx.ts +++ /dev/null @@ -1,576 +0,0 @@ -/* jscpd:ignore-start */ -/* -To test locally, you can call the command like that: - -Gitlab: CI=true CI_SFDX_HARDIS_GITLAB_TOKEN=XXX CI_PROJECT_ID=YYY CI_JOB_TOKEN=xxx NODE_OPTIONS=--inspect-brk sfdx hardis:project:deploy:sources:dx --targetusername nicolas.vuillamy@cloudity.com.demointeg - -Azure: CI=true SYSTEM_ACCESSTOKEN=XXX SYSTEM_COLLECTIONURI=https://dev.azure.com/MyAzureCollection/ 
BUILD_REPOSITORY_ID=XXX CI_JOB_TOKEN=xxx NODE_OPTIONS=--inspect-brk sfdx hardis:project:deploy:sources:dx --targetusername nicolas.vuillamy@cloudity.com.muuuurf - -- Before, you need to make a sfdx alias:set myBranch=myUsername -- You can find CI_PROJECT_ID with https://gitlab.com/api/v4/projects?search=YOUR-REPO-NAME - -*/ - -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { MetadataUtils } from "../../../../../common/metadata-utils"; -import { createTempDir, getCurrentGitBranch, getLatestGitCommit, isCI, uxLog } from "../../../../../common/utils"; -import { getConfig } from "../../../../../config"; -import { forceSourceDeploy, removePackageXmlContent } from "../../../../../common/utils/deployUtils"; -import { promptOrg } from "../../../../../common/utils/orgUtils"; -import { getApexTestClasses } from "../../../../../common/utils/classUtils"; -import { listMajorOrgs, restoreListViewMine } from "../../../../../common/utils/orgConfigUtils"; -import { NotifProvider, UtilsNotifs } from "../../../../../common/notifProvider"; -import { GitProvider } from "../../../../../common/gitProvider"; -import { callSfdxGitDelta, computeCommitsSummary, getGitDeltaScope } from "../../../../../common/utils/gitUtils"; -import { getBranchMarkdown, getNotificationButtons, getOrgMarkdown } from "../../../../../common/utils/notifUtils"; -import { MessageAttachment } from "@slack/web-api"; -import { TicketProvider } from "../../../../../common/ticketProvider"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class DxSources extends SfdxCommand { - public static title = "Deploy sfdx sources to org"; - - public static description = `Deploy SFDX source to org, following deploymentPlan in .sfdx-hardis.yml - -In case of errors, [tips to fix them](https://sfdx-hardis.cloudity.com/deployTips/) will be included within the error messages. - -### Quick Deploy - -In case Pull Request comments are configured on the project, Quick Deploy will try to be used (equivalent to button Quick Deploy) - -If you do not want to use QuickDeploy, define variable \`SFDX_HARDIS_QUICK_DEPLOY=false\` - -- [GitHub Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/) -- [Gitlab Merge requests notes config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-gitlab/) -- [Azure Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/) - -### Delta deployments - -To activate delta deployments, define property \`useDeltaDeployment: true\` in \`config/.sfdx-hardis.yml\`. 
- -This will activate delta deployments only between minor and major branches (major to major remains full deployment mode) - -If you want to force the delta deployment into major orgs (ex: preprod to prod), this is not recommended but you can use env variable ALWAYS_ENABLE_DELTA_DEPLOYMENT=true - -### Dynamic deployment items / Overwrite management - -If necessary,you can define the following files (that supports wildcards *): - -- \`manifest/package-no-overwrite.xml\`: Every element defined in this file will be deployed only if it is not existing yet in the target org (can be useful with ListView for example, if the client wants to update them directly in production org) -- \`manifest/packageXmlOnChange.xml\`: Every element defined in this file will not be deployed if it already has a similar definition in target org (can be useful for SharingRules for example) - -See [Overwrite management documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-overwrite/) - -### Deployment plan - -If you need to deploy in multiple steps, you can define a property \`deploymentPlan\` in \`.sfdx-hardis.yml\`. 
- -- If a file \`manifest/package.xml\` is found, it will be placed with order 0 in the deployment plan - -- If a file \`manifest/destructiveChanges.xml\` is found, it will be executed as --postdestructivechanges - -- If env var \`SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES\` is defined as \`false\` , split of package.xml will be applied - -Example: - -\`\`\`yaml -deploymentPlan: - packages: - - label: Deploy Flow-Workflow - packageXmlFile: manifest/splits/packageXmlFlowWorkflow.xml - order: 6 - - label: Deploy SharingRules - Case - packageXmlFile: manifest/splits/packageXmlSharingRulesCase.xml - order: 30 - waitAfter: 30 -\`\`\` - -### Packages installation - -You can define a list of package to install during deployments using property \`installedPackages\` - -- If \`INSTALL_PACKAGES_DURING_CHECK_DEPLOY\` is defined as \`true\` (or \`installPackagesDuringCheckDeploy: true\` in \`.sfdx-hardis.yml\`), packages will be installed even if the command is called with \`--check\` mode -- You can automatically update this property by listing all packages installed on an org using command \`sfdx hardis:org:retrieve:packageconfig\` - -Example: - -\`\`\`yaml -installedPackages: - - Id: 0A35r0000009EtECAU - SubscriberPackageId: 033i0000000LVMYAA4 - SubscriberPackageName: Marketing Cloud - SubscriberPackageNamespace: et4ae5 - SubscriberPackageVersionId: 04t6S000000l11iQAA - SubscriberPackageVersionName: Marketing Cloud - SubscriberPackageVersionNumber: 236.0.0.2 - installOnScratchOrgs: true // true or false depending you want to install this package when creating a new scratch org - installDuringDeployments: true // set as true to install package during a deployment using sfdx hardis:project:deploy:sources:dx - installationkey: xxxxxxxxxxxxxxxxxxxx // if the package has a password, write it in this property - - Id: 0A35r0000009F9CCAU - SubscriberPackageId: 033b0000000Pf2AAAS - SubscriberPackageName: Declarative Lookup Rollup Summaries Tool - SubscriberPackageNamespace: dlrs - 
SubscriberPackageVersionId: 04t5p000001BmLvAAK - SubscriberPackageVersionName: Release - SubscriberPackageVersionNumber: 2.15.0.9 - installOnScratchOrgs: true - installDuringDeployments: true -\`\`\` - -### Deployment pre or post commands - -You can define command lines to run before or after a deployment - -If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** - -Example: - -\`\`\`yaml -commandsPreDeploy: - - id: knowledgeUnassign - label: Remove KnowledgeUser right to the user who has it - command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json - - id: knowledgeAssign - label: Assign Knowledge user to the deployment user - command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json -commandsPostDeploy: - - id: knowledgeUnassign - label: Remove KnowledgeUser right to the user who has it - command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json - - id: knowledgeAssign - label: Assign Knowledge user to desired username - command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json -\`\`\` - -### Automated fixes post deployments - -#### List view with scope Mine - -If you defined a property **listViewsToSetToMine** in your .sfdx-hardis.yml, related ListViews will be set to Mine ( see command ) - -Example: - -\`\`\`yaml -listViewsToSetToMine: - - "Operation__c:MyCurrentOperations" - - "Operation__c:MyFinalizedOperations" - - "Opportunity:Default_Opportunity_Pipeline" - - "Opportunity:MyCurrentSubscriptions" - - "Opportunity:MySubscriptions" - - "Account:MyActivePartners" -\`\`\` - -Troubleshooting: 
if you need to fix ListViews with mine from an alpine-linux based docker image, use this workaround in your dockerfile: - -\`\`\`dockerfile -# Do not use puppeteer embedded chromium -RUN apk add --update --no-cache chromium -ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" -ENV CHROMIUM_PATH="/usr/bin/chromium-browser" -ENV PUPPETEER_EXECUTABLE_PATH="$\\{CHROMIUM_PATH}" // remove \\ before { -\`\`\` - -If you need to increase the deployment waiting time (force:source:deploy --wait arg), you can define env variable SFDX_DEPLOY_WAIT_MINUTES - -If you need notifications to be sent using the current Pull Request and not the one just merged ([see use case](https://github.com/hardisgroupcom/sfdx-hardis/issues/637#issuecomment-2230798904)), define env variable SFDX_HARDIS_DEPLOY_BEFORE_MERGE=true -`; - - public static examples = [ - "$ sfdx hardis:project:deploy:sources:dx", - "$ sfdx hardis:project:deploy:sources:dx --check", - "$ sfdx hardis:project:deploy:sources:dx --check --testlevel RunRepositoryTests", - "$ sfdx hardis:project:deploy:sources:dx --check --testlevel RunRepositoryTests --runtests '^(?!FLI|MyPrefix).*'", - "$ sfdx hardis:project:deploy:sources:dx --check --testlevel RunRepositoryTestsExceptSeeAllData", - ]; - - protected static flagsConfig = { - check: flags.boolean({ - char: "c", - default: false, - description: messages.getMessage("checkOnly"), - }), - testlevel: flags.enum({ - char: "l", - options: ["NoTestRun", "RunSpecifiedTests", "RunRepositoryTests", "RunRepositoryTestsExceptSeeAllData", "RunLocalTests", "RunAllTestsInOrg"], - description: messages.getMessage("testLevelExtended"), - }), - runtests: flags.string({ - char: "r", - description: `If testlevel=RunSpecifiedTests, please provide a list of classes. -If testlevel=RunRepositoryTests, can contain a regular expression to keep only class names matching it. 
If not set, will run all test classes found in the repo.`, - }), - packagexml: flags.string({ - char: "p", - description: "Path to package.xml containing what you want to deploy in target org", - }), - delta: flags.boolean({ - default: false, - description: "Applies sfdx-git-delta to package.xml before other deployment processes", - }), - debug: flags.boolean({ - char: "d", - default: false, - description: messages.getMessage("debugMode"), - }), - websocket: flags.string({ - description: messages.getMessage("websocket"), - }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", - }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; - - protected checkOnly = false; - protected configInfo: any = {}; - protected debugMode = false; - - /* jscpd:ignore-end */ - - public async run(): Promise { - this.configInfo = await getConfig("branch"); - this.checkOnly = this.flags.check || false; - const deltaFromArgs = this.flags.delta || false; - - const givenTestlevel = this.flags.testlevel || this.configInfo.testLevel || "RunLocalTests"; - let testClasses = this.flags.runtests || this.configInfo.runtests || ""; - - // Auto-detect all APEX test classes within project in order to run "dynamic" RunSpecifiedTests deployment - if (["RunRepositoryTests", "RunRepositoryTestsExceptSeeAllData"].includes(givenTestlevel)) { - const testClassList = await getApexTestClasses(testClasses, givenTestlevel === "RunRepositoryTestsExceptSeeAllData"); - if (Array.isArray(testClassList) && testClassList.length) { - this.flags.testlevel = "RunSpecifiedTests"; - testClasses = testClassList.join(); - } else { - // Default back to RunLocalTests in case if repository has zero tests - this.flags.testlevel = "RunLocalTests"; - 
testClasses = ""; - } - } - - const testlevel = this.flags.testlevel || this.configInfo.testLevel || "RunLocalTests"; - - // Test classes are only valid for RunSpecifiedTests - if (testlevel != "RunSpecifiedTests") { - testClasses = ""; - } - - const packageXml = this.flags.packagexml || null; - this.debugMode = this.flags.debug || false; - const currentGitBranch = await getCurrentGitBranch(); - - // Get target org - let targetUsername = this.org.getUsername(); - if (!isCI) { - const targetOrg = await promptOrg(this, { devHub: false, setDefault: false, scratch: false }); - targetUsername = targetOrg.username; - } - - // Install packages - const packages = this.configInfo.installedPackages || []; - const missingPackages = []; - const installPackages = - this.checkOnly === false || - process.env.INSTALL_PACKAGES_DURING_CHECK_DEPLOY === "true" || - this.configInfo.installPackagesDuringCheckDeploy === true; - if (packages.length > 0 && installPackages) { - // Install packages only if we are in real deployment mode - await MetadataUtils.installPackagesOnOrg(packages, targetUsername, this, "deploy"); - } else if (packages.length > 0 && this.checkOnly === true) { - // If check mode, warn if there are missing packages - const alreadyInstalled = await MetadataUtils.listInstalledPackages(targetUsername, this); - for (const package1 of packages) { - if ( - alreadyInstalled.filter((installedPackage: any) => package1.SubscriberPackageVersionId === installedPackage.SubscriberPackageVersionId) - .length === 0 && - package1.installDuringDeployments === true - ) { - missingPackages.push(package1); - } - } - } - - // Display missing packages message - if (missingPackages.length > 0) { - for (const package1 of missingPackages) { - uxLog( - this, - c.yellow( - `You may need to install package ${c.bold(package1.SubscriberPackageName)} ${c.bold( - package1.SubscriberPackageVersionId, - )} in target org to validate the deployment check`, - ), - ); - } - uxLog(this, ""); - uxLog( - this, 
- c.yellow( - c.italic( - `If you want deployment checks to automatically install packages, please define ${c.bold( - "INSTALL_PACKAGES_DURING_CHECK_DEPLOY=true", - )} in ENV vars, or property ${c.bold("installPackagesDuringCheckDeploy: true")} in .sfdx-hardis.yml`, - ), - ), - ); - } - - // Compute commitsSummary and store it in globalThis.pullRequestData.commitsSummary - if (this.checkOnly) { - try { - const pullRequestInfo = await GitProvider.getPullRequestInfo(); - const commitsSummary = await computeCommitsSummary(true, pullRequestInfo); - const prDataCommitsSummary = { commitsSummary: commitsSummary.markdown }; - globalThis.pullRequestData = Object.assign(globalThis.pullRequestData || {}, prDataCommitsSummary); - } catch (e3) { - uxLog(this, c.yellow("Unable to compute git summary:\n" + e3)); - } - } - - // Get package.xml - let packageXmlFile = - packageXml || process.env.PACKAGE_XML_TO_DEPLOY || this.configInfo.packageXmlToDeploy || fs.existsSync("./manifest/package.xml") - ? "./manifest/package.xml" - : "./config/package.xml"; - const forceSourceDeployOptions: any = { - targetUsername: targetUsername, - conn: this.org?.getConnection(), - testClasses: testClasses, - }; - // Get destructiveChanges.xml and add it in options if existing - const postDestructiveChanges = - process.env.PACKAGE_XML_TO_DELETE || this.configInfo.packageXmlToDelete || fs.existsSync("./manifest/destructiveChanges.xml") - ? "./manifest/destructiveChanges.xml" - : "./config/destructiveChanges.xml"; - if (fs.existsSync(postDestructiveChanges)) { - forceSourceDeployOptions.postDestructiveChanges = postDestructiveChanges; - } - - // Get preDestructiveChanges.xml and add it in options if existing - const preDestructiveChanges = - process.env.PACKAGE_XML_TO_DELETE_PRE_DEPLOY || - this.configInfo.packageXmlToDeletePreDeploy || - fs.existsSync("./manifest/preDestructiveChanges.xml") - ? 
"./manifest/preDestructiveChanges.xml" - : "./config/preDestructiveChanges.xml"; - if (fs.existsSync(preDestructiveChanges)) { - forceSourceDeployOptions.preDestructiveChanges = preDestructiveChanges; - } - - // Compute and apply delta if required - let delta = false; - if ( - (deltaFromArgs === true || process.env.USE_DELTA_DEPLOYMENT === "true" || this.configInfo.useDeltaDeployment === true) && - (await this.isDeltaAllowed()) === true - ) { - delta = true; - forceSourceDeployOptions.delta = true; - // Define delta deployment depending on context - let fromCommit = "HEAD"; - let toCommit = "HEAD^"; - if (this.checkOnly) { - // In deployment check context - const prInfo = await GitProvider.getPullRequestInfo(); - const deltaScope = await getGitDeltaScope(prInfo?.sourceBranch || currentGitBranch, prInfo?.targetBranch || process.env.FORCE_TARGET_BRANCH); - fromCommit = deltaScope.fromCommit; - toCommit = deltaScope.toCommit.hash; - } - // call delta - uxLog(this, c.cyan("Generating git delta package.xml and destructiveChanges.xml ...")); - const tmpDir = await createTempDir(); - await callSfdxGitDelta(fromCommit, toCommit, tmpDir, { debug: this.debugMode }); - - // Update package.xml - const packageXmlFileDeltaDeploy = path.join(tmpDir, "package", "packageDelta.xml"); - await fs.copy(packageXmlFile, packageXmlFileDeltaDeploy); - packageXmlFile = packageXmlFileDeltaDeploy; - const diffPackageXml = path.join(tmpDir, "package", "package.xml"); - await removePackageXmlContent(packageXmlFile, diffPackageXml, true, { debugMode: this.debugMode, keepEmptyTypes: false }); - - const deltaContent = await fs.readFile(packageXmlFile, "utf8"); - uxLog(this, c.cyan("Final Delta package.xml to deploy:\n" + c.green(deltaContent))); - - // Update destructiveChanges.xml - if (forceSourceDeployOptions.postDestructiveChanges) { - const destructiveXmlFileDeploy = path.join(tmpDir, "destructiveChanges", "destructiveChangesDelta.xml"); - await 
fs.copy(forceSourceDeployOptions.postDestructiveChanges, destructiveXmlFileDeploy); - const diffDestructiveChangesXml = path.join(tmpDir, "destructiveChanges", "destructiveChanges.xml"); - await removePackageXmlContent(destructiveXmlFileDeploy, diffDestructiveChangesXml, true, { - debugMode: this.debugMode, - keepEmptyTypes: false, - }); - forceSourceDeployOptions.postDestructiveChanges = destructiveXmlFileDeploy; - const deltaContentDelete = await fs.readFile(destructiveXmlFileDeploy, "utf8"); - uxLog(this, c.cyan("Final Delta destructiveChanges.xml to delete:\n" + c.yellow(deltaContentDelete))); - } - } - - // Process deployment (or deployment check) - const { messages, quickDeploy, deployXmlCount } = await forceSourceDeploy( - packageXmlFile, - this.checkOnly, - testlevel, - this.debugMode, - this, - forceSourceDeployOptions, - ); - - const deployExecuted = !this.checkOnly && deployXmlCount > 0 ? true : false; - - // Set ListViews to scope Mine if defined in .sfdx-hardis.yml - if (this.configInfo.listViewsToSetToMine && deployExecuted) { - await restoreListViewMine(this.configInfo.listViewsToSetToMine, this.org.getConnection(), { debug: this.debugMode }); - } - - // Send notification of deployment success - if (deployExecuted) { - const pullRequestInfo = await GitProvider.getPullRequestInfo(); - const attachments: MessageAttachment[] = []; - try { - // Build notification attachments & handle ticketing systems comments - const commitsSummary = await this.collectNotifAttachments(attachments, pullRequestInfo); - await TicketProvider.postDeploymentActions(commitsSummary.tickets, this.org?.getConnection()?.instanceUrl || targetUsername, pullRequestInfo); - } catch (e4) { - uxLog(this, c.yellow("Unable to handle commit info on TicketProvider post deployment actions:\n" + e4.message) + "\n" + c.gray(e4.stack)); - } - - const orgMarkdown = await getOrgMarkdown(this.org?.getConnection()?.instanceUrl || targetUsername); - const branchMarkdown = await getBranchMarkdown(); 
- let notifMessage = `Deployment has been successfully processed from branch ${branchMarkdown} to org ${orgMarkdown}`; - notifMessage += quickDeploy ? " (🚀 quick deployment)" : delta ? " (🌙 delta deployment)" : " (🌕 full deployment)"; - - const notifButtons = await getNotificationButtons(); - if (pullRequestInfo) { - if (this.debugMode) { - uxLog(this, c.gray("PR info:\n" + JSON.stringify(pullRequestInfo))); - } - const prUrl = pullRequestInfo.web_url || pullRequestInfo.html_url || pullRequestInfo.url; - const prAuthor = pullRequestInfo?.authorName || pullRequestInfo?.author?.login || pullRequestInfo?.author?.name || null; - notifMessage += `\nRelated: <${prUrl}|${pullRequestInfo.title}>` + (prAuthor ? ` by ${prAuthor}` : ""); - const prButtonText = "View Pull Request"; - notifButtons.push({ text: prButtonText, url: prUrl }); - } else { - uxLog(this, c.yellow("WARNING: Unable to get Pull Request info, notif won't have a button URL")); - } - globalThis.jsForceConn = this?.org?.getConnection(); // Required for some notifications providers like Email - NotifProvider.postNotifications({ - type: "DEPLOYMENT", - text: notifMessage, - buttons: notifButtons, - severity: "success", - attachments: attachments, - logElements: [], - data: { metric: 0 }, // Todo: if delta used, count the number of items deployed - metrics: { - DeployedItems: 0, // Todo: if delta used, count the number of items deployed - }, - }); - } - return { orgId: this.org.getOrgId(), outputString: messages.join("\n") }; - } - - private async collectNotifAttachments(attachments: MessageAttachment[], pullRequestInfo: any) { - const commitsSummary = await computeCommitsSummary(false, pullRequestInfo); - // Tickets attachment - if (commitsSummary.tickets.length > 0) { - attachments.push({ - text: `*Tickets*\n${commitsSummary.tickets - .map((ticket) => { - if (ticket.foundOnServer) { - return "• " + UtilsNotifs.markdownLink(ticket.url, ticket.id) + " " + ticket.subject; - } else { - return "• " + 
UtilsNotifs.markdownLink(ticket.url, ticket.id); - } - }) - .join("\n")}`, - }); - } - // Manual actions attachment - if (commitsSummary.manualActions.length > 0) { - attachments.push({ - text: `*Manual actions*\n${commitsSummary.manualActions - .map((manualAction) => { - return "• " + manualAction; - }) - .join("\n")}`, - }); - } - // Commits attachment - if (commitsSummary.logResults.length > 0) { - attachments.push({ - text: `*Commits*\n${commitsSummary.logResults - .map((logResult) => { - return "• " + logResult.message + ", by " + logResult.author_name; - }) - .join("\n")}`, - }); - } - return commitsSummary; - } - - async isDeltaAllowed() { - if (process.env?.DISABLE_DELTA_DEPLOYMENT === "true") { - uxLog(this, c.yellow(`Delta deployment has been explicitly disabled with variable DISABLE_DELTA_DEPLOYMENT=true`)); - return false; - } - const latestCommit = await getLatestGitCommit(); - if (latestCommit && (latestCommit?.body?.includes("nodelta") || latestCommit?.message?.includes("nodelta"))) { - uxLog(this, c.yellow(`Latest commit contains string "nodelta" so disable delta for this time :)`)); - return false; - } - if (this.checkOnly === false && !(process.env?.USE_DELTA_DEPLOYMENT_AFTER_MERGE === "true")) { - uxLog( - this, - c.yellow("We'll try to deploy using Quick Deployment feature. 
If not available, it's safer to use full deployment for a merge job."), - ); - uxLog(this, c.yellow("If you want to use delta deployment anyway, define env variable USE_DELTA_DEPLOYMENT_AFTER_MERGE=true")); - return false; - } - if (process.env?.ALWAYS_ENABLE_DELTA_DEPLOYMENT === "true") { - uxLog(this, c.yellow(`Delta deployment has been explicitly enabled with variable ALWAYS_ENABLE_DELTA_DEPLOYMENT=true`)); - uxLog( - this, - c.yellow(`It is recommended to use delta deployments for merges between major branches, use this config at your own responsibility`), - ); - return true; - } - let currentBranch = await getCurrentGitBranch(); - let parentBranch = process.env.FORCE_TARGET_BRANCH || null; - const prInfo = await GitProvider.getPullRequestInfo(); - if (prInfo) { - currentBranch = prInfo.sourceBranch; - parentBranch = prInfo.targetBranch; - } - const majorOrgs = await listMajorOrgs(); - uxLog(this, c.grey("Major orgs with auth configured:\n" + JSON.stringify(majorOrgs, null, 2))); - const currentBranchIsMajor = majorOrgs.some((majorOrg) => majorOrg.branchName === currentBranch); - const parentBranchIsMajor = majorOrgs.some((majorOrg) => majorOrg.branchName === parentBranch); - if (currentBranchIsMajor && (parentBranchIsMajor === true || parentBranch == null)) { - uxLog( - this, - c.yellow( - `This is not safe to use delta between major branches (${c.bold(currentBranch)} to ${c.bold(parentBranch)}): using full deployment mode`, - ), - ); - return false; - } - uxLog(this, c.cyan(`Delta allowed between minor branch (${currentBranch}) and major branch (${parentBranch}): using delta deployment mode`)); - return true; - } -} diff --git a/src/commands/hardis/project/deploy/sources/metadata.ts b/src/commands/hardis/project/deploy/sources/metadata.ts index ca616bd29..f2fe4274a 100644 --- a/src/commands/hardis/project/deploy/sources/metadata.ts +++ b/src/commands/hardis/project/deploy/sources/metadata.ts @@ -1,101 +1,104 @@ /* jscpd:ignore-start */ -import { flags, 
SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { MetadataUtils } from "../../../../../common/metadata-utils"; -import { createTempDir, execCommand, uxLog } from "../../../../../common/utils"; -import { deployDestructiveChanges, deployMetadatas } from "../../../../../common/utils/deployUtils"; -import { getConfig } from "../../../../../config"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class DxSources extends SfdxCommand { - public static title = "Deploy metadata sources to org"; - - public static description = messages.getMessage("deployMetadatas"); - - public static examples = ["$ sfdx hardis:project:deploy:sources:metadata"]; - - protected static flagsConfig = { - check: flags.boolean({ - char: "c", +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { MetadataUtils } from '../../../../../common/metadata-utils/index.js'; +import { createTempDir, execCommand, uxLog } from '../../../../../common/utils/index.js'; +import { deployDestructiveChanges, deployMetadatas } from '../../../../../common/utils/deployUtils.js'; +import { getConfig } from '../../../../../config/index.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class 
DxSources extends SfCommand { + public static title = 'Deploy metadata sources to org'; + + public static description = messages.getMessage('deployMetadatas'); + + public static examples = ['$ sf hardis:project:deploy:sources:metadata']; + + public static flags: any = { + check: Flags.boolean({ + char: 'c', default: false, - description: messages.getMessage("checkOnly"), + description: messages.getMessage('checkOnly'), }), - deploydir: flags.string({ - char: "x", - default: ".", - description: "Deploy directory", + deploydir: Flags.string({ + char: 'x', + default: '.', + description: 'Deploy directory', }), - packagexml: flags.string({ - char: "p", - description: "Path to package.xml file to deploy", + packagexml: Flags.string({ + char: 'p', + description: 'Path to package.xml file to deploy', }), - filter: flags.boolean({ - char: "f", + filter: Flags.boolean({ + char: 'f', default: false, - description: "Filter metadatas before deploying", + description: 'Filter metadatas before deploying', }), - destructivepackagexml: flags.string({ - char: "k", - description: "Path to destructiveChanges.xml file to deploy", + destructivepackagexml: Flags.string({ + char: 'k', + description: 'Path to destructiveChanges.xml file to deploy', }), - testlevel: flags.enum({ - char: "l", - default: "RunLocalTests", - options: ["NoTestRun", "RunSpecifiedTests", "RunLocalTests", "RunAllTestsInOrg"], - description: messages.getMessage("testLevel"), + testlevel: Flags.string({ + char: 'l', + default: 'RunLocalTests', + options: ['NoTestRun', 'RunSpecifiedTests', 'RunLocalTests', 'RunAllTestsInOrg'], + description: messages.getMessage('testLevel'), }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: 
messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials"]; + protected static requiresSfdxPlugins = ['sfdx-essentials']; protected configInfo: any = {}; - protected deployDir: any = "."; + protected deployDir: any = '.'; /* jscpd:ignore-end */ public async run(): Promise { - const check = this.flags.check || false; - const packageXml = this.flags.packagexml || null; - const filter = this.flags.filter || false; - const destructivePackageXml = this.flags.destructivepackagexml || null; - const testlevel = this.flags.testlevel || "RunLocalTests"; - const debugMode = this.flags.debug || false; - this.deployDir = this.flags.deploydir || "."; - this.configInfo = await getConfig("branch"); + const { flags } = await this.parse(DxSources); + uxLog("error", this, c.red('This command is deprecated and will be removed in January 2025')); + uxLog("error", this, c.red('Nobody used Metadata format anymore :)')); + uxLog( + "error", + this, + c.red('If you think it should be kept and maintained, please post an issue on sfdx-hardis GitHub repository') + ); + + const check = flags.check || false; + const packageXml = flags.packagexml || null; + const filter = flags.filter || false; + const destructivePackageXml = flags.destructivepackagexml || null; + const testlevel = flags.testlevel || 
'RunLocalTests'; + const debugMode = flags.debug || false; + this.deployDir = flags.deploydir || '.'; + this.configInfo = await getConfig('branch'); // Install packages const packages = this.configInfo.installedPackages || []; if (packages.length > 0 && !check) { - await MetadataUtils.installPackagesOnOrg(packages, null, this, "deploy"); + await MetadataUtils.installPackagesOnOrg(packages, null, this, 'deploy'); } const destructiveProcessed = false; @@ -103,18 +106,26 @@ export default class DxSources extends SfdxCommand { // Deploy sources const packageXmlFile = - packageXml || process.env.PACKAGE_XML_TO_DEPLOY || this.configInfo.packageXmlToDeploy || fs.existsSync("./manifest/package.xml") - ? "./manifest/package.xml" - : fs.existsSync("./package.xml") - ? "./package.xml" - : fs.existsSync(path.join(this.deployDir, "package.xml")) - ? path.join(this.deployDir, "package.xml") - : "./config/package.xml"; + packageXml || + process.env.PACKAGE_XML_TO_DEPLOY || + this.configInfo.packageXmlToDeploy || + fs.existsSync('./manifest/package.xml') + ? './manifest/package.xml' + : fs.existsSync('./package.xml') + ? './package.xml' + : fs.existsSync(path.join(this.deployDir, 'package.xml')) + ? 
path.join(this.deployDir, 'package.xml') + : './config/package.xml'; if (fs.existsSync(packageXmlFile)) { // Filter if necessary if (filter) { const tmpDir = await createTempDir(); - const filterCommand = "sfdx essentials:metadata:filter-from-packagexml" + ` -i ${this.deployDir}` + ` -p ${packageXmlFile}` + ` -o ${tmpDir}`; + // sfdx-essentials still here but deprecated and will be removed + const filterCommand = + 'sfdx essentials:metadata:filter-from-packagexml' + + ` -i ${this.deployDir}` + + ` -p ${packageXmlFile}` + + ` -o ${tmpDir}`; this.deployDir = tmpDir; await execCommand(filterCommand, this, { output: true, @@ -127,46 +138,45 @@ export default class DxSources extends SfdxCommand { deployDir: this.deployDir, testlevel, check, - soap: true, debug: debugMode, tryOnce: true, }); - let message = ""; + let message = ''; if (deployRes.status === 0) { deployProcessed = true; - message = "[sfdx-hardis] Successfully deployed sfdx project sources to Salesforce org"; - uxLog(this, c.green(message)); + message = '[sfdx-hardis] Successfully deployed sfdx project sources to Salesforce org'; + uxLog("success", this, c.green(message)); } else { - message = "[sfdx-hardis] Unable to deploy sfdx project sources to Salesforce org"; - uxLog(this, c.red(deployRes.errorMessage)); + message = '[sfdx-hardis] Unable to deploy sfdx project sources to Salesforce org'; + uxLog("error", this, c.red(deployRes.errorMessage)); } } else { - uxLog(this, "No package.xml found so no deployment has been performed"); + uxLog("log", this, 'No package.xml found so no deployment has been performed'); } // Deploy destructive changes const packageDeletedXmlFile = destructivePackageXml || - process.env.PACKAGE_XML_TO_DELETE || - this.configInfo.packageXmlToDelete || - fs.existsSync("./manifest/destructiveChanges.xml") - ? "./manifest/destructiveChanges.xml" - : fs.existsSync("./destructiveChanges.xml") - ? 
"./destructiveChanges.xml" - : fs.existsSync(path.join(this.deployDir, "destructiveChanges.xml")) - ? path.join(this.deployDir, "destructiveChanges.xml") - : "./config/destructiveChanges.xml"; + process.env.PACKAGE_XML_TO_DELETE || + this.configInfo.packageXmlToDelete || + fs.existsSync('./manifest/destructiveChanges.xml') + ? './manifest/destructiveChanges.xml' + : fs.existsSync('./destructiveChanges.xml') + ? './destructiveChanges.xml' + : fs.existsSync(path.join(this.deployDir, 'destructiveChanges.xml')) + ? path.join(this.deployDir, 'destructiveChanges.xml') + : './config/destructiveChanges.xml'; if (fs.existsSync(packageDeletedXmlFile)) { await deployDestructiveChanges(packageDeletedXmlFile, { debug: debugMode, check }, this); } else { - uxLog(this, "No destructivePackage.Xml found so no destructive deployment has been performed"); + uxLog("log", this, 'No destructivePackage.Xml found so no destructive deployment has been performed'); } return { - orgId: this.org.getOrgId(), + orgId: flags['target-org'].getOrgId(), deployProcessed, destructiveProcessed, - outputString: "", + outputString: '', }; } } diff --git a/src/commands/hardis/project/deploy/start.ts b/src/commands/hardis/project/deploy/start.ts new file mode 100644 index 000000000..28d5590e0 --- /dev/null +++ b/src/commands/hardis/project/deploy/start.ts @@ -0,0 +1,190 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { AnyJson } from "@salesforce/ts-types"; +import { wrapSfdxCoreCommand } from "../../../../common/utils/wrapUtils.js"; +import { checkDeploymentOrgCoverage, executePrePostCommands, extractOrgCoverageFromLog } from '../../../../common/utils/deployUtils.js'; +import { GitProvider } from '../../../../common/gitProvider/index.js'; +import { buildCheckDeployCommitSummary, handlePostDeploymentNotifications } from '../../../../common/utils/gitUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +export 
default class ProjectDeployStart extends SfCommand { + public static description = `sfdx-hardis wrapper for **sf project deploy start** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_start_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +\`\`\`yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... 
+ skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +\`\`\` +`; + + public static aliases = [ + "hardis:deploy:start" + ] + + public static flags: any = { + "api-version": Flags.integer({ + char: "a", + description: "api-version", + }), + async: Flags.boolean({ + description: "async", + exclusive: ["wait"], + }), + "dry-run": Flags.boolean({ + description: "dry-run", + default: false, + }), + "ignore-conflicts": Flags.boolean({ + char: "c", + description: "ignore-conflicts", + default: false, + }), + "ignore-errors": Flags.boolean({ + char: "r", + description: "ignore-errors", + default: false, + }), + "ignore-warnings": Flags.boolean({ + char: "g", + description: "ignore-warnings", + default: false, + }), + manifest: Flags.string({ + char: "x", + description: "manifest", + }), + metadata: Flags.string({ + char: "m", + description: "metadata", + multiple: true, + }), + "metadata-dir": Flags.string({ + description: "metadata-dir", + }), + "single-package": Flags.boolean({ + dependsOn: ["metadata-dir"], + description: "single-package", + }), + "source-dir": Flags.string({ + char: "d", + description: "source-dir", + multiple: true, + }), + "target-org": Flags.requiredOrg(), + tests: Flags.string({ + description: "tests", + }), + "test-level": Flags.string({ + description: "test-level", + }), + wait: Flags.integer({ + char: "w", + default: 33, + min: 1, + description: "wait", + exclusive: ["async"], + }), + "purge-on-delete": Flags.boolean({ + description: "purge-on-delete", + }), + "pre-destructive-changes": Flags.string({ + dependsOn: ["manifest"], + description: "pre-destructive-changes", + }), + "post-destructive-changes": Flags.string({ + dependsOn: ["manifest"], + description: "post-destructive-changes", + }), + "coverage-formatters": Flags.string({ + description: "coverage-formatters", + }), + junit: Flags.boolean({ + description: "junit", + }), + "results-dir": Flags.string({ + description: "results-dir", + }), + debug: 
Flags.boolean({ + default: false, + description: "debug", + }), + }; + + public static requiresProject = true; + + public async run(): Promise { + const { flags } = await this.parse(ProjectDeployStart); + const conn = flags["target-org"].getConnection(); + const checkOnly = flags["dry-run"] === true; + await setConnectionVariables(flags['target-org']?.getConnection(), true); + // Compute data for PR comments & flow diffs + if (checkOnly) { + await buildCheckDeployCommitSummary() + } + // Run pre deployment commands if defined + await executePrePostCommands('commandsPreDeploy', { success: true, checkOnly: checkOnly, conn: conn }); + const result = await wrapSfdxCoreCommand("sf project deploy start", this.argv, this, flags.debug); + // Check org coverage if requested + if (flags['coverage-formatters'] && result.stdout) { + const orgCoveragePercent = await extractOrgCoverageFromLog(result.stdout + result.stderr || ''); + if (orgCoveragePercent) { + try { + await checkDeploymentOrgCoverage(Number(orgCoveragePercent), { check: checkOnly }); + } catch (errCoverage) { + await GitProvider.managePostPullRequestComment(); + throw errCoverage; + } + } + } + // Run post deployment commands if defined + await executePrePostCommands('commandsPostDeploy', { success: process.exitCode === 0, checkOnly: checkOnly, conn: conn }); + // Post comment if deployment check success + if (checkOnly) { + await GitProvider.managePostPullRequestComment(); + } + // Post success deployment notifications + if (process.exitCode === 0 && !checkOnly) { + await handlePostDeploymentNotifications(flags, flags["target-org"].getUsername(), false, false, flags["debug"]); + } + return result; + } +} +/* jscpd:ignore-end */ \ No newline at end of file diff --git a/src/commands/hardis/project/deploy/validate.ts b/src/commands/hardis/project/deploy/validate.ts new file mode 100644 index 000000000..4208b2edf --- /dev/null +++ b/src/commands/hardis/project/deploy/validate.ts @@ -0,0 +1,180 @@ +/* 
jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { AnyJson } from "@salesforce/ts-types"; +import { wrapSfdxCoreCommand } from "../../../../common/utils/wrapUtils.js"; +import { checkDeploymentOrgCoverage, executePrePostCommands, extractOrgCoverageFromLog } from '../../../../common/utils/deployUtils.js'; +import { GitProvider } from '../../../../common/gitProvider/index.js'; +import { buildCheckDeployCommitSummary } from '../../../../common/utils/gitUtils.js'; +import { setConnectionVariables } from '../../../../common/utils/orgUtils.js'; + +export default class ProjectDeployValidate extends SfCommand { + public static description = `sfdx-hardis wrapper for **sf project deploy validate** that displays tips to solve deployment errors. + +Note: Use **--json** argument to have better results + +[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) + +[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_project_commands_unified.htm#cli_reference_project_deploy_validate_unified) + +### Deployment pre or post commands + +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. 
A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) + +If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** + +Example: + +\`\`\`yaml +commandsPreDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to the deployment user + command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + +commandsPostDeploy: + - id: knowledgeUnassign + label: Remove KnowledgeUser right to the user who has it + command: sf data update record --sobject User --where "UserPermissionsKnowledgeUser='true'" --values "UserPermissionsKnowledgeUser='false'" --json + - id: knowledgeAssign + label: Assign Knowledge user to desired username + command: sf data update record --sobject User --where "Username='admin-user@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... 
+ skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true +\`\`\` +`; + + public static aliases = [ + "hardis:deploy:validate" + ] + public static flags: any = { + "api-version": Flags.integer({ + char: "a", + description: "api-version", + }), + async: Flags.boolean({ + description: "async", + exclusive: ["wait"], + }), + "dry-run": Flags.boolean({ + description: "dry-run", + default: false, + }), + "ignore-conflicts": Flags.boolean({ + char: "c", + description: "ignore-conflicts", + default: false, + }), + "ignore-errors": Flags.boolean({ + char: "r", + description: "ignore-errors", + default: false, + }), + "ignore-warnings": Flags.boolean({ + char: "g", + description: "ignore-warnings", + default: false, + }), + manifest: Flags.string({ + char: "x", + description: "manifest", + }), + metadata: Flags.string({ + char: "m", + description: "metadata", + multiple: true, + }), + "metadata-dir": Flags.string({ + description: "metadata-dir", + }), + "single-package": Flags.boolean({ + dependsOn: ["metadata-dir"], + description: "single-package", + }), + "source-dir": Flags.string({ + char: "d", + description: "source-dir", + multiple: true, + }), + "target-org": Flags.requiredOrg(), + tests: Flags.string({ + description: "tests", + }), + "test-level": Flags.string({ + description: "test-level", + }), + wait: Flags.integer({ + char: "w", + default: 33, + min: 1, + description: "wait", + exclusive: ["async"], + }), + "purge-on-delete": Flags.boolean({ + description: "purge-on-delete", + }), + "pre-destructive-changes": Flags.string({ + dependsOn: ["manifest"], + description: "pre-destructive-changes", + }), + "post-destructive-changes": Flags.string({ + dependsOn: ["manifest"], + description: "post-destructive-changes", + }), + "coverage-formatters": Flags.string({ + description: "coverage-formatters", + }), + junit: Flags.boolean({ + description: "junit", + }), + "results-dir": Flags.string({ + description: "results-dir", + }), + debug: 
Flags.boolean({ + default: false, + description: "debug", + }), + }; + + public static requiresProject = true; + + public async run(): Promise { + const { flags } = await this.parse(ProjectDeployValidate); + const conn = flags["target-org"].getConnection(); + await setConnectionVariables(flags['target-org']?.getConnection(), true); + // Compute data for PR comments & flow diffs + await buildCheckDeployCommitSummary(); + // Run pre deployment commands if defined + await executePrePostCommands('commandsPreDeploy', { success: true, checkOnly: true, conn: conn }); + const result = await wrapSfdxCoreCommand("sf project deploy validate", this.argv, this, flags.debug); + // Check org coverage if requested + if (flags['coverage-formatters'] && result.stdout) { + const orgCoveragePercent = await extractOrgCoverageFromLog(result.stdout + result.stderr || ''); + const checkOnly = true; + if (orgCoveragePercent) { + try { + await checkDeploymentOrgCoverage(Number(orgCoveragePercent), { check: checkOnly }); + } catch (errCoverage) { + await GitProvider.managePostPullRequestComment(); + throw errCoverage; + } + } + } + // Run post deployment commands if defined + await executePrePostCommands('commandsPostDeploy', { success: process.exitCode === 0, checkOnly: true, conn: conn }); + await GitProvider.managePostPullRequestComment(); + return result; + } +} +/* jscpd:ignore-end */ \ No newline at end of file diff --git a/src/commands/hardis/project/fix/profiletabs.ts b/src/commands/hardis/project/fix/profiletabs.ts index 5f218d0c6..56e1e9e63 100644 --- a/src/commands/hardis/project/fix/profiletabs.ts +++ b/src/commands/hardis/project/fix/profiletabs.ts @@ -1,69 +1,88 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { glob } from "glob"; -import * as sortArray from "sort-array"; -import { uxLog } from 
"../../../../common/utils"; -import { soqlQueryTooling } from "../../../../common/utils/apiUtils"; -import { prompts } from "../../../../common/utils/prompts"; -import { parseXmlFile, writeXmlFile } from "../../../../common/utils/xmlUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class FixV53Flexipages extends SfdxCommand { - public static title = "Fix profiles to add tabs that are not retrieved by SF CLI"; - - public static description = `Interactive prompts to add tab visibilities that are not retrieved by force:source:pull`; - - public static examples = ["$ sfdx hardis:project:fix:profiletabs"]; - - protected static flagsConfig = { - path: flags.string({ - char: "p", +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { glob } from 'glob'; +import sortArray from 'sort-array'; +import { uxLog } from '../../../../common/utils/index.js'; +import { soqlQueryTooling } from '../../../../common/utils/apiUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { parseXmlFile, writeXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class FixV53Flexipages extends SfCommand { + public static title = 'Fix profiles to add tabs that are not retrieved by SF CLI'; + + public static description: string = ` +## Command Behavior + 
+**Interactively updates tab visibility settings in Salesforce profiles, addressing a common issue where tab visibilities are not correctly retrieved by \`sf project retrieve start\`.** + +This command provides a user-friendly interface to manage tab settings within your profile XML files, ensuring that your local project accurately reflects the intended tab configurations in your Salesforce org. + +Key functionalities: + +- **Interactive Tab Selection:** Displays a multi-select menu of all available tabs in your org, allowing you to choose which tabs to update. +- **Visibility Control:** Lets you set the visibility for the selected tabs to either \`DefaultOn\` (Visible) or \`Hidden\`. +- **Profile Selection:** Presents a multi-select menu of all .profile-meta.xml files in your project, allowing you to apply the tab visibility changes to specific profiles. +- **XML Updates:** Modifies the section of the selected profile XML files to reflect the chosen tab settings. If a tab visibility setting already exists for a selected tab, it will be updated; otherwise, a new one will be added. +- **Sorted Output:** The in the updated profile XML files are sorted alphabetically for consistency and readability. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Tooling API):** It queries the \`TabDefinition\` object using \`soqlQueryTooling\` to retrieve a list of all available tabs in the target org. +- **File Discovery:** Uses \`glob\` to find all .profile-meta.xml files within the specified project path. +- **Interactive Prompts:** Leverages the \`prompts\` library to create interactive menus for selecting tabs, visibility settings, and profiles. +- **XML Parsing and Manipulation:** Uses \`parseXmlFile\` to read the content of profile XML files and \`writeXmlFile\` to write the modified content back. It manipulates the \`tabVisibilities\` array within the parsed XML to add or update tab settings. +- **Array Sorting:** Employs the \`sort-array\` library to sort the \`tabVisibilities\` alphabetically by tab name. +- **Logging:** Provides feedback to the user about which profiles have been updated and a summary of the changes. +
+`; + + + public static examples = ['$ sf hardis:project:fix:profiletabs']; + + public static flags: any = { + path: Flags.string({ + char: 'p', default: process.cwd(), - description: "Root folder", + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; protected pathToBrowse: string; protected debugMode = false; public async run(): Promise { - this.pathToBrowse = this.flags.path || process.cwd(); - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(FixV53Flexipages); + this.pathToBrowse = flags.path || process.cwd(); + this.debugMode = flags.debug || false; /* jscpd:ignore-end */ // List available tabs in org - const tabsRequest = "SELECT Label,DurableId,Name,SobjectName FROM TabDefinition ORDER BY Label"; - const tabsResult = await soqlQueryTooling(tabsRequest, this.org.getConnection()); + const tabsRequest 
= 'SELECT Label,DurableId,Name,SobjectName FROM TabDefinition ORDER BY Label'; + const tabsResult = await soqlQueryTooling(tabsRequest, (flags['target-org'] as any).getConnection()); const choices = tabsResult.records.map((tab) => { return { title: `${tab.Label} (${tab.Name} on SObject ${tab.SobjectName})}`, @@ -74,23 +93,26 @@ export default class FixV53Flexipages extends SfdxCommand { // Prompt tabs to add to Profiles const promptTabsToAdd = await prompts([ { - type: "multiselect", - name: "tabs", - message: "Please select the tabs you want to display or hide in Profile(s)", + type: 'multiselect', + name: 'tabs', + message: 'Please select the tabs you want to display or hide in Profile(s)', + description: 'Choose which tabs should be configured for profiles', choices: choices, }, { - type: "select", - name: "visibility", - message: "Please select the flag you want the tabs to be applied on profiles you will select", + type: 'select', + name: 'visibility', + message: 'Please select the flag you want the tabs to be applied on profiles you will select', + description: 'Choose the visibility setting for the selected tabs', + placeholder: 'Select visibility', choices: [ { - title: "Visible (DefaultOn)", - value: "DefaultOn", + title: 'Visible (DefaultOn)', + value: 'DefaultOn', }, { - title: "Hidden", - value: "Hidden", + title: 'Hidden', + value: 'Hidden', }, ], }, @@ -101,14 +123,19 @@ export default class FixV53Flexipages extends SfdxCommand { // Prompt profiles to user const globPattern = this.pathToBrowse + `/**/*.profile-meta.xml`; - const profileSourceFiles = await glob(globPattern, { cwd: this.pathToBrowse }); + const profileSourceFiles = await glob(globPattern, { cwd: this.pathToBrowse, ignore: GLOB_IGNORE_PATTERNS }); const promptProfilesToUpdate = await prompts({ - type: "multiselect", - name: "profiles", - message: "Please select the profiles you want to update to apply tabs [" + tabsToUpdate.join(", ") + "] with visibility " + visibility, + type: 
'multiselect', + name: 'profiles', + message: + 'Please select the profiles you want to update to apply tabs [' + + tabsToUpdate.join(', ') + + '] with visibility ' + + visibility, + description: 'Choose which profiles should receive the tab visibility updates', choices: profileSourceFiles.map((profileFile) => { return { - title: profileFile.replace(/\\/g, "/").split("/").pop().replace(".profile-meta.xml", ""), + title: (profileFile.replace(/\\/g, '/').split('/').pop() || '').replace('.profile-meta.xml', ''), value: profileFile, }; }), @@ -117,7 +144,7 @@ export default class FixV53Flexipages extends SfdxCommand { // Apply updates on Profiles for (const profileFile of promptProfilesToUpdate.profiles) { const profile = await parseXmlFile(profileFile); - let tabVisibilities = profile.Profile["tabVisibilities"] || []; + let tabVisibilities = profile.Profile['tabVisibilities'] || []; for (const tabName of tabsToUpdate) { // Update existing tabVisibility if (tabVisibilities.filter((tabVisibility) => tabVisibility.tab[0] === tabName).length > 0) { @@ -144,19 +171,19 @@ export default class FixV53Flexipages extends SfdxCommand { }; }), { - by: ["key"], - order: ["asc"], - }, - ).map((sorted) => sorted.value); - profile.Profile["tabVisibilities"] = sortedTabVisibility; + by: ['key'], + order: ['asc'], + } + ).map((sorted: any) => sorted.value); + profile.Profile['tabVisibilities'] = sortedTabVisibility; // Update Profile XML File await writeXmlFile(profileFile, profile); - uxLog(this, c.grey("Updated " + profileFile)); + uxLog("log", this, c.grey('Updated ' + profileFile)); } // Summary const msg = `Updated ${c.green(c.bold(promptProfilesToUpdate.profiles.length))} profiles.`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg, updatedNumber: promptProfilesToUpdate.profiles.length }; } diff --git a/src/commands/hardis/project/fix/v53flexipages.ts 
b/src/commands/hardis/project/fix/v53flexipages.ts index 1eb54a5e2..698477270 100644 --- a/src/commands/hardis/project/fix/v53flexipages.ts +++ b/src/commands/hardis/project/fix/v53flexipages.ts @@ -1,92 +1,108 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import { uxLog } from "../../../../common/utils"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import { uxLog } from '../../../../common/utils/index.js'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class FixV53Flexipages extends SfCommand { + public static title = 'Fix flexipages for v53'; -export default class FixV53Flexipages extends SfdxCommand { - public static title = "Fix flexipages for v53"; + public static description: string = ` +## Command Behavior - public static description = `Fix flexipages for apiVersion v53 (Winter22). 
+**Fixes Salesforce FlexiPages for compatibility with API Version 53.0 (Winter '22 release) by adding missing identifiers to component instances.** -Note: Update api version to 53.0 in package.xml and sfdx-project.json`; +Salesforce introduced a change in API Version 53.0 that requires \`identifier\` tags within \`componentInstance\` and \`fieldInstance\` elements in FlexiPage metadata. If these identifiers are missing, deployments to orgs with API version 53.0 or higher will fail. This command automates the process of adding these missing identifiers, ensuring your FlexiPages remain deployable. - public static examples = ["$ sfdx hardis:project:fix:v53flexipages"]; +Key functionalities: - protected static flagsConfig = { - path: flags.string({ - char: "p", +- **Targeted FlexiPage Processing:** Scans all .flexipage-meta.xml files within the specified root folder (defaults to current working directory). +- **Identifier Injection:** Inserts a unique \`identifier\` tag (e.g., \`SFDX_HARDIS_REPLACEMENT_ID\`) into \`componentInstance\` and \`fieldInstance\` elements that lack one. + +**Important Note:** After running this command, ensure you update your \`apiVersion\` to \`53.0\` (or higher) in your \`package.xml\` and \`sfdx-project.json\` files. + +
+Technical explanations + +The command's technical implementation involves: + +- **File Discovery:** Uses \`glob\` to find all .flexipage-meta.xml files. +- **Content Reading:** Reads the XML content of each FlexiPage file. +- **Regular Expression Replacement:** Employs a set of regular expressions to identify specific XML patterns (componentName.../componentName.../componentInstance, componentName.../componentName.../visibilityRule, fieldItem.../fieldItem.../fieldInstance) that are missing the \`identifier\` tag. +- **Dynamic ID Generation:** For each match, it generates a unique identifier (e.g., \`sfdxHardisIdX\`) and injects it into the XML structure. +- **File Writing:** If changes are made, the modified XML content is written back to the FlexiPage file using \`fs.writeFile\`. +- **Logging:** Provides messages about which FlexiPages are being processed and a summary of the total number of identifiers added. +
+`; + + public static examples = ['$ sf hardis:project:fix:v53flexipages']; + + public static flags: any = { + path: Flags.string({ + char: 'p', default: process.cwd(), - description: "Root folder", + description: 'Root folder', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected pathToBrowse: string; protected debugMode = false; public async run(): Promise { - this.pathToBrowse = this.flags.path || process.cwd(); - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(FixV53Flexipages); + this.pathToBrowse = flags.path || process.cwd(); + this.debugMode = flags.debug || false; // Delete standard files when necessary - uxLog(this, c.cyan(`Adding identifiers to componentInstance in flexipages`)); + uxLog("action", this, c.cyan(`Adding identifiers to componentInstance in flexipages`)); /* jscpd:ignore-end */ const globPattern = this.pathToBrowse + `/**/*.flexipage-meta.xml`; let counter = 0; - const flexipages = []; - const flexipageSourceFiles = await glob(globPattern, { cwd: 
this.pathToBrowse }); - uxLog(this, c.grey(`Found ${flexipageSourceFiles.length} flexipages`)); + const flexipages: any[] = []; + const flexipageSourceFiles = await glob(globPattern, { cwd: this.pathToBrowse, ignore: GLOB_IGNORE_PATTERNS }); + uxLog("log", this, c.grey(`Found ${flexipageSourceFiles.length} flexipages`)); const regexAndReplacements = [ { regex: /(.*<\/componentName>\n.*<\/componentInstance>)/gim, - replace: "", + replace: '', replaceWith: `\n SFDX_HARDIS_REPLACEMENT_ID`, }, { regex: /(.*<\/componentName>\n.*)/gim, - replace: "", + replace: '', replaceWith: `\n SFDX_HARDIS_REPLACEMENT_ID`, }, { regex: /(.*<\/fieldItem>\n.*<\/fieldInstance>)/gim, - replace: "", + replace: '', replaceWith: `\n SFDX_HARDIS_REPLACEMENT_ID`, }, ]; for (const flexiFile of flexipageSourceFiles) { - let flexipageRawXml = await fs.readFile(flexiFile, "utf8"); + let flexipageRawXml = await fs.readFile(flexiFile, 'utf8'); let found = false; for (const replaceParams of regexAndReplacements) { const regex = replaceParams.regex; @@ -100,8 +116,8 @@ Note: Update api version to 53.0 in package.xml and sfdx-project.json`; // Iterate thru the regex matches m.forEach((match, groupIndex) => { console.log(`Found match, group ${groupIndex}: ${match}`); - const newId = "sfdxHardisId" + counter; - const replaceWith = replaceParams.replaceWith.replace("SFDX_HARDIS_REPLACEMENT_ID", newId); + const newId = 'sfdxHardisId' + counter; + const replaceWith = replaceParams.replaceWith.replace('SFDX_HARDIS_REPLACEMENT_ID', newId); const replacementWithIdentifier = match.replace(replaceParams.replace, replaceWith); flexipageRawXml = flexipageRawXml.replace(match, replacementWithIdentifier); if (!flexipages.includes(flexiFile)) { @@ -112,14 +128,14 @@ Note: Update api version to 53.0 in package.xml and sfdx-project.json`; } if (found) { await fs.writeFile(flexiFile, flexipageRawXml); - uxLog(this, c.grey("Updated " + flexiFile)); + uxLog("log", this, c.grey('Updated ' + flexiFile)); } } } // Summary 
const msg = `Added ${c.green(c.bold(counter))} identifiers in ${c.green(c.bold(flexipages.length))} flexipages`; - uxLog(this, c.cyan(msg)); + uxLog("action", this, c.cyan(msg)); // Return an object to be displayed with --json return { outputString: msg, updatedNumber: counter, updated: flexipages }; } diff --git a/src/commands/hardis/project/generate/bypass.ts b/src/commands/hardis/project/generate/bypass.ts new file mode 100644 index 000000000..a8e2e3722 --- /dev/null +++ b/src/commands/hardis/project/generate/bypass.ts @@ -0,0 +1,824 @@ +import { + requiredOrgFlagWithDeprecations, + SfCommand, +} from "@salesforce/sf-plugins-core"; +import { Flags } from "@salesforce/sf-plugins-core"; +import { Connection, SfError, Messages } from "@salesforce/core"; +import { AnyJson } from "@salesforce/ts-types"; +import { + soqlQuery, + soqlQueryTooling, +} from "../../../../common/utils/apiUtils.js"; +import { execCommand, uxLog, uxLogTable } from "../../../../common/utils/index.js"; +import { prompts } from "../../../../common/utils/prompts.js"; +import c from "chalk"; +import path from "path"; +import fs from "fs"; +import * as fsExtra from "fs-extra"; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages("sfdx-hardis", "org"); + +import { + parseXmlFile, + writeXmlFile, +} from "../../../../common/utils/xmlUtils.js"; +import { MetadataUtils } from "../../../../common/metadata-utils/index.js"; + +// Constants +const ALLOWED_AUTOMATIONS = ["Flow", "Trigger", "VR"]; +const CREDITS_TEXT = + "by sfdx-hardis : https://sfdx-hardis.cloudity.com/hardis/project/generate/bypass/"; + +const STATUS = { + ADDED: "added", + SKIPPED: "skipped", + IGNORED: "ignored", + FAILED: "failed", +}; + +export default class HardisProjectGenerateBypass extends SfCommand { + private skipCredits = false; + private retrieveFromOrg; + + public static flags: any = { + "target-org": requiredOrgFlagWithDeprecations, + objects: Flags.string({ + aliases: 
["sObjects"], + char: "s", + description: + "Comma-separated list of sObjects to bypass (e.g., Account,Contact,Opportunity). If omitted, you will be prompted to select.", + required: false, + }), + automations: Flags.string({ + char: "a", + description: `Comma-separated automations to bypass: ${ALLOWED_AUTOMATIONS.join( + ", " + )}`, + required: false, + }), + websocket: Flags.string({ + description: messages.getMessage("websocket"), + }), + skipauth: Flags.boolean({ + description: + "Skip authentication check when a default username is required", + }), + "skip-credits": Flags.boolean({ + aliases: ["skipCredits"], + char: "k", + description: 'Omit the "Generated by" line in the XML files', + required: false, + default: false, + }), + "apply-to-vrs": Flags.boolean({ + aliases: ["applyToVrs"], + description: "Apply bypass to Validation Rules", + required: false, + default: false, + }), + "apply-to-triggers": Flags.boolean({ + aliases: ["applyToTriggers"], + description: "Apply bypass to Triggers", + required: false, + default: false, + }), + "metadata-source": Flags.string({ + char: "r", + aliases: ["metadataSource"], + description: + "Source of metadata elements to apply bypass to. Options: 'org' or 'local'.", + required: false, + }), + }; + + public static description = ` +## Command Behavior + +**Generates custom permissions and permission sets to bypass specified Salesforce automations (Flows, Triggers, and Validation Rules) for specific sObjects.** + +This command provides a controlled mechanism to temporarily or permanently disable automations for certain sObjects, which is invaluable for: + +- **Data Loading:** Bypassing validation rules or triggers during large data imports. +- **Troubleshooting:** Isolating automation issues by temporarily disabling them. +- **Development:** Allowing developers to work on specific sObjects without triggering complex automations. 
+ +Key functionalities: + +- **sObject Selection:** You can specify a comma-separated list of sObjects to bypass (e.g., \`Account,Contact\`). If omitted, an interactive prompt will allow you to select from available sObjects. +- **Automation Type Selection:** Choose which types of automations to bypass: \`Flow\`, \`Trigger\`, or \`VR\` (Validation Rules). If omitted, an interactive prompt will guide your selection. +- **Automatic Bypass Application:** Optionally, the command can automatically inject bypass logic into Validation Rules and Triggers. This involves modifying the Apex code for Triggers and the XML for Validation Rules. +- **Metadata Source:** You can choose to retrieve the metadata elements (Validation Rules, Triggers) from the org (\`--metadata-source org\`) or use local files (\`--metadata-source local\`). Retrieving from the org is recommended for accuracy. +- **Custom Permission and Permission Set Generation:** For each selected sObject and automation type, it generates: + - A **Custom Permission** (e.g., \`BypassAccountFlows\`) that acts as the bypass switch. + - A **Permission Set** (e.g., \`BypassAccountFlows\`) that grants the generated Custom Permission. +- **Credits Inclusion:** By default, generated XML files include a comment indicating they were generated by sfdx-hardis. This can be skipped using \`--skip-credits\`. + +
+Technical explanations + +The command's technical implementation involves: + +- **SOQL Queries (Tooling API):** It queries \`EntityDefinition\` to list customizable sObjects and \`ValidationRule\` and \`ApexTrigger\` to find existing automations. +- **Interactive Prompts:** Uses the \`prompts\` library to guide the user through selecting sObjects, automation types, and bypass application options. +- **XML Generation:** Dynamically generates XML content for Custom Permissions and Permission Sets, including descriptions and labels that clearly indicate their purpose. +- **File System Operations:** Uses \`fs-extra\` to create directories and write the generated Custom Permission and Permission Set XML files. +- **Metadata Retrieval (for Bypass Application):** If \`apply-to-vrs\` or \`apply-to-triggers\` is used and \`metadata-source\` is \`org\`, it retrieves the relevant Validation Rule or Apex Trigger metadata from the org using \`sf project retrieve start\`. +- **XML/Apex Code Modification:** + - For Validation Rules, it modifies the \`errorConditionFormula\` in the XML to include a check for the bypass Custom Permission. + - For Apex Triggers, it injects an \`if\` statement at the beginning of the trigger body to check for the bypass Custom Permission. +- **\`parseXmlFile\` and \`writeXmlFile\`:** Used for reading and writing XML metadata files. +- **\`execCommand\`:** Used for executing Salesforce CLI commands, particularly for metadata retrieval. +- **Error Handling:** Includes checks for invalid sObject or automation selections and provides informative error messages. +
+`; + + public static examples = [ + "$ sf hardis:project:generate:bypass", + "$ sf hardis:project:generate:bypass --sObjects Account,Contact,Opportunity", + "$ sf hardis:project:generate:bypass --automations Flow,Trigger,VR", + "$ sf hardis:project:generate:bypass --sObjects Account,Opportunity --automations Flow,Trigger", + "$ sf hardis:project:generate:bypass --skipCredits", + "$ sf hardis:project:generate:bypass --apply-to-vrs", + "$ sf hardis:project:generate:bypass --apply-to-triggers", + "$ sf hardis:project:generate:bypass --metadata-source org", + ]; + + // Main run method + public async run(): Promise { + // Collect options + const { flags } = await this.parse(HardisProjectGenerateBypass); + const connection = flags["target-org"].getConnection(); + if ( + flags["metadata-source"] !== undefined && + flags["metadata-source"] !== null + ) { + this.retrieveFromOrg = + String(flags["metadata-source"]).trim().toLowerCase() === "org"; + } + this.skipCredits = flags["skip-credits"] || false; + let applyToTriggers = flags["apply-to-triggers"] || null; + let applyToVrs = flags["apply-to-vrs"] || null; + const sObjects = flags.objects || null; + const automations = flags.automations || null; + + const availableSObjects = await this.getFilteredSObjects(connection); + let targetSObjects = {}; + let targetAutomations = []; + + // Filter objects + if (sObjects) { + const sObjectsFromFlag = flags.sObjects.split(",").map((s) => s.trim()); + targetSObjects = Object.fromEntries( + Object.entries(availableSObjects).filter(([key]) => { + const res = sObjectsFromFlag.includes(key); + if (!res) { + uxLog( + "warning", + this, + c.yellow( + `Warning: sObject "${key}" is not available or not customizable. 
Skipping.` + ) + ); + } + return res; + }) + ); + } + + if (automations) { + targetAutomations = automations + .split(",") + .map((s) => s.trim()) + .filter((s) => ALLOWED_AUTOMATIONS.includes(s)); + } + + // Generate global bypasses + uxLog("action", this, c.cyan(`Generating global bypasses...`)); + this.generateFiles({ All: "All" }, ALLOWED_AUTOMATIONS); + + // Handle prompts if needed + const promptsNeeded: any = []; + if (!Object.keys(targetSObjects).length) { + promptsNeeded.push({ + type: "multiselect", + name: "sobjects", + message: "Select sObjects for bypass", + description: "Choose which sObjects should have automation bypass functionality", + choices: Object.entries(availableSObjects).map(([devName, label]) => ({ + title: label, + value: devName, + })), + }); + } + + if (!targetAutomations.length) { + promptsNeeded.push({ + type: "multiselect", + name: "automations", + message: "Select automations to bypass", + description: "Choose which types of automation should be bypassed", + choices: ALLOWED_AUTOMATIONS.map((a) => ({ title: a, value: a })), + }); + } + + if (applyToVrs == null && applyToTriggers == null) { + promptsNeeded.push({ + type: "multiselect", + name: "applyTo", + message: + "To which automations do you want to automatically apply the bypass?", + description: "Select which automation types should have automatic bypass logic applied", + choices: [ + { title: "Validation Rules", value: "applyToVrs" }, + { title: "Triggers", value: "applyToTriggers" }, + ], + }); + } + + if (this.retrieveFromOrg == undefined || this.retrieveFromOrg == null) { + promptsNeeded.push({ + type: "select", + name: "elementSource", + message: "Where do you want to get the elements to apply bypass to?", + description: "Choose the source for retrieving automation elements", + placeholder: "Select source", + choices: [ + { title: "Retrieve from org (recommended)", value: "org" }, + { title: "Use local elements in the project", value: "local" }, + ], + }); + } + + if 
(promptsNeeded.length) { + const promptResults = await prompts(promptsNeeded); + if (promptResults.sobjects) { + targetSObjects = Object.fromEntries( + Object.entries(availableSObjects).filter(([key]) => + promptResults.sobjects.includes(key) + ) + ); + } + if (promptResults.automations) { + targetAutomations = promptResults.automations; + } + + if (!applyToTriggers) { + applyToTriggers = promptResults.applyTo?.includes("applyToTriggers"); + } + if (!applyToVrs) { + applyToVrs = promptResults.applyTo?.includes("applyToVrs"); + } + + if (promptResults.elementSource) { + this.retrieveFromOrg = promptResults.elementSource === "org"; + } + } + + // Validate selections + if (!Object.keys(targetSObjects).length) { + throw new SfError(c.red("ERROR: You must select at least one sObject.")); + } + if (!targetAutomations.length) { + throw new SfError( + c.red("ERROR: You must select at least one automation type.") + ); + } + + // Generate files and apply bypasses + uxLog("action", this, c.cyan(`Generating bypass files for selected sObjects and automations...`)); + this.generateFiles(targetSObjects, targetAutomations); + + if (applyToVrs) { + uxLog("action", this, c.cyan(`Applying bypass to Validation Rules...`)); + await this.applyBypassToValidationRules(connection, targetSObjects); + } + + if (applyToTriggers) { + uxLog("action", this, c.cyan(`Applying bypass to Triggers...`)); + await this.applyBypassToTriggers(connection, targetSObjects); + } + + return { + outputString: "Generated bypass custom permissions and permission sets", + }; + } + + // Query methods + public async querySObjects(connection: Connection) { + const sObjectsQuery = ` + Select Id, Label, DeveloperName, QualifiedApiName, DurableId, IsTriggerable, IsCustomizable, IsApexTriggerable + FROM EntityDefinition WHERE IsTriggerable = true AND IsCustomizable = true and IsCustomSetting = false ORDER BY DeveloperName`; + const results = await soqlQuery(sObjectsQuery, connection); + uxLog("log", this, c.grey(`Found 
${results.records.length} sObjects.`)); + return results; + } + + public async getFilteredSObjects( + connection: Connection + ): Promise<{ [key: string]: string }> { + const sObjectResults = await this.querySObjects(connection); + const sObjectsDict: { [key: string]: string } = {}; + + for (const record of sObjectResults.records) { + if ( + !record.DeveloperName.endsWith("__Share") && + !record.DeveloperName.endsWith("__ChangeEvent") + ) { + sObjectsDict[ + record.DeveloperName + ] = `${record.Label} (${record.QualifiedApiName})`; + } + } + + return sObjectsDict; + } + + public async queryTriggers(connection: Connection) { + const query = `SELECT Id, Name, Status, IsValid, Body, BodyCrc, TableEnumOrId, ManageableState From ApexTrigger WHERE ManageableState != 'installed'`; + const results = await soqlQueryTooling(query, connection); + uxLog("log", this, c.grey(`Found ${results.records.length} Triggers.`)); + return results; + } + + public filterTriggerResults(triggerResults, sObjects) { + return triggerResults.records.filter((trigger) => { + const sObjectApiNameWithoutC = trigger.TableEnumOrId?.replace("__c", ""); + return ( + sObjectApiNameWithoutC && + Object.keys(sObjects).includes(sObjectApiNameWithoutC) && + trigger.Body != "(hidden)" + ); + }); + } + + public async queryValidationRules( + connection: Connection, + sObjects: { [key: string]: string } + ) { + const query = `SELECT ValidationName, EntityDefinition.QualifiedApiName, ManageableState FROM ValidationRule + WHERE ManageableState != 'installed' AND EntityDefinition.DeveloperName IN (${Object.keys( + sObjects + ) + .map((s) => `'${s}'`) + .join(", ")})`; + const results = await soqlQueryTooling(query, connection); + uxLog("log", this, c.grey(`Found ${results.records.length} Validation Rules.`)); + return results; + } + + // XML Generation + public generateXML( + type: "customPermission" | "permissionSet", + sObject: string, + automation: string + ): string { + const creditsText = this.skipCredits ? 
"" : `Generated ${CREDITS_TEXT}`; + + if (type === "customPermission") { + return ` + + false + + If assigned (through a Permission Set), this Custom Permission will disable the execution of ${automation}s defined on the ${sObject} sObject.${creditsText} + `; + } else { + return ` + + + true + Bypass${sObject}${automation}s + + false + + If assigned, this Permission Set will disable the execution of ${automation}s defined on the ${sObject} sObject.${creditsText} + `; + } + } + + private generateXMLFiles(sObject: string, automation: string) { + const customPermissionFile = path.join( + `force-app/main/default/customPermissions/Bypass${sObject}${automation}s.customPermission-meta.xml` + ); + const permissionSetFile = path.join( + `force-app/main/default/permissionsets/Bypass${sObject}${automation}s.permissionset-meta.xml` + ); + + fsExtra.ensureDirSync(path.dirname(customPermissionFile)); + fs.writeFileSync( + customPermissionFile, + this.generateXML("customPermission", sObject, automation), + "utf-8" + ); + fsExtra.ensureDirSync(path.dirname(permissionSetFile)); + fs.writeFileSync( + permissionSetFile, + this.generateXML("permissionSet", sObject, automation), + "utf-8" + ); + + uxLog( + "log", + this, + c.grey(`Created: ${path.basename(customPermissionFile)} for ${sObject}`) + ); + uxLog("log", this, c.grey(`Created: ${path.basename(permissionSetFile)} for ${sObject}`)); + } + + generateFiles( + targetSObjects: { [key: string]: string }, + targetAutomations: string[] + ): void { + Object.keys(targetSObjects).forEach((developerName) => { + targetAutomations.forEach((automation) => { + this.generateXMLFiles(developerName, automation); + }); + }); + } + + // Metadata handling + public async retrieveMetadataFiles( + records: any[], + metadataType: "ValidationRule" | "ApexTrigger" + ): Promise { + const recordsChunks = this.chunkArray(records); + const results: any[] = []; + + for (const chunk of recordsChunks) { + let command = `sf project retrieve start --metadata`; + 
command += chunk + .map((record: any) => { + return metadataType === "ValidationRule" + ? ` ValidationRule:${record.EntityDefinition.QualifiedApiName}.${record.ValidationName}` + : ` ApexTrigger:${record.Name}`; + }) + .join(" "); + + try { + const result = await execCommand( + `${command} --ignore-conflicts --json`, + this, + { + debug: false, + retry: { + retryDelay: 30, + retryStringConstraint: "error", + retryMaxAttempts: 3, + }, + } + ); + results.push(result); + } catch (error) { + uxLog("error", this, c.red(`Error retrieving ${metadataType}: ${error}`)); + } + } + + return results; + } + + public chunkArray(array: T[], chunkSize: number = 25): T[][] { + return Array.from({ length: Math.ceil(array.length / chunkSize) }, (_, i) => + array.slice(i * chunkSize, (i + 1) * chunkSize) + ); + } + + // Validation Rules + public async handleValidationRuleFile( + filePath: string, + sObject: string, + name: string + ) { + try { + const fileContent = await parseXmlFile(filePath); + if ( + !fileContent?.ValidationRule?.errorConditionFormula?.[0] || + typeof fileContent.ValidationRule.errorConditionFormula[0] !== "string" + ) { + return { + sObject, + name, + action: STATUS.FAILED, + comment: + "Invalid validation rule format or missing error condition formula", + }; + } + + const validationRuleContent = + fileContent.ValidationRule.errorConditionFormula[0]; + const bypassPermissionName = `$Permission.Bypass${sObject}VRs`; + + if ( + typeof validationRuleContent === "string" && + validationRuleContent.includes(bypassPermissionName) + ) { + return { + sObject, + name, + action: STATUS.IGNORED, + comment: "SFDX-Hardis Bypass already implemented", + }; + } + + if ( + typeof validationRuleContent === "string" && + /bypass/i.test(validationRuleContent) + ) { + return { + sObject, + name, + action: STATUS.SKIPPED, + comment: "Another bypass mechanism exists", + }; + } + + const creditsText = this.skipCredits + ? 
"" + : `/* Updated ${CREDITS_TEXT} */ + `; + fileContent.ValidationRule.errorConditionFormula[0] = `${creditsText} + AND( AND(NOT(${bypassPermissionName}), NOT($Permission.BypassAllVRs)), ${validationRuleContent})`; + await writeXmlFile(filePath, fileContent); + return { + sObject, + name, + action: STATUS.ADDED, + comment: "SFDX-Hardis Bypass implemented", + }; + } catch (error) { + return { + sObject, + name, + action: STATUS.FAILED, + comment: `Error processing file : ${error}`, + }; + } + } + + public async applyBypassToValidationRules( + connection: Connection, + sObjects: { [key: string]: string } + ): Promise { + const validationRuleRecords = await this.queryValidationRules( + connection, + sObjects + ); + + if (!validationRuleRecords || validationRuleRecords.records.length === 0) { + uxLog("log", this, c.grey("No validation rules found for the specified sObjects.")); + return; + } + + uxLog( + "log", + this, + c.grey(`Processing ${validationRuleRecords.records.length} Validation Rules.`) + ); + + const validationRulesTableReport: any = []; + const eligibleMetadataFilePaths: any = []; + + if (this.retrieveFromOrg) { + const retrievedValidationRulesChunks = await this.retrieveMetadataFiles( + validationRuleRecords.records, + "ValidationRule" + ); + for (const retrievedValidationRules of retrievedValidationRulesChunks) { + if ( + retrievedValidationRules?.status !== 1 && + retrievedValidationRules?.result?.files && + Array.isArray(retrievedValidationRules.result.files) && + retrievedValidationRules.result.files.length > 0 + ) { + for (const metadataFile of retrievedValidationRules.result.files) { + if ( + metadataFile?.type !== "ValidationRule" || + metadataFile?.problemType === "Error" + ) { + continue; + } + + const [sObject, name] = metadataFile.fullName.split(".") as [ + string, + string + ]; + const filePath = metadataFile.filePath; + eligibleMetadataFilePaths.push({ filePath, sObject, name }); + } + } else { + uxLog( + "log", + this, + c.grey("No 
Validation Rule files found in the retrieved metadata chunk.") + ); + } + } + } else { + if (validationRuleRecords?.records) { + for (const record of validationRuleRecords.records) { + const sObject = record.EntityDefinition.QualifiedApiName; + const name = record.ValidationName; + const filePath = await MetadataUtils.findMetaFileFromTypeAndName( + "ValidationRule", + name + ); + if (filePath === null) { + // TODO: add to report instead of log + uxLog( + "log", + this, + c.grey(`The validation rule ${name} for sObject ${sObject} does not have a corresponding metadata file locally. Skipping.`) + ); + } else { + eligibleMetadataFilePaths.push({ filePath, sObject, name }); + } + } + } + } + + for (const eligibleMetadataFilePath of eligibleMetadataFilePaths) { + validationRulesTableReport.push( + await this.handleValidationRuleFile( + eligibleMetadataFilePath.filePath, + eligibleMetadataFilePath.sObject, + eligibleMetadataFilePath.name + ) + ); + } + uxLog("action", this, c.cyan(`Validation Rules bypass report:`)); + uxLogTable(this, validationRulesTableReport); + } + + // Triggers + public async handleTriggerFile( + filePath: string, + name: string + ): Promise<{ [key: string]: string | null }> { + try { + if (!fs.existsSync(filePath)) { + return { + sObject: null, + name, + action: STATUS.FAILED, + comment: "File not found", + }; + } + + const fileContent = fs.readFileSync(filePath, "utf-8"); + + if (typeof fileContent !== "string") { + return { + sObject: null, + name, + action: STATUS.FAILED, + comment: "Invalid file content format", + }; + } + + const match = fileContent.match( + /trigger\s+\w+\s+on\s+(\w+)\s*\([^)]*\)\s*{\s*/i + ); + if (!match) { + return { + sObject: null, + name, + action: STATUS.FAILED, + comment: "Unable to detect sObject", + }; + } + + const sObject = match[1].replace(/__c$/, ""); + const bypassCheckLine = `if(FeatureManagement.checkPermission('Bypass${sObject}Triggers') || FeatureManagement.checkPermission('BypassAllTriggers')) { return; 
}`; + + if (fileContent.includes(bypassCheckLine)) { + return { + sObject, + name, + action: STATUS.IGNORED, + comment: "Bypass already implemented", + }; + } + + if (/bypass|PAD\.can/i.test(fileContent)) { + return { + sObject, + name, + action: STATUS.SKIPPED, + comment: "Another bypass exists", + }; + } + + const fullBypassLine = `${bypassCheckLine}${this.skipCredits ? "" : "// Updated " + CREDITS_TEXT + }`; + const openBraceIndex = fileContent.indexOf("{"); + const beforeBrace = fileContent.substring(0, openBraceIndex + 1); + const afterBrace = fileContent.substring(openBraceIndex + 1).trimStart(); + + fsExtra.ensureDirSync(path.dirname(filePath)); + fs.writeFileSync( + filePath, + `${beforeBrace}\n\t${fullBypassLine}\n\t${afterBrace}`, + "utf-8" + ); + return { + sObject, + name, + action: STATUS.ADDED, + comment: "Bypass implemented", + }; + } catch (error) { + return { + sObject: null, + name, + action: STATUS.FAILED, + comment: `Error processing file : ${error}`, + }; + } + } + + public async applyBypassToTriggers( + connection: Connection, + sObjects: { [key: string]: string } + ): Promise { + const triggerResults = await this.queryTriggers(connection); + + const filteredTriggersResults = this.filterTriggerResults( + triggerResults, + sObjects + ); + + if (!filteredTriggersResults || filteredTriggersResults?.length === 0) { + uxLog("log", this, c.grey("No triggers found for the specified sObjects.")); + return; + } + + const triggerReport: any = []; + + const eligibleMetadataFilePaths: any = []; + + if (this.retrieveFromOrg) { + const retrievedTriggersChunks = await this.retrieveMetadataFiles( + filteredTriggersResults, + "ApexTrigger" + ); + + for (const retrievedTriggers of retrievedTriggersChunks) { + if ( + retrievedTriggers?.status !== 1 && + retrievedTriggers?.result?.files && + Array.isArray(retrievedTriggers.result.files) && + retrievedTriggers.result.files.length > 0 + ) { + for (const metadataFile of retrievedTriggers.result.files) { + if ( + 
metadataFile?.type !== "ApexTrigger" || + !metadataFile?.filePath?.endsWith(".trigger") || + metadataFile?.problemType === "Error" + ) { + continue; + } + const name = metadataFile.fullName; + const filePath = metadataFile.filePath; + eligibleMetadataFilePaths.push({ filePath, name }); + } + } else { + uxLog( + "log", + this, + c.grey("No Trigger files found in the retrieved metadata chunk.") + ); + } + } + } else { + if (filteredTriggersResults) { + for (const record of filteredTriggersResults) { + const name = record.Name; + const filePath = await MetadataUtils.findMetaFileFromTypeAndName( + "ApexTrigger", + name + ); + if (filePath === null) { + // TODO: add to report instead of log + uxLog( + "log", + this, + c.grey(`The trigger ${name} does not have a corresponding metadata file locally. Skipping.`) + ); + } else { + eligibleMetadataFilePaths.push({ filePath, name }); + } + } + } + } + + for (const eligibleMetadataFilePath of eligibleMetadataFilePaths) { + triggerReport.push( + await this.handleTriggerFile( + eligibleMetadataFilePath.filePath, + eligibleMetadataFilePath.name + ) + ); + } + uxLog("action", this, c.cyan(`Trigger bypass report:`)); + uxLogTable(this, triggerReport); + } +} diff --git a/src/commands/hardis/project/generate/flow-git-diff.ts b/src/commands/hardis/project/generate/flow-git-diff.ts new file mode 100644 index 000000000..654bd0496 --- /dev/null +++ b/src/commands/hardis/project/generate/flow-git-diff.ts @@ -0,0 +1,157 @@ +/* jscpd:ignore-start */ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from "chalk"; +import * as path from "path"; +import { + ensureGitRepository, + git, + isCI, + uxLog, +} from '../../../../common/utils/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import moment from 'moment'; +import { generateFlowVisualGitDiff, generateHistoryDiffMarkdown } from 
'../../../../common/utils/mermaidUtils.js'; +import { MetadataUtils } from '../../../../common/metadata-utils/index.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class GenerateFlowGitDiff extends SfCommand { + public static title = 'Generate Flow Visual Gif Diff'; + + public static description = `Generate Flow Visual Git Diff markdown between 2 commits + +Note: This command might requires @mermaid-js/mermaid-cli to be installed. + +Run \`npm install @mermaid-js/mermaid-cli --global\` + `; + + public static examples = [ + '$ sf hardis:project:generate:flow-git-diff', + '$ sf hardis:project:generate:flow-git-diff --flow "force-app/main/default/flows/Opportunity_AfterUpdate_Cloudity.flow-meta.xml" --commit-before 8bd290e914c9dbdde859dad7e3c399776160d704 --commit-after e0835251bef6e400fb91e42f3a31022f37840f65' + ]; + + public static flags: any = { + flow: Flags.string({ + description: 'Path to flow file (will be prompted if not set)', + }), + "commit-before": Flags.string({ + description: 'Hash of the commit of the previous flow state, or "allStates" (will be prompted if not set)', + default: "" + }), + "commit-after": Flags.string({ + description: 'Hash of the commit of the new flow state (will be prompted if not set)', + default: "", + }), + debug: Flags.boolean({ + char: 'd', + default: false, + description: messages.getMessage('debugMode'), + }), + websocket: Flags.string({ + description: messages.getMessage('websocket'), + }), + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', + }), + }; + + // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; + + protected flowFile: string; + protected flowLabel: string; + commitBefore: string = ""; + 
commitAfter: string = ""; + protected debugMode = false; + + /* jscpd:ignore-end */ + + public async run(): Promise { + const { flags } = await this.parse(GenerateFlowGitDiff); + this.flowFile = flags.flow || ""; + this.commitBefore = flags["commit-before"] || ""; + this.commitAfter = flags["commit-after"] || ""; + this.debugMode = flags.debug || false; + // Check git repo + await ensureGitRepository(); + + // Prompt flow file if not send as input param + if (this.flowFile == "" && !isCI) { + this.flowFile = await MetadataUtils.promptFlow(); + } + this.flowLabel = path.basename(this.flowFile, ".flow-meta.xml"); + + // List states of flow file using git + const fileHistory = await git().log({ file: this.flowFile }); + if (fileHistory.all.length === 1) { + uxLog("success", this, c.green(`There is only one state for Flow ${this.flowFile}`)); + return {}; + } + + // Build prompt choices + let allChoices: any[] = []; + if ((this.commitBefore === "" || this.commitAfter === "") && !isCI) { + allChoices = fileHistory.all.map(log => { + return { + value: log.hash, + title: `${moment(log.date).format("ll")}: ${log.message}`, + description: `By ${log.author_name}(${log.author_email}) in ${log.refs}` + } + }); + } + + // Prompt commits if not sent as input parameter + if (this.commitBefore === "" && !isCI) { + const commitBeforeSelectRes = await prompts({ + type: 'select', + name: 'before', + message: 'Please select BEFORE UPDATE commit', + description: 'Choose the commit representing the state before your changes', + placeholder: 'Select a commit', + choices: [...allChoices, ...[ + { + title: "Calculate for all Flow states", + value: "allStates", + description: "Requires mkdocs-material to be read correctly. If you do not have it, we advise to select 2 commits for comparison." 
+ } + ]] + }); + this.commitBefore = commitBeforeSelectRes.before; + } + let diffMdFile; + + if (this.commitBefore === "allStates") { + diffMdFile = await generateHistoryDiffMarkdown(this.flowFile, this.debugMode); + uxLog("warning", this, c.yellow(`It is recommended to use mkdocs-material to read it correctly (see https://sfdx-hardis.cloudity.com/hardis/doc/project2markdown/#doc-html-pages)`)); + } + else { + if (this.commitAfter === "" && !isCI) { + // Compute between 2 commits: prompt for the second one + const commitAfterSelectRes = await prompts({ + type: 'select', + name: 'after', + message: 'Please select AFTER UPDATE commit', + description: 'Choose the commit representing the state after your changes', + placeholder: 'Select a commit', + choices: allChoices + }) + this.commitAfter = commitAfterSelectRes.after; + } + // Generate diff + const { outputDiffMdFile } = await generateFlowVisualGitDiff(this.flowFile, this.commitBefore, this.commitAfter, { svgMd: true, pngMd: false, mermaidMd: this.debugMode, debug: this.debugMode }) + diffMdFile = outputDiffMdFile; + // Open file in a new VsCode tab if available + WebSocketClient.requestOpenFile(path.relative(process.cwd(), outputDiffMdFile)); + } + + // Return an object to be displayed with --json + return { + diffMdFile: diffMdFile + }; + } + +} diff --git a/src/commands/hardis/project/generate/gitdelta.ts b/src/commands/hardis/project/generate/gitdelta.ts index 1a58e28a6..533f98014 100644 --- a/src/commands/hardis/project/generate/gitdelta.ts +++ b/src/commands/hardis/project/generate/gitdelta.ts @@ -1,68 +1,94 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as path from "path"; -import { createTempDir, ensureGitRepository, git, gitCheckOutRemote, selectGitBranch, uxLog } from "../../../../common/utils"; -import { callSfdxGitDelta } from 
"../../../../common/utils/gitUtils"; -import { prompts } from "../../../../common/utils/prompts"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class GenerateGitDelta extends SfdxCommand { - public static title = "Generate Git Delta"; - - public static description = "Generate package.xml git delta between 2 commits"; - - public static examples = ["$ sfdx hardis:project:generate:gitdelta"]; - - protected static flagsConfig = { - branch: flags.string({ - description: "Git branch to use to generate delta", +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as path from 'path'; +import { + createTempDir, + ensureGitRepository, + git, + gitCheckOutRemote, + selectGitBranch, + uxLog, +} from '../../../../common/utils/index.js'; +import { callSfdxGitDelta } from '../../../../common/utils/gitUtils.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../../common/websocketClient.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class GenerateGitDelta extends SfCommand { + public static title = 'Generate Git Delta'; + + public static description = ` +## Command Behavior + +**Generates a \`package.xml\` and \`destructiveChanges.xml\` representing the metadata differences between two Git commits.** + +This command is a powerful tool for managing Salesforce metadata deployments by focusing only on the changes between specific points in your version control 
history. It leverages \`sfdx-git-delta\` to accurately identify added, modified, and deleted metadata components. + +Key functionalities: + +- **Commit-Based Comparison:** Allows you to specify a starting commit (\`--fromcommit\`) and an ending commit (\`--tocommit\`) to define the scope of the delta. If not provided, interactive prompts will guide you through selecting commits from your Git history. +- **Branch Selection:** You can specify a Git branch (\`--branch\`) to work with. If not provided, it will prompt you to select one. +- **\`package.xml\` Generation:** Creates a \`package.xml\` file that lists all metadata components that have been added or modified between the specified commits. +- **\`destructiveChanges.xml\` Generation:** Creates a \`destructiveChanges.xml\` file that lists all metadata components that have been deleted between the specified commits. +- **Temporary File Output:** The generated \`package.xml\` and \`destructiveChanges.xml\` files are placed in a temporary directory. + +
+Technical explanations + +The command's technical implementation involves: + +- **Git Integration:** Uses \`simple-git\` (\`git()\`) to interact with the Git repository, including fetching branches (\`git().fetch()\`), checking out branches (\`git().checkoutBranch()\`), and listing commit history (\`git().log()\`). +- **Interactive Prompts:** Leverages the \`prompts\` library to guide the user through selecting a Git branch and specific commits for delta generation if they are not provided as command-line arguments. +- **\`sfdx-git-delta\` Integration:** The core of the delta generation is handled by the \`callSfdxGitDelta\` utility function, which wraps the \`sfdx-git-delta\` tool. This tool performs the actual Git comparison and generates the \`package.xml\` and \`destructiveChanges.xml\` files. +- **Temporary Directory Management:** Uses \`createTempDir\` to create a temporary directory for storing the generated XML files, ensuring a clean working environment. +- **File System Operations:** Uses \`fs-extra\` to manage temporary files and directories. +- **User Feedback:** Provides clear messages to the user about the generated files and their locations. +
+`;
+
+  public static examples = ['$ sf hardis:project:generate:gitdelta'];
+
+  public static flags: any = {
+    branch: Flags.string({
+      description: 'Git branch to use to generate delta',
     }),
-    fromcommit: flags.string({
-      description: "Hash of commit to start from",
+    fromcommit: Flags.string({
+      description: 'Hash of commit to start from',
     }),
-    tocommit: flags.string({
-      description: "Hash of commit to stop at",
+    tocommit: Flags.string({
+      description: 'Hash of commit to stop at',
     }),
-    debug: flags.boolean({
-      char: "d",
+    debug: Flags.boolean({
+      char: 'd',
       default: false,
-      description: messages.getMessage("debugMode"),
+      description: messages.getMessage('debugMode'),
     }),
-    websocket: flags.string({
-      description: messages.getMessage("websocket"),
+    websocket: Flags.string({
+      description: messages.getMessage('websocket'),
     }),
-    skipauth: flags.boolean({
-      description: "Skip authentication check when a default username is required",
+    skipauth: Flags.boolean({
+      description: 'Skip authentication check when a default username is required',
     }),
   };
 
-  // Comment this out if your command does not require an org username
-  protected static requiresUsername = false;
-
-  // Comment this out if your command does not support a hub org username
-  protected static requiresDevhubUsername = false;
-  // Set this to true if your command requires a project workspace; 'requiresProject' is false by default
-  protected static requiresProject = false;
+  public static requiresProject = false;
 
   protected debugMode = false;
 
   /* jscpd:ignore-end */
 
   public async run(): Promise<AnyJson> {
-    let gitBranch = this.flags.branch || null;
-    let fromCommit = this.flags.fromcommit || null;
-    let toCommit = this.flags.fromcommit || null;
-    this.debugMode = this.flags.debugMode || false;
+    const { flags } = await this.parse(GenerateGitDelta);
+    let gitBranch = flags.branch || null;
+    let fromCommit = flags.fromcommit || null;
+    let toCommit = flags.tocommit || null;
+    this.debugMode = flags.debug || false;
 
     // Check git repo
     await ensureGitRepository();
@@ -74,8 +100,11 @@ export default class GenerateGitDelta extends SfdxCommand {
     }
 
     // List branch commits
-    const branchCommits = await git().log(["--first-parent"]);
-    const branchCommitsChoices = branchCommits.all.map((commit) => {
+    const branchCommits = await git().log(['--first-parent']);
+    let pos = 0;
+    const branchCommitsChoices = branchCommits.all.map((commit: any) => {
+      commit.pos = pos;
+      pos++;
       return {
         title: commit.message,
         description: `${commit.author_name} on ${new Date(commit.date).toLocaleString()}`,
@@ -84,48 +113,74 @@ export default class GenerateGitDelta extends SfdxCommand {
     });
 
     // Prompt fromCommit
+    let selectedFirstCommitLabel = "";
+    let selectedFirstCommitPos = 0;
     if (fromCommit === null) {
       const headItem = {
-        title: "HEAD",
+        title: 'HEAD',
         description: `Current git HEAD`,
-        value: { hash: "HEAD" },
+        value: { hash: 'HEAD' },
       };
       const commitFromResp = await prompts({
-        type: "select",
-        name: "value",
-        message: "Please select the commit that you want to start from",
+        type: 'select',
+        name: 'value',
+        message: 'Please select the commit that you want to start from',
+        description: 'Choose the starting commit for the delta generation',
+        placeholder: 'Select a commit',
         choices: [headItem, ...branchCommitsChoices],
       });
       fromCommit = commitFromResp.value.hash;
+      selectedFirstCommitLabel = commitFromResp.value.message;
+      selectedFirstCommitPos = commitFromResp.value.pos;
     }
 
     // Prompt toCommit
     if (toCommit === null) {
       const currentItem = {
-        title: "current",
+        title: 'current',
         description: `Local files not committed yet`,
-        value: { hash: "*" },
+        value: { hash: '*' },
+      };
+      const singleCommitChoice = {
+        title: 'Single commit',
+        description: `Only for ${selectedFirstCommitLabel}`,
+        value: branchCommitsChoices[selectedFirstCommitPos + 1].value
       };
       const commitToResp = await prompts({
-        type: "select",
-        name: "value",
-        message: "Please select the commit hash that you want to go to",
- choices: [currentItem, ...branchCommitsChoices], + type: 'select', + name: 'value', + message: 'Please select the commit hash that you want to go to', + description: 'Choose the ending commit for the delta generation', + placeholder: 'Select a commit', + choices: [currentItem, singleCommitChoice, ...branchCommitsChoices], }); toCommit = commitToResp.value.hash; } - // Generate package.xml & destructiveChanges.xml using sfdx git delta + // Generate package.xml & destructiveChanges.xml using sfdx-git-delta + uxLog("action", this, c.cyan(`Generating delta from commit ${c.bold(fromCommit)} to commit ${c.bold(toCommit)} on branch ${c.bold(gitBranch)}`)); const tmpDir = await createTempDir(); - await callSfdxGitDelta(fromCommit, toCommit, tmpDir, { debug: this.debugMode }); + await callSfdxGitDelta(fromCommit || '', toCommit || '', tmpDir, { debug: this.debugMode }); - const diffPackageXml = path.join(tmpDir, "package", "package.xml"); - const diffDestructiveChangesXml = path.join(tmpDir, "destructiveChanges", "destructiveChanges.xml"); + const diffPackageXml = path.join(tmpDir, 'package', 'package.xml'); + const diffDestructiveChangesXml = path.join(tmpDir, 'destructiveChanges', 'destructiveChanges.xml'); - uxLog(this, c.cyan(`Generated diff package.xml at ${c.green(diffPackageXml)}`)); - uxLog(this, c.cyan(`Generated diff destructiveChanges.xml at ${c.green(diffDestructiveChangesXml)}`)); + uxLog("log", this, c.grey(`Generated diff package.xml at ${c.green(diffPackageXml)}`)); + uxLog("log", this, c.grey(`Generated diff destructiveChanges.xml at ${c.green(diffDestructiveChangesXml)}`)); + + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(diffPackageXml, 'Git Delta package.xml', "report"); + WebSocketClient.sendReportFileMessage(diffDestructiveChangesXml, 'Git Delta destructiveChanges.xml', "report"); + } else { + WebSocketClient.requestOpenFile(diffPackageXml); + WebSocketClient.requestOpenFile(diffDestructiveChangesXml); + } // 
Return an object to be displayed with --json - return { outputString: "Generated package.xml", diffPackageXml: diffPackageXml, diffDestructiveChangesXml: diffDestructiveChangesXml }; + return { + outputString: 'Generated package.xml', + diffPackageXml: diffPackageXml, + diffDestructiveChangesXml: diffDestructiveChangesXml, + }; } } diff --git a/src/commands/hardis/project/lint.ts b/src/commands/hardis/project/lint.ts index 49a786b67..b8e59be6a 100644 --- a/src/commands/hardis/project/lint.ts +++ b/src/commands/hardis/project/lint.ts @@ -1,54 +1,67 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { isCI, uxLog } from "../../../common/utils"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { MegaLinterRunner } from "mega-linter-runner/lib"; +import { SfCommand, Flags, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { isCI, uxLog } from '../../../common/utils/index.js'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { MegaLinterRunner } from 'mega-linter-runner/lib/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class Lint extends SfCommand { + public static title = 'Lint'; -export default class ProjectCreate extends SfdxCommand { - public static title = "Lint"; + public static description = `## Command Behavior - public static description = "Apply syntactic analysis (linters) on the repository sources, using Mega-Linter"; +**Applies syntactic analysis (linting) to your repository sources using Mega-Linter, ensuring code quality and adherence to coding standards.** - public static examples = ["$ sfdx hardis:project:lint", "$ sfdx hardis:project:lint --fix"]; +This command integrates Mega-Linter, a comprehensive linter orchestrator, into your Salesforce DX project. It helps identify and fix code style violations, potential bugs, and other issues across various file types relevant to Salesforce development. - protected static flagsConfig = { - fix: flags.boolean({ - char: "f", +Key functionalities: + +- **Automated Linting:** Runs a suite of linters configured for Salesforce projects. +- **Fixing Issues (\`--fix\` flag):** Automatically attempts to fix detected linting issues, saving manual effort. +- **Configuration Management:** If \`.mega-linter.yml\` is not found, it guides you through the initial setup of Mega-Linter, prompting for the Salesforce flavor. +- **CI/CD Integration:** Designed to be used in CI/CD pipelines to enforce code quality gates. + +
+Technical explanations + +The command's technical implementation involves: + +- **Mega-Linter Integration:** It leverages the \`mega-linter-runner\` library to execute Mega-Linter. +- **Configuration Check:** Before running, it checks for the presence of \`.mega-linter.yml\`. If not found and not in a CI environment, it initiates an interactive setup process using \`MegaLinterRunner().run({ install: true })\`. +- **Linter Execution:** It calls \`MegaLinterRunner().run(megaLinterOptions)\` with the \`salesforce\` flavor and the \`fix\` flag (if provided). +- **Exit Code Handling:** The \`process.exitCode\` is set based on the Mega-Linter's exit status, allowing CI/CD pipelines to react to linting failures. +- **User Feedback:** Provides clear messages about the success or failure of the linting process. +
+`; + + public static examples = ['$ sf hardis:project:lint', '$ sf hardis:project:lint --fix']; + + public static flags: any = { + fix: Flags.boolean({ + char: 'f', default: false, - description: "Apply linters fixes", + description: 'Apply linters fixes', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': optionalOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - protected static supportsUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected fix = false; protected debugMode = false; @@ -56,39 +69,44 @@ export default class ProjectCreate extends SfdxCommand { /* jscpd:ignore-end */ public async run(): Promise { - this.fix = this.flags.fix || false; - this.debugMode = this.flags.debugMode || false; + const { flags } = await this.parse(Lint); + this.fix = flags.fix || false; + this.debugMode = flags.debug || false; // Check if Mega-Linter is configured - if (!fs.existsSync(".mega-linter.yml")) { + if (!fs.existsSync('.mega-linter.yml')) { if (isCI) { - throw new SfdxError( + throw new SfError( c.red( - "[sfdx-hardis] You must run sfdx hardis:project:lint 
locally to install Mega-Linter configuration before being able to run it from CI", - ), + '[sfdx-hardis] You must run sf hardis:project:lint locally to install Mega-Linter configuration before being able to run it from CI' + ) ); } else { // Configure Mega-Linter (yeoman generator) - uxLog(this, c.cyan("Mega-Linter needs to be configured. Please select Salesforce flavor in the following wizard")); + uxLog( + "action", + this, + c.cyan('Mega-Linter needs to be configured. Please select Salesforce flavor in the following wizard') + ); const megaLinter = new MegaLinterRunner(); const installRes = megaLinter.run({ install: true }); - console.assert(installRes.status === 0, "Mega-Linter configuration incomplete"); + console.assert(installRes.status === 0, 'Mega-Linter configuration incomplete'); } } // Run MegaLinter const megaLinter = new MegaLinterRunner(); - const megaLinterOptions = { flavor: "salesforce", fix: this.fix }; + const megaLinterOptions = { flavor: 'salesforce', fix: this.fix }; const res = await megaLinter.run(megaLinterOptions); process.exitCode = res.status; if (res.status === 0) { - uxLog(this, c.green(`Mega-Linter has been successful`)); + uxLog("success", this, c.green(`Mega-Linter has been successful`)); } else { - uxLog(this, c.red(`Mega-Linter found error(s)`)); + uxLog("error", this, c.red(`Mega-Linter found error(s)`)); } // Return an object to be displayed with --json - return { outputString: "Linted project sources", linterStatusCode: res.status }; + return { outputString: 'Linted project sources', linterStatusCode: res.status }; } } diff --git a/src/commands/hardis/project/metadata/findduplicates.ts b/src/commands/hardis/project/metadata/findduplicates.ts index 7a676e2ae..9131efec5 100644 --- a/src/commands/hardis/project/metadata/findduplicates.ts +++ b/src/commands/hardis/project/metadata/findduplicates.ts @@ -1,36 +1,36 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Logger, LoggerLevel, 
Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { uxLog } from "../../../../common/utils"; -import { parseXmlFile } from "../../../../common/utils/xmlUtils"; -import { getConfig } from "../../../../config"; -import { glob } from "glob"; -import { basename } from "path"; -import * as c from "chalk"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Logger, LoggerLevel, Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { uxLog } from '../../../../common/utils/index.js'; +import { parseXmlFile } from '../../../../common/utils/xmlUtils.js'; +import { getConfig } from '../../../../config/index.js'; +import { glob } from 'glob'; +import c from 'chalk'; +import { GLOB_IGNORE_PATTERNS } from '../../../../common/utils/projectUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, // or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-function getCommonPermissionPatterns(rootTagName: "Profile" | "PermissionSet") { - return [`${rootTagName}.fieldPermissions.field`, `${rootTagName}.objectPermissions.object`, `${rootTagName}.classAccesses.apexClass`]; +function getCommonPermissionPatterns(rootTagName: 'Profile' | 'PermissionSet') { + return [ + `${rootTagName}.fieldPermissions.field`, + `${rootTagName}.objectPermissions.object`, + `${rootTagName}.classAccesses.apexClass`, + ]; } -export default class Find extends SfdxCommand { +export default class Find extends SfCommand { protected static metadataDuplicateFindKeys = { - layout: ["Layout.layoutSections.layoutColumns.layoutItems.field", "Layout.quickActionListItems.quickActionName"], - profile: getCommonPermissionPatterns("Profile"), - labels: ["CustomLabels.labels.fullName"], - permissionset: getCommonPermissionPatterns("PermissionSet"), + layout: ['Layout.layoutSections.layoutColumns.layoutItems.field', 'Layout.quickActionListItems.quickActionName'], + profile: getCommonPermissionPatterns('Profile'), + labels: ['CustomLabels.labels.fullName'], + permissionset: getCommonPermissionPatterns('PermissionSet'), }; - public static title = "XML duplicate values finder"; + public static title = 'XML duplicate values finder'; public static description = `find duplicate values in XML file(s). Find duplicate values in XML file(s). Keys to be checked can be configured in \`config/sfdx-hardis.yml\` using property metadataDuplicateFindKeys. 
@@ -59,13 +59,13 @@ ${Find.metadataDuplicateFindKeys} `, ` -$ sfdx hardis:project:metadata:findduplicates --file layout.layout-meta.xml +$ sf hardis:project:metadata:findduplicates --file layout.layout-meta.xml [sfdx-hardis] Duplicate values in layout.layout-meta.xml - Key : Layout.layoutSections.layoutColumns.layoutItems.field - Values : Name `, ` -$ sfdx hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xml" +$ sf hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xml" [sfdx-hardis] hardis:project:metadata:findduplicates execution time 0:00:00.397 [sfdx-hardis] Duplicate values in layout1.layout-meta.xml - Key : Layout.layoutSections.layoutColumns.layoutItems.field @@ -80,63 +80,84 @@ $ sfdx hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xm protected configInfo: any; protected logLevel: LoggerLevel; - protected static flagsConfig = { - files: flags.array({ - char: "f", - description: "XML metadata files path", + public static flags: any = { + files: Flags.string({ + char: 'f', + description: 'XML metadata files path', + multiple: true, }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; public async run(): Promise { - uxLog(this, c.cyan(`Start finding duplicate values in XML metadata files.`)); + const { flags } = await this.parse(Find); + uxLog("action", this, c.cyan(`Start finding duplicate values in XML metadata files.`)); await this.initConfig(); - const filesWithDuplicates = await 
this.findDuplicates(); - uxLog(this, c.cyan(`Done finding duplicate values in XML metadata files.`)); + const filesWithDuplicates = await this.findDuplicates(flags); + uxLog("action", this, c.cyan("Summary")); if (filesWithDuplicates.length > 0) { + const duplicatesString = filesWithDuplicates + .map((file) => { + return `${file.file}\n - Key : ${file.key}\n - Values : ${file.duplicates.join(', ')}`; + }) + .join('\n'); + uxLog( + "error", + this, + c.red( + `Found ${filesWithDuplicates.length} files with duplicate values\n${duplicatesString}` + ) + ); process.exitCode = 1; } + else { + uxLog("success", this, c.green('No duplicate values found.')); + } return filesWithDuplicates; } async initConfig() { - this.configInfo = await getConfig("user"); + this.configInfo = await getConfig('user'); if (this.configInfo.metadataDuplicateFindKeys) { Find.metadataDuplicateFindKeys = this.configInfo.metadataDuplicateFindKeys; } - // Gets the root sfdx logger level + // Gets the root SF CLI logger level this.logLevel = (await Logger.root()).getLevel(); } - async findDuplicates() { + async findDuplicates(flags) { // Collect input parameters - const inputFiles = []; + const inputFiles: any[] = []; - if (this.flags.files) { - const files = await glob("./" + this.flags.files, { cwd: process.cwd() }); + if (flags.files) { + const files = await glob('./' + flags.files, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); inputFiles.push(...files); } - const foundFilesWithDuplicates = []; + const foundFilesWithDuplicates: any[] = []; for (const inputFile of inputFiles) { // Extract given metadata type based on filename using type-meta.xml // For example PersonAccount.layout-meta.xml returns layout and Admin.profile-meta.xml returns profile const filenameRegex = /\w*\.(\w*)-meta.xml/; + if (!inputFile.match(filenameRegex)) { + uxLog("warning", this, c.yellow(`Filename ${inputFile} does not match expected pattern, skipping.`)); + continue; + } const type = 
inputFile.match(filenameRegex)[1]; // Check if given metadata type has unicity rules const uniqueKeys = Find.metadataDuplicateFindKeys[type]; if (!uniqueKeys) { if (this.logLevel === LoggerLevel.DEBUG) { - uxLog(this, c.gray(`No unicity rule found for metadata type ${type} (processing ${inputFile})`)); + uxLog("error", this, c.grey(`No unicity rule found for metadata type ${type} (processing ${inputFile})`)); } continue; } @@ -145,7 +166,11 @@ $ sfdx hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xm const file = await parseXmlFile(inputFile); uniqueKeys.forEach((key) => { // Traverse the file down to the key based on the fragments separated by . (dots), abort if not found - const allProps = key.split("."); + const allProps = key.split('.'); + if (!file || !allProps[0] || !file[allProps[0]]) { + uxLog("warning", this, c.yellow(`Key ${key} not found in file ${inputFile}`)); + return; + } const valuesFound = this.traverseDown(file, allProps[0], allProps, []); // https://stackoverflow.com/a/840808 @@ -156,13 +181,6 @@ $ sfdx hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xm key, duplicates, }); - uxLog( - this, - c.red(`Duplicate values in ${basename(inputFile)} - - Key : ${key} - - Values : ${duplicates.join(", ")} -`), - ); } }); } @@ -173,12 +191,17 @@ $ sfdx hardis:project.metadata:findduplicates -f "force-app/main/default/**/*.xm * Traverse down a XML tree, allProps containing all the properties to be traversed, currentProp being updated as we * descend. 
*/ - traverseDown(parent: Record | Array, currentProp: string, allProps: Array, results: Array) { + traverseDown( + parent: Record | Array, + currentProp: string, + allProps: Array, + results: Array + ) { const nextProp = allProps[allProps.indexOf(currentProp) + 1]; // If we're at the end of property path (A.B.C -> parent = A.B, currentProp = C, nextProp = undefined) we add the // value contained in A.B.C - if (nextProp === undefined) { + if (nextProp === undefined && parent[currentProp]) { results.push(parent[currentProp][0]); } // If A.B is an array, we'll traverse A.B.C1, A.B.C2, etc... diff --git a/src/commands/hardis/scratch/create.ts b/src/commands/hardis/scratch/create.ts index 685aa33ac..cf41f71bc 100644 --- a/src/commands/hardis/scratch/create.ts +++ b/src/commands/hardis/scratch/create.ts @@ -1,15 +1,23 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { AuthInfo, Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { assert } from "console"; -import * as fs from "fs-extra"; -import * as moment from "moment"; -import * as os from "os"; -import * as path from "path"; -import { clearCache } from "../../../common/cache"; -import { elapseEnd, elapseStart, execCommand, execSfdxJson, getCurrentGitBranch, isCI, uxLog } from "../../../common/utils"; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { AuthInfo, Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { assert } from 'console'; +import fs from 'fs-extra'; +import moment from 'moment'; +import * as os from 'os'; +import * as path from 'path'; +import { clearCache } from '../../../common/cache/index.js'; +import { + elapseEnd, + elapseStart, + execCommand, + execSfdxJson, + getCurrentGitBranch, + isCI, + uxLog, +} from '../../../common/utils/index.js'; 
import { initApexScripts, initOrgData, @@ -17,74 +25,100 @@ import { initPermissionSetAssignments, installPackages, promptUserEmail, -} from "../../../common/utils/orgUtils"; -import { addScratchOrgToPool, fetchScratchOrg } from "../../../common/utils/poolUtils"; -import { prompts } from "../../../common/utils/prompts"; -import { WebSocketClient } from "../../../common/websocketClient"; -import { getConfig, setConfig } from "../../../config"; +} from '../../../common/utils/orgUtils.js'; +import { addScratchOrgToPool, fetchScratchOrg } from '../../../common/utils/poolUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { getConfig, setConfig } from '../../../config/index.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class ScratchCreate extends SfCommand { + public static title = 'Create and initialize scratch org'; + + public static description = ` +## Command Behavior + +**Creates and fully initializes a Salesforce scratch org with complete development environment setup.** + +This command is a comprehensive scratch org provisioning tool that automates the entire process of creating, configuring, and initializing a Salesforce scratch org for development work. It handles everything from basic org creation to advanced configuration including package installation, metadata deployment, and data initialization. -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Key functionalities: -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +- **Intelligent Org Management:** Automatically generates unique scratch org aliases based on username, git branch, and timestamp, with options to reuse existing orgs or force creation of new ones. +- **Scratch Org Pool Integration:** Supports fetching pre-configured scratch orgs from pools for faster development cycles and CI/CD optimization. +- **Custom Scratch Definition:** Dynamically builds project-scratch-def.json files with user-specific configurations including email, username patterns, and org shape settings (set variable **SCRATCH_ORG_SHAPE** to use org shapes). +- **Package Installation:** Automatically installs all configured packages defined in \`installedPackages\` configuration property. +- **Metadata Deployment:** Pushes source code and deploys metadata using optimized deployment strategies for scratch org environments. +- **Permission Set Assignment:** Assigns specified permission sets defined in \`initPermissionSets\` configuration to the scratch org user. +- **Apex Script Execution:** Runs custom Apex initialization scripts defined in \`scratchOrgInitApexScripts\` for org-specific setup. +- **Data Loading:** Loads initial data using SFDMU data packages from \`dataPackages\` configuration for realistic development environments. +- **User Configuration:** Automatically configures the scratch org admin user with proper names, email, country settings, and marketing user permissions. +- **Password Generation:** Creates and stores secure passwords for easy scratch org access during development. +- **CI/CD Integration:** Provides specialized handling for continuous integration environments including automated cleanup and pool management. +- **Error Handling:** Comprehensive error recovery including scratch org cleanup on failure and detailed troubleshooting messages. 
-export default class ScratchCreate extends SfdxCommand { - public static title = "Create and initialize scratch org"; +The command configuration can be customized using: - public static description = `Create and initialize a scratch org or a source-tracked sandbox (config can be defined using \`config/.sfdx-hardis.yml\`): +- \`config/.sfdx-hardis.yml\` file with properties like \`installedPackages\`, \`initPermissionSets\`, \`scratchOrgInitApexScripts\`, and \`dataPackages\`. +- Environment variable **SCRATCH_ORG_SHAPE** with shape org id, if you want to use org shapes -- **Install packages** - - Use property \`installedPackages\` -- **Push sources** -- **Assign permission sets** - - Use property \`initPermissionSets\` -- **Run apex initialization scripts** - - Use property \`scratchOrgInitApexScripts\` -- **Load data** - - Use property \`dataPackages\` - `; +
+Technical explanations - public static examples = ["$ sfdx hardis:scratch:create"]; +The command's technical implementation involves: + +- **Configuration Management:** Loads hierarchical configuration from \`.sfdx-hardis.yml\`, branch-specific, and user-specific configuration files using \`getConfig('user')\`. +- **Alias Generation Logic:** Creates intelligent scratch org aliases using username, git branch, timestamp patterns with CI and pool prefixes for different environments. +- **Scratch Org Definition Building:** Dynamically constructs \`project-scratch-def.json\` with user email, custom usernames, org shapes, and feature flags like StateAndCountryPicklist and MarketingUser. +- **Pool Integration:** Implements scratch org pool fetching using \`fetchScratchOrg\` for rapid org provisioning in development and CI environments. +- **Salesforce CLI Integration:** Executes \`sf org create scratch\` commands with proper parameter handling including wait times, duration, and dev hub targeting. +- **Package Installation Pipeline:** Uses \`installPackages\` utility to install managed and unmanaged packages with dependency resolution and error handling. +- **Metadata Deployment:** Leverages \`initOrgMetadatas\` for optimized source pushing and metadata deployment specific to scratch org environments. +- **Permission Set Assignment:** Implements \`initPermissionSetAssignments\` for automated permission set assignment to scratch org users. +- **Apex Script Execution:** Runs custom Apex initialization scripts using \`initApexScripts\` for org-specific configuration and setup. +- **Data Loading Integration:** Uses SFDMU integration through \`initOrgData\` for comprehensive data loading from configured data packages. +- **User Management:** Performs SOQL queries and DML operations to configure scratch org users with proper names, emails, country codes, and permission flags. 
+- **Authentication Management:** Handles SFDX auth URL generation and storage for CI/CD environments and scratch org pool management. +- **Error Recovery:** Implements comprehensive error handling with scratch org cleanup, pool management, and detailed error messaging for troubleshooting. +- **WebSocket Integration:** Provides real-time status updates and file reporting through WebSocket connections for VS Code extension integration. +
+`; + + public static examples = ['$ sf hardis:scratch:create']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - forcenew: flags.boolean({ - char: "n", + public static flags: any = { + forcenew: Flags.boolean({ + char: 'n', default: false, - description: messages.getMessage("forceNewScratch"), + description: messages.getMessage('forceNewScratch'), }), - pool: flags.boolean({ - char: "d", + pool: Flags.boolean({ default: false, - description: "Creates the scratch org for a scratch org pool", + description: 'Creates the scratch org for a scratch org pool', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdmu", "texei-sfdx-plugin"]; + protected static requiresSfdxPlugins = ['sfdmu']; protected forceNew = false; @@ -103,43 +137,57 @@ export default class ScratchCreate extends SfdxCommand { protected scratchOrgInfo: 
any; protected scratchOrgUsername: string; protected scratchOrgPassword: string; - protected scratchOrgSfdxAuthUrl: string; + protected scratchOrgSfdxAuthUrl: string | null; protected authFileJson: any; protected projectName: string; protected scratchOrgFromPool: any; public async run(): Promise { - this.pool = this.flags.pool || false; - this.debugMode = this.flags.debug || false; - this.forceNew = this.flags.forcenew || false; + const { flags } = await this.parse(ScratchCreate); + this.pool = flags.pool || false; + this.debugMode = flags.debug || false; + this.forceNew = flags.forcenew || false; elapseStart(`Create and initialize scratch org`); await this.initConfig(); - await this.createScratchOrg(); + await this.createScratchOrg(flags); try { await this.updateScratchOrgUser(); await installPackages(this.configInfo.installedPackages || [], this.scratchOrgAlias); if (this.pool === false) { - await initOrgMetadatas(this.configInfo, this.scratchOrgUsername, this.scratchOrgAlias, this.projectScratchDef, this.debugMode, { - scratch: true, - }); + await initOrgMetadatas( + this.configInfo, + this.scratchOrgUsername, + this.scratchOrgAlias, + this.projectScratchDef, + this.debugMode, + { + scratch: true, + } + ); await initPermissionSetAssignments(this.configInfo.initPermissionSets || [], this.scratchOrgUsername); await initApexScripts(this.configInfo.scratchOrgInitApexScripts || [], this.scratchOrgUsername); - await initOrgData(path.join(".", "scripts", "data", "ScratchInit"), this.scratchOrgUsername); + await initOrgData(path.join('.', 'scripts', 'data', 'ScratchInit'), this.scratchOrgUsername); } } catch (e) { elapseEnd(`Create and initialize scratch org`); - uxLog(this, c.grey("Error: " + e.message + "\n" + e.stack)); + uxLog("log", this, c.grey('Error: ' + (e as Error).message + '\n' + (e as Error).stack)); if (isCI && this.scratchOrgFromPool) { this.scratchOrgFromPool.failures = this.scratchOrgFromPool.failures || []; 
this.scratchOrgFromPool.failures.push(JSON.stringify(e, null, 2)); - uxLog(this, "[pool] " + c.yellow("Put back scratch org in the scratch orgs pool. ") + c.grey({ result: this.scratchOrgFromPool })); - await addScratchOrgToPool(this.scratchOrgFromPool, { position: "first" }); + uxLog( + "log", + this, + '[pool] ' + + c.yellow('Put back scratch org in the scratch orgs pool. ') + + c.grey({ result: this.scratchOrgFromPool }) + ); + await addScratchOrgToPool(this.scratchOrgFromPool, { position: 'first' }); } else if (isCI && this.scratchOrgUsername) { - await execCommand(`sfdx force:org:delete --noprompt --targetusername ${this.scratchOrgUsername}`, this, { + await execCommand(`sf org delete scratch --no-prompt --target-org ${this.scratchOrgUsername}`, this, { fail: false, output: true, }); - uxLog(this, c.red("Deleted scratch org as we are in CI and its creation has failed")); + uxLog("error", this, c.red('Deleted scratch org as we are in CI and its creation has failed')); } throw e; } @@ -147,8 +195,13 @@ export default class ScratchCreate extends SfdxCommand { // Show password to user if (this.scratchOrgPassword) { uxLog( + "action", this, - c.cyan(`You can connect to your scratch using username ${c.green(this.scratchOrgUsername)} and password ${c.green(this.scratchOrgPassword)}`), + c.cyan( + `You can connect to your scratch using username ${c.green(this.scratchOrgUsername)} and password ${c.green( + this.scratchOrgPassword + )}` + ) ); } elapseEnd(`Create and initialize scratch org`); @@ -161,34 +214,44 @@ export default class ScratchCreate extends SfdxCommand { scratchOrgPassword: this.scratchOrgPassword, scratchOrgSfdxAuthUrl: this.scratchOrgSfdxAuthUrl, authFileJson: this.authFileJson, - outputString: "Created and initialized scratch org", + outputString: 'Created and initialized scratch org', }; } // Initialize configuration from .sfdx-hardis.yml + .gitbranch.sfdx-hardis.yml + .username.sfdx-hardis.yml public async initConfig() { - this.configInfo = await 
getConfig("user"); - this.gitBranch = await getCurrentGitBranch({ formatted: true }); - const newScratchName = os.userInfo().username + "-" + this.gitBranch.split("/").pop().slice(0, 15) + "_" + moment().format("YYYYMMDD_hhmm"); + this.configInfo = await getConfig('user'); + this.gitBranch = (await getCurrentGitBranch({ formatted: true })) || ''; + const newScratchName = + os.userInfo().username + + '-' + + (this.gitBranch.split('/').pop() || '').slice(0, 15) + + '_' + + moment().format('YYYYMMDD_hhmm'); this.scratchOrgAlias = - process.env.SCRATCH_ORG_ALIAS || (!this.forceNew && this.pool === false ? this.configInfo.scratchOrgAlias : null) || newScratchName; - if (isCI && !this.scratchOrgAlias.startsWith("CI-")) { - this.scratchOrgAlias = "CI-" + this.scratchOrgAlias; + process.env.SCRATCH_ORG_ALIAS || + (!this.forceNew && this.pool == false ? this.configInfo.scratchOrgAlias : null) || + newScratchName; + if (isCI && !this.scratchOrgAlias.startsWith('CI-')) { + this.scratchOrgAlias = 'CI-' + this.scratchOrgAlias; } if (this.pool === true) { - this.scratchOrgAlias = "PO-" + Math.random().toString(36).substr(2, 2) + this.scratchOrgAlias; + this.scratchOrgAlias = 'PO-' + Math.random().toString(36).substr(2, 2) + this.scratchOrgAlias; } // Verify that the user wants to resume scratch org creation if (!isCI && this.scratchOrgAlias !== newScratchName && this.pool === false) { const checkRes = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', message: c.cyanBright( - `You are about to reuse scratch org ${c.green(this.scratchOrgAlias)}. Are you sure that's what you want to do ?\n${c.grey( - "(if not, run again hardis:work:new or use hardis:scratch:create --forcenew)", - )}`, + `You are about to reuse scratch org ${c.green( + this.scratchOrgAlias + )}. 
Are you sure that's what you want to do ?\n${c.grey( + '(if not, run again hardis:work:new or use hardis:scratch:create --forcenew)' + )}` ), default: false, + description: 'Confirm that you want to reuse this existing scratch org instead of creating a new one', }); if (checkRes.value === false) { process.exit(0); @@ -210,115 +273,143 @@ export default class ScratchCreate extends SfdxCommand { // If not found, prompt user email and store it in user config file if (this.userEmail == null) { if (this.pool === true) { - throw new SfdxError(c.red("You need to define userEmail property in .sfdx-hardis.yml")); + throw new SfError(c.red('You need to define userEmail property in .sfdx-hardis.yml')); } this.userEmail = await promptUserEmail(); } } // Create a new scratch org or reuse existing one - public async createScratchOrg() { + public async createScratchOrg(flags) { // Build project-scratch-def-branch-user.json - uxLog(this, c.cyan("Building custom project-scratch-def.json...")); - this.projectScratchDef = JSON.parse(fs.readFileSync("./config/project-scratch-def.json", "utf-8")); + uxLog("action", this, c.cyan('Building custom project-scratch-def.json...')); + this.projectScratchDef = JSON.parse(fs.readFileSync('./config/project-scratch-def.json', 'utf-8')); this.projectScratchDef.orgName = this.scratchOrgAlias; this.projectScratchDef.adminEmail = this.userEmail; - this.projectScratchDef.username = `${this.userEmail.split("@")[0]}@hardis-scratch-${this.scratchOrgAlias}.com`; + // Keep only first 15 and last 15 chars if scratch org alias is too long + const aliasForUsername = this.scratchOrgAlias.length > 30 ? 
this.scratchOrgAlias.slice(0, 15) + this.scratchOrgAlias.slice(-15) : this.scratchOrgAlias; + this.projectScratchDef.username = `${this.userEmail.split('@')[0].slice(0, 20)}@hardis-scratch-${aliasForUsername}.com`; + if (process.env.SCRATCH_ORG_SHAPE || this.configInfo.scratchOrgShape) { + this.projectScratchDef.sourceOrg = process.env.SCRATCH_ORG_SHAPE || this.configInfo.scratchOrgShape; + } + uxLog("log", this, c.grey("Project scratch def: \n" + JSON.stringify(this.projectScratchDef, null, 2))); const projectScratchDefLocal = `./config/user/project-scratch-def-${this.scratchOrgAlias}.json`; await fs.ensureDir(path.dirname(projectScratchDefLocal)); await fs.writeFile(projectScratchDefLocal, JSON.stringify(this.projectScratchDef, null, 2)); + WebSocketClient.sendReportFileMessage(projectScratchDefLocal, "Scratch Org definition", "report"); + // Check current scratch org - const orgListResult = await execSfdxJson("sfdx force:org:list", this); - const hubOrgUsername = this.hubOrg.getUsername(); + const orgListResult = await execSfdxJson('sf org list', this); + const hubOrgUsername = flags['target-dev-hub'].getUsername(); const matchingScratchOrgs = orgListResult?.result?.scratchOrgs?.filter((org: any) => { - return org.alias === this.scratchOrgAlias && org.status === "Active" && org.devHubUsername === hubOrgUsername; + return org.alias === this.scratchOrgAlias && org.status === 'Active' && org.devHubUsername === hubOrgUsername; }) || []; // Reuse existing scratch org - if (matchingScratchOrgs?.length > 0 && !this.forceNew && this.pool === false) { + if (matchingScratchOrgs?.length > 0 && !this.forceNew && this.pool == false) { this.scratchOrgInfo = matchingScratchOrgs[0]; this.scratchOrgUsername = this.scratchOrgInfo.username; - uxLog(this, c.cyan(`Reusing org ${c.green(this.scratchOrgAlias)} with user ${c.green(this.scratchOrgUsername)}`)); + uxLog("action", this, c.cyan(`Reusing org ${c.green(this.scratchOrgAlias)} with user ${c.green(this.scratchOrgUsername)}`)); 
return; } // Try to fetch a scratch org from the pool if (this.pool === false && this.configInfo.poolConfig) { - this.scratchOrgFromPool = await fetchScratchOrg({ devHubConn: this.hubOrg.getConnection(), devHubUsername: this.hubOrg.getUsername() }); + this.scratchOrgFromPool = await fetchScratchOrg({ + devHubConn: flags['target-dev-hub'].getConnection(), + devHubUsername: flags['target-dev-hub'].getUsername(), + }); if (this.scratchOrgFromPool) { this.scratchOrgAlias = this.scratchOrgFromPool.scratchOrgAlias; this.scratchOrgInfo = this.scratchOrgFromPool.scratchOrgInfo; this.scratchOrgUsername = this.scratchOrgFromPool.scratchOrgUsername; this.scratchOrgPassword = this.scratchOrgFromPool.scratchOrgPassword; - await setConfig("user", { scratchOrgAlias: this.scratchOrgAlias }); - uxLog(this, "[pool] " + c.cyan(`Fetched org ${c.green(this.scratchOrgAlias)} from pool with user ${c.green(this.scratchOrgUsername)}`)); + await setConfig('user', { scratchOrgAlias: this.scratchOrgAlias }); + uxLog( + "log", + this, + '[pool] ' + + c.cyan( + `Fetched org ${c.green(this.scratchOrgAlias)} from pool with user ${c.green(this.scratchOrgUsername)}` + ) + ); if (!isCI) { - uxLog(this, c.cyan("Now opening org...") + " " + c.yellow("(The org is not ready to work in until this script is completed !)")); - await execSfdxJson("sf org open", this, { + uxLog( + "action", + this, + c.cyan('Now opening org...') + + ' ' + + c.yellow('(The org is not ready to work in until this script is completed !)') + ); + await execSfdxJson('sf org open', this, { fail: true, output: false, debug: this.debugMode, }); // Trigger a status refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshStatus" }); + WebSocketClient.sendRefreshStatusMessage(); } return; } } // Fix @salesforce/cli bug: remove shape.zip if found - const tmpShapeFolder = path.join(os.tmpdir(), "shape"); + const tmpShapeFolder = path.join(os.tmpdir(), 'shape'); if (fs.existsSync(tmpShapeFolder) && this.pool === 
false) { await fs.remove(tmpShapeFolder); - uxLog(this, c.grey("Deleted " + tmpShapeFolder)); + uxLog("log", this, c.grey('Deleted ' + tmpShapeFolder)); } // Create new scratch org - uxLog(this, c.cyan("Creating new scratch org...")); - const waitTime = process.env.SCRATCH_ORG_WAIT || "15"; + uxLog("action", this, c.cyan('Creating new scratch org...')); + const waitTime = process.env.SCRATCH_ORG_WAIT || '15'; const createCommand = - "sfdx force:org:create --setdefaultusername " + - `--definitionfile ${projectScratchDefLocal} ` + - `--setalias ${this.scratchOrgAlias} ` + + 'sf org create scratch --set-default ' + + `--definition-file ${projectScratchDefLocal} ` + + `--alias ${this.scratchOrgAlias} ` + `--wait ${waitTime} ` + - `--targetdevhubusername ${this.devHubAlias} ` + - `-d ${this.scratchOrgDuration}`; + `--target-dev-hub ${this.devHubAlias} ` + + `--duration-days ${this.scratchOrgDuration}`; const createResult = await execSfdxJson(createCommand, this, { fail: false, output: false, debug: this.debugMode, }); - await clearCache("force:org:list"); + await clearCache('sf org list'); + if (!createResult || createResult.status !== 0 || !createResult.result) { + uxLog("error", this, this.buildScratchCreateErrorMessage(createResult)); + throw new SfError('Scratch org creation failed'); + } assert(createResult.status === 0 && createResult.result, this.buildScratchCreateErrorMessage(createResult)); this.scratchOrgInfo = createResult.result; this.scratchOrgUsername = this.scratchOrgInfo.username; - await setConfig("user", { + await setConfig('user', { scratchOrgAlias: this.scratchOrgAlias, scratchOrgUsername: this.scratchOrgUsername, }); // Generate password - const passwordCommand = `sfdx force:user:password:generate --targetusername ${this.scratchOrgUsername}`; + const passwordCommand = `sf org generate password --target-org ${this.scratchOrgUsername}`; const passwordResult = await execSfdxJson(passwordCommand, this, { fail: true, output: false, debug: this.debugMode, 
}); this.scratchOrgPassword = passwordResult.result.password; - await setConfig("user", { + await setConfig('user', { scratchOrgPassword: this.scratchOrgPassword, }); // Trigger a status refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshStatus" }); + WebSocketClient.sendRefreshStatusMessage(); if (isCI || this.pool === true) { // Try to store sfdxAuthUrl for scratch org reuse during CI - const displayOrgCommand = `sfdx force:org:display -u ${this.scratchOrgAlias} --verbose`; + const displayOrgCommand = `sf org display -o ${this.scratchOrgAlias} --verbose`; const displayResult = await execSfdxJson(displayOrgCommand, this, { fail: true, output: false, debug: this.debugMode, }); if (displayResult.result.sfdxAuthUrl) { - await setConfig("user", { + await setConfig('user', { scratchOrgSfdxAuthUrl: displayResult.result.sfdxAuthUrl, }); this.scratchOrgSfdxAuthUrl = displayResult.result.sfdxAuthUrl; @@ -328,89 +419,106 @@ export default class ScratchCreate extends SfdxCommand { const authInfo = await AuthInfo.create({ username: displayResult.result.username }); this.scratchOrgSfdxAuthUrl = authInfo.getSfdxAuthUrl(); displayResult.result.sfdxAuthUrl = this.scratchOrgSfdxAuthUrl; - await setConfig("user", { + await setConfig('user', { scratchOrgSfdxAuthUrl: this.scratchOrgSfdxAuthUrl, }); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (error) { uxLog( + "warning", this, c.yellow( - `Unable to fetch sfdxAuthUrl for ${displayResult.result.username}. Only Scratch Orgs created from DevHub using authenticated using auth:sfdxurl or auth:web will have access token and enabled for autoLogin\nYou may need to define SFDX_AUTH_URL_DEV_HUB or SFDX_AUTH_URL_devHubAlias in your CI job running sfdx hardis:scratch:pool:refresh`, - ), + `Unable to fetch sfdxAuthUrl for ${displayResult.result.username}. 
Only Scratch Orgs created from DevHub using authenticated using sf org login sfdx-url or sf org login web will have access token and enabled for autoLogin\nYou may need to define SFDX_AUTH_URL_DEV_HUB or SFDX_AUTH_URL_devHubAlias in your CI job running sf hardis:scratch:pool:refresh` + ) ); this.scratchOrgSfdxAuthUrl = null; } } if (this.pool) { - await setConfig("user", { + await setConfig('user', { authFileJson: displayResult, }); this.authFileJson = displayResult; } // Display org URL - const openRes = await execSfdxJson("sf org open --url-only", this, { + const openRes = await execSfdxJson('sf org open --url-only', this, { fail: true, output: false, debug: this.debugMode, }); - uxLog(this, c.cyan(`Open scratch org with url: ${c.green(openRes?.result?.url)}`)); + uxLog("action", this, c.cyan(`Open scratch org with url: ${c.green(openRes?.result?.url)}`)); } else { // Open scratch org for user if not in CI - await execSfdxJson("sf org open", this, { + await execSfdxJson('sf org open', this, { fail: true, output: false, debug: this.debugMode, }); } - uxLog(this, c.cyan(`Created scratch org ${c.green(this.scratchOrgAlias)} with user ${c.green(this.scratchOrgUsername)}`)); + uxLog( + "action", + this, + c.cyan(`Created scratch org ${c.green(this.scratchOrgAlias)} with user ${c.green(this.scratchOrgUsername)}`) + ); } public buildScratchCreateErrorMessage(createResult) { if (createResult.status === 0 && createResult.result) { - return c.green("Scratch create OK"); - } else if (createResult.status === 1 && createResult.errorMessage.includes("Socket timeout occurred while listening for results")) { + return c.green('Scratch create OK'); + } else if ( + createResult.status === 1 && + createResult.errorMessage.includes('Socket timeout occurred while listening for results') + ) { return c.red( `[sfdx-hardis] Error creating scratch org. 
${c.bold( - "This is probably a Salesforce error, try again manually or launch again CI job", - )}\n${JSON.stringify(createResult, null, 2)}`, + 'This is probably a Salesforce error, try again manually or launch again CI job' + )}\n${JSON.stringify(createResult, null, 2)}` ); - } else if (createResult.status === 1 && createResult.errorMessage.includes("LIMIT_EXCEEDED")) { + } else if (createResult.status === 1 && createResult.errorMessage.includes('LIMIT_EXCEEDED')) { return c.red( `[sfdx-hardis] Error creating scratch org. ${c.bold( - 'It seems you have no more scratch orgs available, go delete some in "Active Scratch Orgs" tab in the Dev Hub org', - )}\n${JSON.stringify(createResult, null, 2)}`, + 'It seems you have no more scratch orgs available, go delete some in "Active Scratch Orgs" tab in the Dev Hub org' + )}\n${JSON.stringify(createResult, null, 2)}` ); } return c.red( - `[sfdx-hardis] Error creating scratch org. Maybe try ${c.yellow(c.bold("sfdx hardis:scratch:create --forcenew"))} ?\n${JSON.stringify( - createResult, - null, - 2, - )}`, + `[sfdx-hardis] Error creating scratch org. 
Maybe try ${c.yellow( + c.bold('sf hardis:scratch:create --forcenew') + )} ?\n${JSON.stringify(createResult, null, 2)}` ); } // Update scratch org user public async updateScratchOrgUser() { - const config = await getConfig("user"); + const config = await getConfig('user'); // Update scratch org main user - uxLog(this, c.cyan("Update / fix scratch org user " + this.scratchOrgUsername)); - const userQueryCommand = `sfdx force:data:record:get -s User -w "Username=${this.scratchOrgUsername}" -u ${this.scratchOrgAlias}`; - const userQueryRes = await execSfdxJson(userQueryCommand, this, { fail: true, output: false, debug: this.debugMode }); + uxLog("action", this, c.cyan('Update / fix scratch org user ' + this.scratchOrgUsername)); + const userQueryCommand = `sf data get record --sobject User --where "Username=${this.scratchOrgUsername}" --target-org ${this.scratchOrgAlias}`; + const userQueryRes = await execSfdxJson(userQueryCommand, this, { + fail: true, + output: false, + debug: this.debugMode, + }); let updatedUserValues = `LastName='SFDX-HARDIS' FirstName='Scratch Org'`; if (config.userEmail !== userQueryRes.result.CountryCode) { updatedUserValues += ` Email='${config.userEmail}'`; } // Fix country value is State & Country picklist activated - if ((this.projectScratchDef.features || []).includes("StateAndCountryPicklist") && userQueryRes.result.CountryCode == null) { - updatedUserValues += ` CountryCode='${config.defaultCountryCode || "FR"}' Country='${config.defaultCountry || "France"}'`; + if ( + (this.projectScratchDef.features || []).includes('StateAndCountryPicklist') && + userQueryRes.result.CountryCode == null + ) { + updatedUserValues += ` CountryCode='${config.defaultCountryCode || 'FR'}' Country='${config.defaultCountry || 'France' + }'`; } - if ((this.projectScratchDef.features || []).includes("MarketingUser") && userQueryRes.result.UserPermissionsMarketingUser === false) { + if ( + (this.projectScratchDef.features || []).includes('MarketingUser') && + 
userQueryRes.result.UserPermissionsMarketingUser === false + ) { // Make sure MarketingUser is checked on scratch org user if it is supposed to be - updatedUserValues += " UserPermissionsMarketingUser=true"; + updatedUserValues += ' UserPermissionsMarketingUser=true'; } - const userUpdateCommand = `sfdx force:data:record:update -s User -i ${userQueryRes.result.Id} -v "${updatedUserValues}" -u ${this.scratchOrgAlias}`; + const userUpdateCommand = `sf data update record --sobject User --record-id ${userQueryRes.result.Id} --values "${updatedUserValues}" --target-org ${this.scratchOrgAlias}`; await execSfdxJson(userUpdateCommand, this, { fail: false, output: true, debug: this.debugMode }); } } diff --git a/src/commands/hardis/scratch/delete.ts b/src/commands/hardis/scratch/delete.ts index 2690efcfb..dd1311a90 100644 --- a/src/commands/hardis/scratch/delete.ts +++ b/src/commands/hardis/scratch/delete.ts @@ -1,89 +1,114 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { execCommand, execSfdxJson, uxLog } from "../../../common/utils"; -import { prompts } from "../../../common/utils/prompts"; -import * as c from "chalk"; -import * as sortArray from "sort-array"; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { execCommand, execSfdxJson, uxLog } from '../../../common/utils/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import c from 'chalk'; +import sortArray from 'sort-array'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ScratchDelete extends SfCommand { + public static title = 'Delete scratch orgs(s)'; -export default class ScratchDelete extends SfdxCommand { - public static title = "Delete scratch orgs(s)"; + public static description = `## Command Behavior - public static description = "Assisted menu to delete scratch orgs associated to a DevHub"; +**Provides an assisted menu to delete Salesforce scratch orgs associated with a Dev Hub.** - public static examples = ["$ sfdx hardis:scratch:delete"]; +This command simplifies the process of cleaning up your Salesforce development environments by allowing you to easily select and delete multiple scratch orgs. This is crucial for managing your scratch org limits and ensuring that you don't accumulate unnecessary or expired orgs. + +Key functionalities: + +- **Interactive Scratch Org Selection:** Displays a list of all active scratch orgs linked to your Dev Hub, including their usernames, instance URLs, and last used dates. +- **Multi-Selection:** Allows you to select multiple scratch orgs for deletion. +- **Confirmation Prompt:** Prompts for confirmation before proceeding with the deletion, ensuring that you don't accidentally delete important orgs. +- **Dev Hub Integration:** Works with your configured Dev Hub to manage scratch orgs. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce CLI Integration:** It executes the \`sf org list\` command to retrieve a list of all scratch orgs associated with the current Dev Hub. It then filters this list to show only active orgs. +- **Interactive Prompts:** Uses the \`prompts\` library to present a multi-select menu of scratch orgs to the user. +- **Scratch Org Deletion:** For each selected scratch org, it executes the \`sf org delete scratch --no-prompt\` command to perform the deletion. +- **Error Handling:** Includes basic error handling for Salesforce CLI commands. +- **Data Sorting:** Sorts the list of scratch orgs by username, alias, and instance URL for better readability in the interactive menu. +
+`; + + public static examples = ['$ sf hardis:scratch:delete']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - /* jscpd:ignore-end */ public async run(): Promise { - const debugMode = this.flags.debug || false; + const { flags } = await this.parse(ScratchDelete); + const debugMode = flags.debug || false; // List all scratch orgs referenced on local computer - const orgListRequest = "sfdx force:org:list"; - const hubOrgUsername = this.hubOrg.getUsername(); + const orgListRequest = 'sf org list'; + const hubOrgUsername = flags['target-dev-hub'].getUsername(); const orgListResult = await execSfdxJson(orgListRequest, this, { fail: true, output: false, debug: debugMode }); const scratchOrgsSorted = sortArray(orgListResult?.result?.scratchOrgs || [], { - by: ["username", "alias", "instanceUrl"], - order: ["asc", "asc", "asc"], + by: ['username', 'alias', 'instanceUrl'], + order: ['asc', 'asc', 'asc'], }); const scratchOrgChoices = scratchOrgsSorted - .filter((scratchInfo) => { + .filter((scratchInfo: any) => { return scratchInfo.devHubUsername === 
hubOrgUsername; }) - .map((scratchInfo) => { + .map((scratchInfo: any) => { return { title: scratchInfo.username, - description: `${scratchInfo.instanceUrl}, last used on ${new Date(scratchInfo.lastUsed).toLocaleDateString()}`, + description: `${scratchInfo.instanceUrl}, last used on ${new Date( + scratchInfo.lastUsed + ).toLocaleDateString()}`, value: scratchInfo, }; }); // Request user which scratch he/she wants to delete const scratchToDeleteRes = await prompts({ - type: "multiselect", - name: "value", - message: c.cyanBright("Please select the list of scratch orgs you want to delete"), + type: 'multiselect', + name: 'value', + message: c.cyanBright('Please select the list of scratch orgs you want to delete'), + description: 'Choose which scratch orgs to permanently delete (this action cannot be undone)', choices: scratchOrgChoices, }); // Delete scratch orgs for (const scratchOrgToDelete of scratchToDeleteRes.value) { - const deleteCommand = `sfdx force:org:delete --noprompt --targetusername ${scratchOrgToDelete.username}`; + const deleteCommand = `sf org delete scratch --no-prompt --target-org ${scratchOrgToDelete.username}`; await execCommand(deleteCommand, this, { fail: false, debug: debugMode, output: true }); - uxLog(this, c.cyan(`Scratch org ${c.green(scratchOrgToDelete.username)} at ${scratchOrgToDelete.instanceUrl} has been deleted`)); + uxLog( + "action", + this, + c.cyan( + `Scratch org ${c.green(scratchOrgToDelete.username)} at ${scratchOrgToDelete.instanceUrl} has been deleted` + ) + ); } // Return an object to be displayed with --json - return { outputString: "Deleted scratch orgs" }; + return { outputString: 'Deleted scratch orgs' }; } } diff --git a/src/commands/hardis/scratch/pool/create.ts b/src/commands/hardis/scratch/pool/create.ts index 119fb8a9e..8c069d10f 100644 --- a/src/commands/hardis/scratch/pool/create.ts +++ b/src/commands/hardis/scratch/pool/create.ts @@ -1,23 +1,19 @@ /* jscpd:ignore-start */ -import * as c from "chalk"; -import { 
flags, SfdxCommand } from "@salesforce/command"; -import { AuthInfo, Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { getConfig, setConfig } from "../../../../config"; -import { prompts } from "../../../../common/utils/prompts"; -import { uxLog } from "../../../../common/utils"; -import { instantiateProvider, listKeyValueProviders } from "../../../../common/utils/poolUtils"; -import { KeyValueProviderInterface } from "../../../../common/utils/keyValueUtils"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class ScratchPoolCreate extends SfdxCommand { - public static title = "Create and configure scratch org pool"; +import c from 'chalk'; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { AuthInfo, Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { getConfig, setConfig } from '../../../../config/index.js'; +import { prompts } from '../../../../common/utils/prompts.js'; +import { uxLog } from '../../../../common/utils/index.js'; +import { instantiateProvider, listKeyValueProviders } from '../../../../common/utils/poolUtils.js'; +import { KeyValueProviderInterface } from '../../../../common/utils/keyValueUtils.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class ScratchPoolCreate extends SfCommand { + public static title = 'Create and configure scratch org pool'; public static description = `Select a data storage service and configure information to build a scratch org pool @@ -28,72 +24,73 @@ export 
default class ScratchPoolCreate extends SfdxCommand { - Call the following lines in the CI job: \`\`\`shell - sfdx hardis:auth:login --devhub - sfdx hardis:scratch:pool:refresh + sf hardis:auth:login --devhub + sf hardis:scratch:pool:refresh \`\`\` `; - public static examples = ["$ sfdx hardis:scratch:pool:configure"]; + public static examples = ['$ sf hardis:scratch:pool:configure']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { // Get pool configuration - const config = await getConfig("project"); + const { flags } = await this.parse(ScratchPoolCreate); + const config = await getConfig('project'); const poolConfig = config.poolConfig || {}; // Tell user if he/she's about to overwrite existing configuration if (config.poolConfig && Object.keys(poolConfig).length > 0) { uxLog( + "warning", this, c.yellow( `There 
is already an existing scratch org pool configuration: ${JSON.stringify(config.poolConfig)}. -If you really want to replace it, please remove poolConfig property from .sfdx-hardis.yml and run again this command`, - ), +If you really want to replace it, please remove poolConfig property from .sfdx-hardis.yml and run again this command` + ) ); - return { outputString: "Scratch org pool configuration already existing" }; + return { outputString: 'Scratch org pool configuration already existing' }; } const allProviders = await listKeyValueProviders(); const response = await prompts([ { - type: "select", - name: "storageService", - message: c.cyanBright("What storage service do you want to use for your scratch orgs pool ?"), + type: 'select', + name: 'storageService', + message: c.cyanBright('What storage service do you want to use for your scratch orgs pool ?'), + description: 'Choose a storage backend for managing and tracking scratch org pools', + placeholder: 'Select a storage service', initial: 0, choices: allProviders.map((provider: KeyValueProviderInterface) => { return { title: provider.name, description: provider.description, value: provider.name }; }), }, { - type: "number", - name: "maxScratchOrgsNumber", - message: c.cyanBright("What is the maximum number of scratch orgs in the pool ?"), + type: 'number', + name: 'maxScratchOrgsNumber', + message: c.cyanBright('What is the maximum number of scratch orgs in the pool ?'), + description: 'Set the maximum number of scratch orgs that can exist in the pool at any time', + placeholder: 'Ex: 5', initial: poolConfig.maxScratchOrgsNumber || 5, }, ]); @@ -101,28 +98,38 @@ If you really want to replace it, please remove poolConfig property from .sfdx-h // Store updated config poolConfig.maxScratchOrgsNumber = response.maxScratchOrgsNumber; poolConfig.storageService = response.storageService; - await setConfig("project", { poolConfig: poolConfig }); + await setConfig('project', { poolConfig: poolConfig }); // Request 
additional setup to the user const provider = await instantiateProvider(response.storageService); - await provider.userSetup({ devHubConn: this.hubOrg.getConnection(), devHubUsername: this.hubOrg.getUsername() }); + await provider.userSetup({ + devHubConn: flags['target-dev-hub'].getConnection(), + devHubUsername: flags['target-dev-hub'].getUsername(), + }); - const authInfo = await AuthInfo.create({ username: this.hubOrg.getUsername() }); + const authInfo = await AuthInfo.create({ username: flags['target-dev-hub'].getUsername() }); const sfdxAuthUrl = authInfo.getSfdxAuthUrl(); if (sfdxAuthUrl) { - uxLog(this, c.cyan(`You need to define CI masked variable ${c.green("SFDX_AUTH_URL_DEV_HUB")} = ${c.green(sfdxAuthUrl)}`)); + uxLog( + "action", + this, + c.cyan(`You need to define CI masked variable ${c.green('SFDX_AUTH_URL_DEV_HUB')} = ${c.green(sfdxAuthUrl)}`) + ); } else { uxLog( + "warning", this, c.yellow( `You'll probably need to define CI masked variable ${c.green( - "SFDX_AUTH_URL_DEV_HUB", - )} with content of sfdxAuthUrl that you can retrieve with ${c.white("sfdx force:org:display -u YOURDEVHUBUSERNAME --verbose --json")}`, - ), + 'SFDX_AUTH_URL_DEV_HUB' + )} with content of sfdxAuthUrl that you can retrieve with ${c.white( + 'sf org display -o YOURDEVHUBUSERNAME --verbose --json' + )}` + ) ); } // Return an object to be displayed with --json - return { outputString: "Configured scratch orgs pool" }; + return { outputString: 'Configured scratch orgs pool' }; } } diff --git a/src/commands/hardis/scratch/pool/localauth.ts b/src/commands/hardis/scratch/pool/localauth.ts index b57bc877a..7fbc5a59b 100644 --- a/src/commands/hardis/scratch/pool/localauth.ts +++ b/src/commands/hardis/scratch/pool/localauth.ts @@ -1,69 +1,83 @@ /* jscpd:ignore-start */ -import * as c from "chalk"; -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { getConfig } from 
"../../../../config"; -import { uxLog } from "../../../../common/utils"; -import { instantiateProvider } from "../../../../common/utils/poolUtils"; -import { KeyValueProviderInterface } from "../../../../common/utils/keyValueUtils"; +import c from 'chalk'; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { getConfig } from '../../../../config/index.js'; +import { uxLog } from '../../../../common/utils/index.js'; +import { instantiateProvider } from '../../../../common/utils/poolUtils.js'; +import { KeyValueProviderInterface } from '../../../../common/utils/keyValueUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ScratchPoolLocalAuth extends SfCommand { + public static title = 'Authenticate locally to scratch org pool'; -export default class ScratchPoolLocalAuth extends SfdxCommand { - public static title = "Authenticate locally to scratch org pool"; + public static description = ` +## Command Behavior - public static description = - "Calls the related storage service to request api keys and secrets that allows a local user to fetch a scratch org from scratch org pool"; +**Authenticates a local user to the configured scratch org pool storage service, enabling them to fetch and manage scratch orgs from the pool.** - public static examples = ["$ sfdx hardis:scratch:pool:localauth"]; +This command is essential for developers who want to utilize a shared scratch org pool for their local development. It establishes the necessary authentication with the backend storage service (e.g., Salesforce Custom Object, Redis) that manages the pool's state, allowing the user to retrieve available scratch orgs for their work. + +Key functionalities: + +- **Storage Service Authentication:** Initiates the authentication process with the chosen storage service to obtain the required API keys or secrets. +- **Enables Pool Access:** Once authenticated, the local user can then use other sfdx-hardis commands to fetch, use, and return scratch orgs from the pool. +- **Configuration Check:** Verifies if a scratch org pool is already configured for the current project and provides guidance if it's not. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the \`poolConfig\` from the project's .sfdx-hardis.yml file to identify the configured storage service. +- **Provider Instantiation:** It uses the \`instantiateProvider\` utility function to create an instance of the \`KeyValueProviderInterface\` corresponding to the configured storage service. +- **User Authentication:** It then calls the \`userAuthenticate()\` method on the instantiated provider. This method encapsulates the specific logic for authenticating with the chosen storage service (e.g., prompting for API keys, performing OAuth flows). +- **Error Handling:** It checks for the absence of a configured scratch org pool and provides a user-friendly message. +
+`; + + public static examples = ['$ sf hardis:scratch:pool:localauth']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { // Get pool configuration - const config = await getConfig("project"); + const config = await getConfig('project'); const poolConfig = config.poolConfig || {}; // Tell user if he/she's about to overwrite existing configuration if (!poolConfig.storageService) { uxLog( + "warning", this, c.yellow( - `There is not scratch orgs pool configured on this project. Please see with your tech lead about using command hardis:scratch:pool:configure`, - ), + `There is not scratch orgs pool configured on this project. 
Please see with your tech lead about using command hardis:scratch:pool:configure` + ) ); - return { outputString: "Scratch org pool configuration to create" }; + return { outputString: 'Scratch org pool configuration to create' }; } // Request additional setup to the user @@ -71,6 +85,6 @@ export default class ScratchPoolLocalAuth extends SfdxCommand { await provider.userAuthenticate(); // Return an object to be displayed with --json - return { outputString: "Locally authenticated with scratch org pool" }; + return { outputString: 'Locally authenticated with scratch org pool' }; } } diff --git a/src/commands/hardis/scratch/pool/refresh.ts b/src/commands/hardis/scratch/pool/refresh.ts index d9d05f2be..45f62f67d 100644 --- a/src/commands/hardis/scratch/pool/refresh.ts +++ b/src/commands/hardis/scratch/pool/refresh.ts @@ -1,156 +1,188 @@ /* jscpd:ignore-start */ -import { spawn } from "child_process"; -import * as c from "chalk"; +import { spawn } from 'child_process'; +import c from 'chalk'; -import * as which from "which"; -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { addScratchOrgToPool, getPoolStorage, setPoolStorage } from "../../../../common/utils/poolUtils"; -import { getConfig } from "../../../../config"; -import { execCommand, stripAnsi, uxLog } from "../../../../common/utils"; -import * as moment from "moment"; -import { authenticateWithSfdxUrlStore } from "../../../../common/utils/orgUtils"; +import which from 'which'; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { addScratchOrgToPool, getPoolStorage, setPoolStorage } from '../../../../common/utils/poolUtils.js'; +import { getConfig } from '../../../../config/index.js'; +import { execCommand, stripAnsi, uxLog } from 
'../../../../common/utils/index.js'; +import moment from 'moment'; +import { authenticateWithSfdxUrlStore } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ScratchPoolRefresh extends SfCommand { + public static title = 'Refresh scratch org pool'; -export default class ScratchPoolRefresh extends SfdxCommand { - public static title = "Refresh scratch org pool"; + public static description = `## Command Behavior - public static description = "Create enough scratch orgs to fill the pool"; +**Refreshes a scratch org pool by creating new scratch orgs to fill the pool and deleting expired ones.** - public static examples = ["$ sfdx hardis:scratch:pool:refresh"]; +This command is designed to maintain a healthy and adequately sized scratch org pool, ensuring that developers and CI/CD pipelines always have access to ready-to-use scratch orgs. It automates the lifecycle management of scratch orgs within the pool. + +Key functionalities: + +- **Expired Org Cleanup:** Identifies and deletes scratch orgs from the pool that are nearing their expiration date (configurable via \`minScratchOrgRemainingDays\` in \`.sfdx-hardis.yml\`). +- **Pool Replenishment:** Creates new scratch orgs to replace expired ones and to reach the \`maxScratchOrgsNumber\` defined in the pool configuration. +- **Parallel Creation:** New scratch orgs are created in parallel using child processes, optimizing the replenishment process. 
+- **Authentication Handling:** Authenticates to scratch orgs before deletion or creation, ensuring proper access. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the \`poolConfig\` from the project's \`.sfdx-hardis.yml\` file to get parameters like \`maxScratchOrgsNumber\`, \`maxScratchOrgsNumberToCreateOnce\`, and \`minScratchOrgRemainingDays\`. +- **Pool Storage Interaction:** It uses \`getPoolStorage\` and \`setPoolStorage\` to interact with the configured storage service (e.g., Salesforce Custom Object, Redis) to retrieve and update the list of scratch orgs in the pool. +- **Expiration Check:** It calculates the remaining days for each scratch org in the pool using moment and flags those below the \`minScratchOrgRemainingDays\` threshold for deletion. +- **Scratch Org Deletion:** For expired orgs, it authenticates to them using \`authenticateWithSfdxUrlStore\` and then executes \`sf org delete scratch\` via \`execCommand\`. +- **Scratch Org Creation:** To replenish the pool, it spawns new child processes that run the \`sf hardis:scratch:create --pool\` command. This allows for parallel creation of multiple scratch orgs. +- **Error Handling:** It includes error handling for scratch org creation failures, logging them and updating the pool storage accordingly. +- **Logging:** Provides detailed logs about the status of scratch orgs (kept, deleted, created, failed creations) and a summary of the refresh operation. +
+`; + + public static examples = ['$ sf hardis:scratch:pool:refresh']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; private debugMode = false; public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(ScratchPoolRefresh); + this.debugMode = flags.debug || false; // Check pool configuration is defined on project - const config = await getConfig("project"); + const config = await getConfig('project'); if (config.poolConfig == null) { - uxLog(this, c.yellow("Configuration file must contain a poolConfig property") + "\n" + c.grey(JSON.stringify(config, null, 2))); - return { outputString: "Configuration file must contain a poolConfig property" }; + uxLog( + "warning", + this, + c.yellow('Configuration file must contain a poolConfig property') + + '\n' + + c.grey(JSON.stringify(config, null, 2)) + ); + return { 
outputString: 'Configuration file must contain a poolConfig property' }; } const maxScratchOrgsNumber = config.poolConfig.maxScratchOrgsNumber || 5; const maxScratchOrgsNumberToCreateOnce = config.poolConfig.maxScratchOrgsNumberToCreateOnce || 10; - uxLog(this, c.grey("Pool config: " + JSON.stringify(config.poolConfig))); + uxLog("log", this, c.grey('Pool config: ' + JSON.stringify(config.poolConfig))); // Get pool storage - const poolStorage = await getPoolStorage({ devHubConn: this.hubOrg.getConnection(), devHubUsername: this.hubOrg.getUsername() }); + const poolStorage = await getPoolStorage({ + devHubConn: flags['target-dev-hub'].getConnection(), + devHubUsername: flags['target-dev-hub'].getUsername(), + }); let scratchOrgs = poolStorage.scratchOrgs || []; /* jscpd:ignore-end */ // Clean expired orgs const minScratchOrgRemainingDays = config.poolConfig.minScratchOrgRemainingDays || 25; - const scratchOrgsToDelete = []; + const scratchOrgsToDelete: any[] = []; scratchOrgs = scratchOrgs.filter((scratchOrg) => { const expiration = moment(scratchOrg?.authFileJson?.result?.expirationDate); const today = moment(); - const daysBeforeExpiration = expiration.diff(today, "days"); + const daysBeforeExpiration = expiration.diff(today, 'days'); if (daysBeforeExpiration < minScratchOrgRemainingDays) { scratchOrg.daysBeforeExpiration = daysBeforeExpiration; scratchOrgsToDelete.push(scratchOrg); uxLog( + "log", this, c.grey( - `Scratch org ${scratchOrg?.authFileJson?.result?.instanceUrl} will be deleted as it has only ${daysBeforeExpiration} remaining days (expiration on ${scratchOrg?.authFileJson?.result?.expirationDate})`, - ), + `Scratch org ${scratchOrg?.authFileJson?.result?.instanceUrl} will be deleted as it has only ${daysBeforeExpiration} remaining days (expiration on ${scratchOrg?.authFileJson?.result?.expirationDate})` + ) ); return false; } uxLog( + "log", this, c.grey( - `Scratch org ${scratchOrg?.authFileJson?.result?.instanceUrl} will be kept as it still has 
${daysBeforeExpiration} remaining days (expiration on ${scratchOrg?.authFileJson?.result?.expirationDate})`, - ), + `Scratch org ${scratchOrg?.authFileJson?.result?.instanceUrl} will be kept as it still has ${daysBeforeExpiration} remaining days (expiration on ${scratchOrg?.authFileJson?.result?.expirationDate})` + ) ); return true; }); // Delete expired orgs and update pool if found if (scratchOrgsToDelete.length > 0) { poolStorage.scratchOrgs = scratchOrgs; - await setPoolStorage(poolStorage, { devHubConn: this.hubOrg.getConnection(), devHubUsername: this.hubOrg.getUsername() }); + await setPoolStorage(poolStorage, { + devHubConn: flags['target-dev-hub'].getConnection(), + devHubUsername: flags['target-dev-hub'].getUsername(), + }); for (const scratchOrgToDelete of scratchOrgsToDelete) { // Authenticate to scratch org to delete await authenticateWithSfdxUrlStore(scratchOrgToDelete); // Delete scratch org - const deleteCommand = `sfdx force:org:delete --noprompt --targetusername ${scratchOrgToDelete.scratchOrgUsername}`; + const deleteCommand = `sf org delete scratch --no-prompt --target-org ${scratchOrgToDelete.scratchOrgUsername}`; await execCommand(deleteCommand, this, { fail: false, debug: this.debugMode, output: true }); uxLog( + "action", this, c.cyan( - `Scratch org ${c.green(scratchOrgToDelete.scratchOrgUsername)} at ${ - scratchOrgToDelete?.authFileJson?.result?.instanceUrl - } has been deleted because only ${scratchOrgToDelete.daysBeforeExpiration} days were remaining.`, - ), + `Scratch org ${c.green(scratchOrgToDelete.scratchOrgUsername)} at ${scratchOrgToDelete?.authFileJson?.result?.instanceUrl + } has been deleted because only ${scratchOrgToDelete.daysBeforeExpiration} days were remaining.` + ) ); } } // Create new scratch orgs const numberOfOrgsToCreate = Math.min(maxScratchOrgsNumber - scratchOrgs.length, maxScratchOrgsNumberToCreateOnce); - uxLog(this, c.cyan("Creating " + numberOfOrgsToCreate + " scratch orgs...")); + uxLog("action", this, 
c.cyan('Creating ' + numberOfOrgsToCreate + ' scratch orgs...')); let numberCreated = 0; let numberfailed = 0; - const subProcesses = []; + const subProcesses: any[] = []; for (let i = 0; i < numberOfOrgsToCreate; i++) { // eslint-disable-next-line no-async-promise-executor const spawnPromise = new Promise(async (resolve) => { // Run scratch:create command asynchronously - const commandArgs = ["hardis:scratch:create", "--pool", "--json"]; - const sfdxPath = await which("sfdx"); - const child = spawn(sfdxPath || "sfdx", commandArgs, { cwd: process.cwd(), env: process.env }); - uxLog(this, "[pool] " + c.grey(`hardis:scratch:create (${i}) started`)); + const commandArgs = ['hardis:scratch:create', '--pool', '--json']; + const sfdxPath = await which('sf'); + const child = spawn(sfdxPath || 'sf', commandArgs, { cwd: process.cwd(), env: process.env }); + uxLog("log", this, '[pool] ' + c.grey(`hardis:scratch:create (${i}) started`)); // handle errors - child.on("error", (err) => { + child.on('error', (err) => { resolve({ code: 1, result: { error: err } }); }); // Store data - let stdout = ""; - child.stdout.on("data", (data) => { + let stdout = ''; + child.stdout.on('data', (data) => { stdout += data.toString(); if (this.debugMode === true) { - uxLog(this, data.toString()); + uxLog("other", this, data.toString()); } }); // Handle end of command - child.on("close", async (code) => { + child.on('close', async (code) => { const colorFunc = code === 0 ? 
c.green : c.red; - uxLog(this, "[pool] " + colorFunc(`hardis:scratch:create (${i}) exited with code ${c.bold(code)}`)); + uxLog("action", this, '[pool] ' + colorFunc(`hardis:scratch:create (${i}) exited with code ${c.bold(code)}`)); if (code !== 0) { - uxLog(this, `Return code is not 0 (${i}): ` + c.grey(stdout)); + uxLog("warning", this, `Return code is not 0 (${i}): ` + c.grey(stdout)); numberfailed++; } else { numberCreated++; @@ -162,7 +194,7 @@ export default class ScratchPoolRefresh extends SfdxCommand { // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { result = { result: { status: 1, rawLog: stdout } }; - uxLog(this, c.yellow(`Error parsing stdout (${i}): ` + stdout)); + uxLog("warning", this, c.yellow(`Error parsing stdout (${i}): ` + stdout)); } await addScratchOrgToPool(result.result || result); resolve({ code, result: result }); @@ -174,12 +206,22 @@ export default class ScratchPoolRefresh extends SfdxCommand { // Await parallel scratch org creations are completed const createResults = await Promise.all(subProcesses); if (this.debugMode) { - uxLog(this, c.grey("Create results: \n" + JSON.stringify(createResults, null, 2))); + uxLog("log", this, c.grey('Create results: \n' + JSON.stringify(createResults, null, 2))); } const colorFunc = numberCreated === numberOfOrgsToCreate ? c.green : numberCreated === 0 ? 
c.red : c.yellow; - uxLog(this, "[pool] " + colorFunc(`Created ${c.bold(numberCreated)} scratch orgs (${c.bold(numberfailed)} creations(s) failed)`)); + uxLog( + "action", + this, + '[pool] ' + + colorFunc(`Created ${c.bold(numberCreated)} scratch orgs (${c.bold(numberfailed)} creation(s) failed)`) + ); // Return an object to be displayed with --json - return { outputString: "Refreshed scratch orgs pool", createResults: createResults, numberCreated: numberCreated, numberFailed: numberfailed }; + return { + outputString: 'Refreshed scratch orgs pool', + createResults: createResults, + numberCreated: numberCreated, + numberFailed: numberfailed, + }; } } diff --git a/src/commands/hardis/scratch/pool/reset.ts b/src/commands/hardis/scratch/pool/reset.ts index 3d11bbf56..d64b872f0 100644 --- a/src/commands/hardis/scratch/pool/reset.ts +++ b/src/commands/hardis/scratch/pool/reset.ts @@ -1,69 +1,96 @@ /* jscpd:ignore-start */ -import * as c from "chalk"; -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { getPoolStorage, setPoolStorage } from "../../../../common/utils/poolUtils"; -import { getConfig } from "../../../../config"; -import { execCommand, uxLog } from "../../../../common/utils"; -import { authenticateWithSfdxUrlStore } from "../../../../common/utils/orgUtils"; +import c from 'chalk'; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { getPoolStorage, setPoolStorage } from '../../../../common/utils/poolUtils.js'; +import { getConfig } from '../../../../config/index.js'; +import { execCommand, uxLog } from '../../../../common/utils/index.js'; +import { authenticateWithSfdxUrlStore } from '../../../../common/utils/orgUtils.js'; -// Initialize Messages with the current plugin directory
-Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ScratchPoolReset extends SfCommand { + public static title = 'Reset scratch org pool'; -export default class ScratchPoolReset extends SfdxCommand { - public static title = "Reset scratch org pool"; + public static description = ` +## Command Behavior - public static description = "Reset scratch org pool (delete all scratches in the pool)"; +**Resets the scratch org pool by deleting all existing scratch orgs within it.** - public static examples = ["$ sfdx hardis:scratch:pool:refresh"]; +This command provides a way to clear out the entire scratch org pool, effectively starting fresh. This can be useful for: + +- **Troubleshooting:** If the pool becomes corrupted or contains problematic scratch orgs. +- **Major Changes:** When there are significant changes to the scratch org definition or initialization process that require all existing orgs to be recreated. +- **Cleanup:** Periodically cleaning up the pool to ensure only the latest and most relevant scratch orgs are available. + +Key functionalities: + +- **Full Pool Deletion:** Identifies all scratch orgs currently in the pool and initiates their deletion. +- **Dev Hub Integration:** Works with your configured Dev Hub to manage the scratch orgs within the pool. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the \`poolConfig\` from the project's .sfdx-hardis.yml file to ensure a pool is configured. +- **Pool Storage Interaction:** It uses \`getPoolStorage\` to retrieve the current list of scratch orgs in the pool and \`setPoolStorage\` to clear the pool's record. +- **Scratch Org Deletion:** It iterates through each scratch org in the retrieved list. For each org, it authenticates to it using \`authenticateWithSfdxUrlStore\` and then executes \`sf org delete scratch\` via \`execCommand\`. +- **Logging:** Provides clear messages about the deletion process and the status of each scratch org. +
+`; + + public static examples = ['$ sf hardis:scratch:pool:reset']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ private debugMode = false; public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(ScratchPoolReset); + this.debugMode = flags.debug || false; // Check pool configuration is defined on project - const config = await getConfig("project"); + const config = await getConfig('project'); if (config.poolConfig == null) { - uxLog(this, c.yellow("Configuration file must contain a poolConfig property") + "\n" + c.grey(JSON.stringify(config, null, 2))); - return { outputString: "Configuration file must contain a poolConfig property" }; + uxLog( + "warning", + this, + c.yellow('Configuration file must contain a poolConfig property') + + '\n' + + c.grey(JSON.stringify(config, null, 2))
+ ); + return { outputString: 'Configuration file must contain a poolConfig property' }; } - uxLog(this, c.cyan(`Reseting scratch org pool on org ${c.green(this.hubOrg.getUsername())}...`)); - uxLog(this, c.grey("Pool config: " + JSON.stringify(config.poolConfig))); + uxLog("action", this, c.cyan(`Resetting scratch org pool on org ${c.green(flags['target-dev-hub'].getUsername())}...`)); + uxLog("log", this, c.grey('Pool config: ' + JSON.stringify(config.poolConfig))); // Get pool storage - const poolStorage = await getPoolStorage({ devHubConn: this.hubOrg.getConnection(), devHubUsername: this.hubOrg.getUsername() }); + const poolStorage = await getPoolStorage({ + devHubConn: flags['target-dev-hub'].getConnection(), + devHubUsername: flags['target-dev-hub'].getUsername(), + }); let scratchOrgs = poolStorage.scratchOrgs || []; // Delete existing scratch orgs @@ -71,24 +98,27 @@ export default class ScratchPoolReset extends SfdxCommand { const scratchOrgsToDelete = [...scratchOrgs]; scratchOrgs = []; poolStorage.scratchOrgs = scratchOrgs; - await setPoolStorage(poolStorage, { devHubConn: this.hubOrg.getConnection(), devHubUsername: this.hubOrg.getUsername() }); + await setPoolStorage(poolStorage, { + devHubConn: flags['target-dev-hub'].getConnection(), + devHubUsername: flags['target-dev-hub'].getUsername(), + }); for (const scratchOrgToDelete of scratchOrgsToDelete) { // Authenticate to scratch org to delete await authenticateWithSfdxUrlStore(scratchOrgToDelete); // Delete scratch org - const deleteCommand = `sfdx force:org:delete --noprompt --targetusername ${scratchOrgToDelete.scratchOrgUsername}`; + const deleteCommand = `sf org delete scratch --no-prompt --target-org ${scratchOrgToDelete.scratchOrgUsername}`; await execCommand(deleteCommand, this, { fail: false, debug: this.debugMode, output: true }); uxLog( + "action", this, c.cyan( - `Scratch org ${c.green(scratchOrgToDelete.scratchOrgUsername)} at ${ - scratchOrgToDelete?.authFileJson?.result?.instanceUrl - }
has been deleted`, - ), + `Scratch org ${c.green(scratchOrgToDelete.scratchOrgUsername)} at ${scratchOrgToDelete?.authFileJson?.result?.instanceUrl + } has been deleted` + ) ); } /* jscpd:ignore-end */ - return { outputString: "Reset scratch orgs pool" }; + return { outputString: 'Reset scratch orgs pool' }; } } diff --git a/src/commands/hardis/scratch/pool/view.ts b/src/commands/hardis/scratch/pool/view.ts index af347f0d5..048cfdb89 100644 --- a/src/commands/hardis/scratch/pool/view.ts +++ b/src/commands/hardis/scratch/pool/view.ts @@ -1,84 +1,103 @@ /* jscpd:ignore-start */ -import * as c from "chalk"; -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { getConfig } from "../../../../config"; -import { uxLog } from "../../../../common/utils"; -import { getPoolStorage } from "../../../../common/utils/poolUtils"; +import c from 'chalk'; +import { SfCommand, Flags, requiredHubFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { getConfig } from '../../../../config/index.js'; +import { uxLog } from '../../../../common/utils/index.js'; +import { getPoolStorage } from '../../../../common/utils/poolUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class ScratchPoolView extends SfCommand { + public static title = 'View scratch org pool info'; -export default class ScratchPoolView extends SfdxCommand { - public static title = "View scratch org pool info"; + public static description = ` +## Command Behavior - public static description = "Displays all stored content of project scratch org pool if defined"; +**Displays information about the configured scratch org pool, including its current state and available scratch orgs.** - public static examples = ["$ sfdx hardis:scratch:pool:view"]; +This command provides visibility into your scratch org pool, allowing you to monitor its health, check the number of available orgs, and verify its configuration. It's a useful tool for administrators and developers managing shared scratch org environments. + +Key functionalities: + +- **Pool Configuration Display:** Shows the \`poolConfig\` defined in your ".sfdx-hardis.yml" file, including the chosen storage service and the maximum number of scratch orgs. +- **Pool Storage Content:** Displays the raw content of the pool storage, which includes details about each scratch org in the pool (e.g., alias, username, expiration date). +- **Available Scratch Org Count:** Provides a summary of how many scratch orgs are currently available in the pool. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves the \`poolConfig\` from the project's ".sfdx-hardis.yml" file using \`getConfig\`. +- **Pool Storage Retrieval:** It uses \`getPoolStorage\` to connect to the configured storage service (e.g., Salesforce Custom Object, Redis) and retrieve the current state of the scratch org pool. +- **Data Display:** It logs the retrieved pool configuration and pool storage content to the console in a human-readable format. +- **Error Handling:** It checks if a scratch org pool is configured for the project and provides a warning message if it's not. +
+`; + + public static examples = ['$ sf hardis:scratch:pool:view']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': requiredHubFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { + const { flags } = await this.parse(ScratchPoolView); // Get pool configuration - const config = await getConfig("project"); + const config = await getConfig('project'); const poolConfig = config.poolConfig || {}; - uxLog(this, "Pool config: " + c.grey(JSON.stringify(poolConfig, null, 2))); + uxLog("log", this, 'Pool config: ' + c.grey(JSON.stringify(poolConfig, null, 2))); // Missing scratch orgs pool configuration if (!poolConfig.storageService) { uxLog( + "warning", this, c.yellow( - `There is not scratch orgs pool configured on this project. Please see with your tech lead about using command hardis:scratch:pool:configure`, - ), + `There is not scratch orgs pool configured on this project. 
Please see with your tech lead about using command hardis:scratch:pool:configure` + ) ); - return { status: 1, outputString: "Scratch org pool configuration to create" }; + return { status: 1, outputString: 'Scratch org pool configuration to create' }; } // Query pool storage - const poolStorage = await getPoolStorage({ devHubConn: this?.hubOrg?.getConnection(), devHubUsername: this?.hubOrg?.getUsername() }); - uxLog(this, "Pool storage: " + c.grey(JSON.stringify(poolStorage, null, 2))); + const poolStorage = await getPoolStorage({ + devHubConn: flags['target-dev-hub']?.getConnection(), + devHubUsername: flags['target-dev-hub']?.getUsername(), + }); + uxLog("other", this, 'Pool storage: ' + c.grey(JSON.stringify(poolStorage, null, 2))); const scratchOrgs = poolStorage.scratchOrgs || []; const availableNumber = scratchOrgs.length; // Display logs - uxLog(this, c.cyan(`There are ${c.bold(availableNumber)} available scratch orgs`)); + uxLog("action", this, c.cyan(`There are ${c.bold(availableNumber)} available scratch orgs`)); // Return an object to be displayed with --json return { status: 0, - outputString: "Viewed scratch org pool", + outputString: 'Viewed scratch org pool', poolStorage: poolStorage, availableScratchOrgs: availableNumber, maxScratchOrgs: poolConfig.maxScratchOrgsNumber, diff --git a/src/commands/hardis/scratch/pull.ts b/src/commands/hardis/scratch/pull.ts index c295f2904..df6b89f8b 100644 --- a/src/commands/hardis/scratch/pull.ts +++ b/src/commands/hardis/scratch/pull.ts @@ -1,73 +1,94 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { forceSourcePull } from "../../../common/utils/deployUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { forceSourcePull 
} from '../../../common/utils/deployUtils.js'; +import { uxLog } from '../../../common/utils/index.js'; +import c from "chalk"; +import { CONSTANTS } from '../../../config/index.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class SourcePull extends SfCommand { + public static title = 'Scratch PULL'; -export default class SourcePull extends SfdxCommand { - public static title = "Scratch PULL"; + public static description = ` +## Command Behavior - public static description = `This commands pulls the updates you performed in your scratch or sandbox org, into your local files +**Pulls metadata changes from your scratch org or source-tracked sandbox into your local project files.** -Then, you probably want to stage and commit the files containing the updates you want to keep, as explained in this video. +This command is essential for synchronizing your local development environment with the changes you've made directly in your Salesforce org. After pulling, you can then stage and commit the relevant files to your version control system. 
- +Key features and considerations: -- Calls sfdx force:source:pull under the hood -- If there are errors, proposes to automatically add erroneous item in \`.forceignore\`, then pull again -- If you want to always retrieve sources like CustomApplication that are not always detected as updates by force:source:pull , you can define property **autoRetrieveWhenPull** in .sfdx-hardis.yml +- **Underlying Command:** Internally, this command executes \`sf project retrieve start\` to fetch the metadata. +- **Error Handling:** If the pull operation encounters errors, it offers to automatically add the problematic items to your \`.forceignore\` file and then attempts to pull again, helping you resolve conflicts and ignore unwanted metadata. +- **Missing Updates:** If you don't see certain updated items in the pull results, you might need to manually retrieve them using the Salesforce Extension's **Org Browser** or the **Salesforce CLI** directly. Refer to the [Retrieve Metadatas documentation](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-publish-task/#retrieve-metadatas) for more details. +- **Automatic Retrieval:** You can configure the \`autoRetrieveWhenPull\` property in your \`.sfdx-hardis.yml\` file to always retrieve specific metadata types (e.g., \`CustomApplication\`) that might not always be detected as updates by \`project:retrieve:start\`. -Example: +Example \`.sfdx-hardis.yml\` configuration for \`autoRetrieveWhenPull\`: \`\`\`yaml autoRetrieveWhenPull: - CustomApplication:MyCustomApplication - CustomApplication:MyOtherCustomApplication - CustomApplication:MyThirdCustomApp \`\`\` + +For a visual explanation of the process, watch this video: + + + +
+Technical explanations + +The command's technical implementation focuses on robust metadata synchronization: + +- **Salesforce CLI Wrapper:** It acts as a wrapper around the standard Salesforce CLI \`sf project retrieve start\` command, providing enhanced error handling and configuration options. +- **Force Source Pull Utility:** The core logic resides in the \`forceSourcePull\` utility function, which orchestrates the retrieval process, including handling \`.forceignore\` updates. +- **Configuration Integration:** It reads the \`autoRetrieveWhenPull\` setting from the project's \`.sfdx-hardis.yml\` to determine additional metadata to retrieve automatically. +- **User Feedback:** Provides clear messages to the user regarding the pull status and guidance for troubleshooting. +
`; - public static examples = ["$ sfdx hardis:scratch:pull"]; + public static examples = ['$ sf hardis:scratch:pull']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { - const debugMode = this.flags.debug || false; - const targetUsername = this.org.getUsername(); + const { flags } = await this.parse(SourcePull); + const debugMode = flags.debug || false; + const targetUsername = flags['target-org'].getUsername() || ''; + uxLog("action", this, c.cyan(`This command will pull all the latest metadata changes from your dev org into your local project files (even those updated by other users).`)); + uxLog("action", this, c.cyan(`Pulling metadata changes from org: ${c.bold(targetUsername)}`)); await forceSourcePull(targetUsername, debugMode); + uxLog("warning", this, c.yellow(`If you don't see your updated items in the 
results, check the following documentation: https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/#retrieve-metadatas`)); + + WebSocketClient.sendReportFileMessage("workbench.view.scm", "Commit your retrieved files", "actionCommand"); + WebSocketClient.sendReportFileMessage(`${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-publish-task/#commit-your-updates`, "Retrieve and Commit documentation", 'docUrl'); // Return an object to be displayed with --json - return { outputString: "Pulled scratch org updates" }; + return { outputString: 'Pulled scratch org / source-tracked sandbox updates' }; } } diff --git a/src/commands/hardis/scratch/push.ts b/src/commands/hardis/scratch/push.ts index cc2f4d718..aff955575 100644 --- a/src/commands/hardis/scratch/push.ts +++ b/src/commands/hardis/scratch/push.ts @@ -1,57 +1,68 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { forceSourcePush } from "../../../common/utils/deployUtils"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { forceSourcePush } from '../../../common/utils/deployUtils.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. 
-const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class SourcePush extends SfCommand { + public static title = 'Scratch PUSH'; -export default class SourcePush extends SfdxCommand { - public static title = "Scratch PUSH"; + public static description = `## Command Behavior - public static description = `Push local files to scratch org +**Pushes local Salesforce DX source files to a scratch org or source-tracked sandbox.** -Calls \`sfdx force:source:push\` under the hood +This command is a fundamental operation in Salesforce DX development, allowing developers to synchronize their local codebase with their development org. It ensures that changes made locally are reflected in the scratch org, enabling testing and validation. + +Key functionalities: + +- **Source Synchronization:** Deploys all local changes (metadata and code) to the target scratch org. +- **Underlying Command:** Internally, this command executes \`sf project deploy start\` to perform the push operation. + +
+Technical explanations + +The command's technical implementation involves: + +- **Salesforce CLI Wrapper:** It acts as a wrapper around the standard Salesforce CLI \`sf project deploy start\` command. +- **\`forceSourcePush\` Utility:** The core logic resides in the \`forceSourcePush\` utility function, which orchestrates the deployment process. +- **Connection Handling:** It uses the connection to the target org to perform the push operation. +
`; - public static examples = ["$ sfdx hardis:scratch:push"]; + public static examples = ['$ sf hardis:scratch:push']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - // protected static requiresDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; /* jscpd:ignore-end */ public async run(): Promise { - const debugMode = this.flags.debug || false; - await forceSourcePush(this.org.getUsername(), this, debugMode, { conn: this.org.getConnection() }); + const { flags } = await this.parse(SourcePush); + const debugMode = flags.debug || false; + await forceSourcePush(flags['target-org'].getUsername() || '', this, debugMode, { + conn: flags['target-org'].getConnection(), + }); // Return an object to be displayed with --json - return { outputString: "Pushed local git branch in scratch org" }; + return { outputString: 'Pushed local git branch in scratch org' }; } } diff --git a/src/commands/hardis/source/deploy.ts b/src/commands/hardis/source/deploy.ts index 
69f05f442..5fb5a3fe2 100644 --- a/src/commands/hardis/source/deploy.ts +++ b/src/commands/hardis/source/deploy.ts @@ -1,12 +1,19 @@ -import { flags, FlagsConfig, SfdxCommand } from "@salesforce/command"; -import { Duration } from "@salesforce/kit"; -import { AnyJson } from "@salesforce/ts-types"; -import { GitProvider } from "../../../common/gitProvider"; -import { checkDeploymentOrgCoverage, executePrePostCommands, extractOrgCoverageFromLog } from "../../../common/utils/deployUtils"; -import { wrapSfdxCoreCommand } from "../../../common/utils/wrapUtils"; +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { GitProvider } from '../../../common/gitProvider/index.js'; +import { + checkDeploymentOrgCoverage, + executePrePostCommands, + extractOrgCoverageFromLog, +} from '../../../common/utils/deployUtils.js'; +import { wrapSfdxCoreCommand } from '../../../common/utils/wrapUtils.js'; +import { uxLog } from '../../../common/utils/index.js'; +import { CONSTANTS } from '../../../config/index.js'; // Wrapper for sfdx force:source:deploy -export class Deploy extends SfdxCommand { +export class Deploy extends SfCommand { public static readonly description = `sfdx-hardis wrapper for sfdx force:source:deploy that displays tips to solve deployment errors. 
Additional to the base command wrapper: If using **--checkonly**, add options **--checkcoverage** and **--coverageformatters json-summary** to check that org coverage is > 75% (or value defined in .sfdx-hardis.yml property **apexTestsMinCoverageOrgWide**) @@ -15,16 +22,22 @@ Additional to the base command wrapper: If using **--checkonly**, add options ** You can also have deployment results as pull request comments, on: -- GitHub (see [GitHub Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/)) -- Gitlab (see [Gitlab integration configuration](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-gitlab/)) -- Azure DevOps (see [Azure integration configuration](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/)) +- GitHub (see [GitHub Pull Requests comments config](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-github/)) +- Gitlab (see [Gitlab integration configuration](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-gitlab/)) +- Azure DevOps (see [Azure integration configuration](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-azure/)) [![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) ### Deployment pre or post commands -You can define command lines to run before or after a deployment +You can define command lines to run before or after a deployment, with parameters: + +- **id**: Unique Id for the command +- **label**: Human readable label for the command +- **skipIfError**: If defined to "true", the post-command won't be run if there is a deployment failure +- **context**: Defines the context where the command will be run. 
Can be **all** (default), **check-deployment-only** or **process-deployment-only** +- **runOnlyOnceByOrg**: If set to true, the command will be run only one time per org. A record of SfdxHardisTrace__c is stored to make that possible (it needs to be existing in target org) If the commands are not the same depending on the target org, you can define them into **config/branches/.sfdx-hardis-BRANCHNAME.yml** instead of root **config/.sfdx-hardis.yml** @@ -38,6 +51,7 @@ commandsPreDeploy: - id: knowledgeAssign label: Assign Knowledge user to the deployment user command: sf data update record --sobject User --where "Username='deploy.github@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + commandsPostDeploy: - id: knowledgeUnassign label: Remove KnowledgeUser right to the user who has it @@ -45,6 +59,12 @@ commandsPostDeploy: - id: knowledgeAssign label: Assign Knowledge user to desired username command: sf data update record --sobject User --where "Username='admin-yser@myclient.com'" --values "UserPermissionsKnowledgeUser='true'" --json + - id: someActionToRunJustOneTime + label: And to run only if deployment is success + command: sf sfdmu:run ... 
+ skipIfError: true + context: process-deployment-only + runOnlyOnceByOrg: true \`\`\` Notes: @@ -54,114 +74,137 @@ Notes: [See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_deploy) `; public static readonly examples = [ - "$ sfdx hardis:source:deploy -x manifest/package.xml --wait 60 --ignorewarnings --testlevel RunLocalTests --postdestructivechanges ./manifest/destructiveChanges.xml --targetusername nicolas.vuillamy@cloudity.com.sfdxhardis --checkonly --checkcoverage --verbose --coverageformatters json-summary", + '$ sf hardis:source:deploy -x manifest/package.xml --wait 60 --ignorewarnings --testlevel RunLocalTests --postdestructivechanges ./manifest/destructiveChanges.xml --target-org nicolas.vuillamy@cloudity.com.sfdxhardis --checkonly --checkcoverage --verbose --coverageformatters json-summary', ]; public static readonly requiresProject = true; - public static readonly requiresUsername = true; - public static readonly flagsConfig: FlagsConfig = { - checkonly: flags.boolean({ - char: "c", - description: "checkonly", + public static readonly flags: any = { + checkonly: Flags.boolean({ + char: 'c', + description: 'checkonly', }), - soapdeploy: flags.boolean({ + soapdeploy: Flags.boolean({ default: false, - description: "soapDeploy", - }), - wait: flags.minutes({ - char: "w", - default: Duration.minutes(60), - min: Duration.minutes(0), // wait=0 means deploy is asynchronous - description: "wait", - }), - testlevel: flags.enum({ - char: "l", - description: "testlevel", - options: ["NoTestRun", "RunSpecifiedTests", "RunLocalTests", "RunAllTestsInOrg"], - default: "NoTestRun", - }), - runtests: flags.array({ - char: "r", - description: "runTests", + description: 'soapDeploy', + }), + wait: Flags.integer({ + char: 'w', + default: 60, + min: 0, // wait=0 means deploy is asynchronous + description: 'wait', + }), + testlevel: 
Flags.string({ + char: 'l', + description: 'testlevel', + options: ['NoTestRun', 'RunSpecifiedTests', 'RunLocalTests', 'RunAllTestsInOrg'], + default: 'NoTestRun', + }), + runtests: Flags.string({ + char: 'r', + description: 'runTests', default: [], - }), - ignoreerrors: flags.boolean({ - char: "o", - description: "ignoreErrors", - }), - ignorewarnings: flags.boolean({ - char: "g", - description: "ignoreWarnings", - }), - validateddeployrequestid: flags.id({ - char: "q", - description: "validateDeployRequestId", - exclusive: ["manifest", "metadata", "sourcepath", "checkonly", "testlevel", "runtests", "ignoreerrors", "ignorewarnings"], - }), - verbose: flags.builtin({ - description: "verbose", - }), - metadata: flags.array({ - char: "m", - description: "metadata", - exclusive: ["manifest", "sourcepath"], - }), - sourcepath: flags.array({ - char: "p", - description: "sourcePath", - exclusive: ["manifest", "metadata"], - }), - manifest: flags.filepath({ - char: "x", - description: "flagsLong.manifest", - exclusive: ["metadata", "sourcepath"], - }), - predestructivechanges: flags.filepath({ - description: "predestructivechanges", - dependsOn: ["manifest"], - }), - postdestructivechanges: flags.filepath({ - description: "postdestructivechanges", - dependsOn: ["manifest"], - }), - tracksource: flags.boolean({ - char: "t", - description: "tracksource", - exclusive: ["checkonly", "validateddeployrequestid"], - }), - forceoverwrite: flags.boolean({ - char: "f", - description: "forceoverwrite", - dependsOn: ["tracksource"], - }), - resultsdir: flags.directory({ - description: "resultsdir", - }), - coverageformatters: flags.array({ - description: "coverageformatters", - }), - junit: flags.boolean({ description: "junit" }), - checkcoverage: flags.boolean({ description: "Check Apex org coverage" }), - debug: flags.boolean({ + multiple: true, + }), + ignoreerrors: Flags.boolean({ + description: 'ignoreErrors', + }), + ignorewarnings: Flags.boolean({ + char: 'g', + description: 
'ignoreWarnings', + }), + validateddeployrequestid: Flags.string({ + char: 'q', + description: 'validateDeployRequestId', + exclusive: [ + 'manifest', + 'metadata', + 'sourcepath', + 'checkonly', + 'testlevel', + 'runtests', + 'ignoreerrors', + 'ignorewarnings', + ], + }), + verbose: Flags.boolean({ + description: 'verbose', + }), + metadata: Flags.string({ + char: 'm', + description: 'metadata', + exclusive: ['manifest', 'sourcepath'], + multiple: true, + }), + sourcepath: Flags.string({ + char: 'p', + description: 'sourcePath', + exclusive: ['manifest', 'metadata'], + multiple: true, + }), + manifest: Flags.file({ + char: 'x', + description: 'flagsLong.manifest', + exclusive: ['metadata', 'sourcepath'], + }), + predestructivechanges: Flags.file({ + description: 'predestructivechanges', + dependsOn: ['manifest'], + }), + postdestructivechanges: Flags.file({ + description: 'postdestructivechanges', + dependsOn: ['manifest'], + }), + tracksource: Flags.boolean({ + char: 't', + description: 'tracksource', + exclusive: ['checkonly', 'validateddeployrequestid'], + }), + forceoverwrite: Flags.boolean({ + char: 'f', + description: 'forceoverwrite', + dependsOn: ['tracksource'], + }), + resultsdir: Flags.directory({ + description: 'resultsdir', + }), + coverageformatters: Flags.string({ + description: 'coverageformatters', + multiple: true, + }), + junit: Flags.boolean({ description: 'junit' }), + checkcoverage: Flags.boolean({ description: 'Check Apex org coverage' }), + debug: Flags.boolean({ default: false, - description: "debug", + description: 'debug', }), - websocket: flags.string({ - description: "websocket", + websocket: Flags.string({ + description: 'websocket', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - protected xorFlags = ["manifest", "metadata", "sourcepath", "validateddeployrequestid"]; + protected xorFlags = ['manifest', 'metadata', 'sourcepath', 'validateddeployrequestid']; public async run(): Promise { + const { flags } = await 
this.parse(Deploy); + uxLog("error", this, c.red('This command will be removed by Salesforce in November 2024.')); + uxLog("error", this, c.red('Please migrate to command sf hardis project deploy start')); + uxLog( + "error", + this, + c.red( + 'See https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm' + ) + ); // Run pre deployment commands if defined - await executePrePostCommands("commandsPreDeploy", true); - const result = await wrapSfdxCoreCommand("sfdx force:source:deploy", this.argv, this, this.flags.debug); + const conn = flags["target-org"].getConnection(); + await executePrePostCommands('commandsPreDeploy', { success: true, checkOnly: flags.checkonly, conn: conn }); + const result = await wrapSfdxCoreCommand('sfdx force:source:deploy', this.argv, this, flags.debug); // Check org coverage if requested - if (this.flags.checkcoverage && result.stdout) { - const orgCoveragePercent = await extractOrgCoverageFromLog(result.stdout + result.stderr || ""); - const checkOnly = this.flags.checkonly || false; + const checkOnly = flags.checkonly || false; + if (flags.checkcoverage && result.stdout) { + const orgCoveragePercent = await extractOrgCoverageFromLog(result.stdout + result.stderr || ''); if (orgCoveragePercent) { try { - await checkDeploymentOrgCoverage(orgCoveragePercent, { check: checkOnly }); + await checkDeploymentOrgCoverage(Number(orgCoveragePercent), { check: checkOnly }); } catch (errCoverage) { await GitProvider.managePostPullRequestComment(); throw errCoverage; @@ -169,8 +212,9 @@ Notes: } } // Run post deployment commands if defined - await executePrePostCommands("commandsPostDeploy", process.exitCode === 0); + await executePrePostCommands('commandsPostDeploy', { success: process.exitCode === 0, checkOnly: checkOnly, conn: conn }); await GitProvider.managePostPullRequestComment(); return result; } } +/* jscpd:ignore-end */ \ No newline at end of file diff --git 
a/src/commands/hardis/source/push.ts b/src/commands/hardis/source/push.ts index b215a9be5..89d27fcdf 100644 --- a/src/commands/hardis/source/push.ts +++ b/src/commands/hardis/source/push.ts @@ -1,45 +1,59 @@ -import { FlagsConfig, flags, SfdxCommand } from "@salesforce/command"; -import { Duration } from "@salesforce/kit"; -import { AnyJson } from "@salesforce/ts-types"; -import { wrapSfdxCoreCommand } from "../../../common/utils/wrapUtils"; +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import c from 'chalk'; +import { AnyJson } from '@salesforce/ts-types'; +import { wrapSfdxCoreCommand } from '../../../common/utils/wrapUtils.js'; +import { uxLog } from '../../../common/utils/index.js'; -export default class Push extends SfdxCommand { +export default class Push extends SfCommand { public static readonly description = `sfdx-hardis wrapper for sfdx force:source:push that displays tips to solve deployment errors. 
[![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](https://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) [See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_push) `; - protected static readonly flagsConfig: FlagsConfig = { - forceoverwrite: flags.boolean({ - char: "f", - description: "forceoverwrite", + public static readonly flags: any = { + forceoverwrite: Flags.boolean({ + char: 'f', + description: 'forceoverwrite', }), - wait: flags.minutes({ - char: "w", - default: Duration.minutes(60), - min: Duration.minutes(1), - description: "wait", + wait: Flags.integer({ + char: 'w', + default: 60, + min: 1, + description: 'wait', }), - ignorewarnings: flags.boolean({ - char: "g", - description: "ignorewarnings", + ignorewarnings: Flags.boolean({ + char: 'g', + description: 'ignorewarnings', }), - quiet: flags.builtin({ - description: "quiet", + quiet: Flags.boolean({ + description: 'quiet', }), - debug: flags.boolean({ + debug: Flags.boolean({ default: false, - description: "debug", + description: 'debug', }), - websocket: flags.string({ - description: "websocket", + websocket: Flags.string({ + description: 'websocket', }), + 'target-org': requiredOrgFlagWithDeprecations, }; - protected static requiresUsername = true; - protected static requiresProject = true; + + public static requiresProject = true; public async run(): Promise { - return await wrapSfdxCoreCommand("sfdx force:source:push", this.argv, this, this.flags.debug); + const { flags } = await this.parse(Push); + uxLog("error", this, c.red('This command will be removed by Salesforce in November 2024.')); + uxLog("error", this, c.red('Please migrate to command sf hardis project deploy start')); + uxLog( + "error", + this, + c.red( 
+ 'See https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm' + ) + ); + return await wrapSfdxCoreCommand('sfdx force:source:push', this.argv, this, flags.debug); } } +/* jscpd:ignore-end */ \ No newline at end of file diff --git a/src/commands/hardis/source/retrieve.ts b/src/commands/hardis/source/retrieve.ts index 7070ad61e..9d0a3c9c8 100644 --- a/src/commands/hardis/source/retrieve.ts +++ b/src/commands/hardis/source/retrieve.ts @@ -1,98 +1,131 @@ -import { flags, FlagsConfig, SfdxCommand } from "@salesforce/command"; -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import { MetadataUtils } from "../../../common/metadata-utils"; -import { isCI } from "../../../common/utils"; -import { promptOrgUsernameDefault } from "../../../common/utils/orgUtils"; -import { wrapSfdxCoreCommand } from "../../../common/utils/wrapUtils"; +/* jscpd:ignore-start */ +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { SfError } from '@salesforce/core'; +import c from 'chalk'; +import { MetadataUtils } from '../../../common/metadata-utils/index.js'; +import { isCI, uxLog } from '../../../common/utils/index.js'; +import { promptOrgUsernameDefault } from '../../../common/utils/orgUtils.js'; +import { wrapSfdxCoreCommand } from '../../../common/utils/wrapUtils.js'; -export class SourceRetrieve extends SfdxCommand { - public static readonly description = `sfdx-hardis wrapper for sfdx force:source:retrieve +export class SourceRetrieve extends SfCommand { + public static readonly description = ` +## Command Behavior -- If no retrieve constraint is sent, as assisted menu will request the list of metadatas to retrieve -- If no org is selected , an assisted menu will request the user to choose one +**A wrapper command for Salesforce CLI's \`sf project retrieve start\` (formerly \`sfdx force:source:retrieve\`), with enhanced interactive 
features.** -[See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_retrieve) +This command facilitates the retrieval of metadata from a Salesforce org into your local project. It provides an assisted experience, especially when no specific retrieval constraints are provided. + +Key features: + +- **Assisted Metadata Selection:** If no \`sourcepath\`, \`manifest\`, \`metadata\`, or \`packagenames\` flags are specified, an interactive menu will prompt you to select the metadata types you wish to retrieve. +- **Assisted Org Selection:** If no target org is specified, an interactive menu will guide you to choose an org for the retrieval operation. +- **Backward Compatibility:** While this command wraps the newer \`sf project retrieve start\`, it maintains compatibility with the older \`sfdx force:source:retrieve\` flags. + +**Important Note:** The underlying Salesforce CLI command \`sfdx force:source:retrieve\` is being deprecated by Salesforce in November 2024. It is recommended to migrate to \`sf project retrieve start\` for future compatibility. See [Salesforce CLI Migration Guide](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm) for more information. + +
+Technical explanations + +This command acts as an intelligent wrapper around the Salesforce CLI's source retrieval functionality: + +- **Command Wrapping:** It uses the \`wrapSfdxCoreCommand\` utility to execute the \`sfdx force:source:retrieve\` (or its equivalent \`sf project retrieve start\`) command, passing through all relevant flags and arguments. +- **Interactive Prompts:** It leverages \`MetadataUtils.promptMetadataTypes()\` and \`promptOrgUsernameDefault()\` to provide interactive menus for metadata and org selection when the user does not provide them as flags. +- **Argument Transformation:** It dynamically constructs the command-line arguments for the underlying Salesforce CLI command based on user selections and provided flags. +- **Error Handling:** It includes basic error handling, such as prompting the user to re-select an org if an issue occurs during org selection. +- **Deprecation Warning:** It explicitly logs warnings about the deprecation of \`sfdx force:source:retrieve\` to inform users about upcoming changes. +
`; public static readonly examples = []; public static readonly requiresProject = true; - public static readonly requiresUsername = true; - public static readonly flagsConfig: FlagsConfig = { - apiversion: flags.builtin({ + public static readonly flags: any = { + apiversion: Flags.orgApiVersion({ /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ // @ts-ignore force char override for backward compat - char: "a", + char: 'a', }), - sourcepath: flags.array({ - char: "p", - description: "sourcePath", - longDescription: "sourcePath", - exclusive: ["manifest", "metadata"], + sourcepath: Flags.string({ + char: 'p', + description: 'sourcePath', + longDescription: 'sourcePath', + exclusive: ['manifest', 'metadata'], + multiple: true, }), - wait: flags.minutes({ - char: "w", - description: "wait", - longDescription: "wait", + wait: Flags.integer({ + char: 'w', + description: 'wait', }), - manifest: flags.filepath({ - char: "x", - description: "manifest", - longDescription: "manifest", - exclusive: ["metadata", "sourcepath"], + manifest: Flags.directory({ + char: 'x', + description: 'manifest', + exclusive: ['metadata', 'sourcepath'], }), - metadata: flags.array({ - char: "m", - description: "metadata", - longDescription: "metadata", - exclusive: ["manifest", "sourcepath"], + metadata: Flags.string({ + char: 'm', + description: 'metadata', + longDescription: 'metadata', + exclusive: ['manifest', 'sourcepath'], + multiple: true, }), - packagenames: flags.array({ - char: "n", - description: "packagenames", + packagenames: Flags.string({ + char: 'n', + description: 'packagenames', + multiple: true, }), - tracksource: flags.boolean({ - char: "t", - description: "tracksource", + tracksource: Flags.boolean({ + char: 't', + description: 'tracksource', }), - forceoverwrite: flags.boolean({ - char: "f", - description: "forceoverwrite", - dependsOn: ["tracksource"], + forceoverwrite: Flags.boolean({ + char: 'f', + description: 'forceoverwrite', + dependsOn: 
['tracksource'], }), - verbose: flags.builtin({ - description: "verbose", + verbose: Flags.boolean({ + description: 'verbose', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: "debugMode", + description: 'debugMode', }), - websocket: flags.string({ - description: "websocket", + websocket: Flags.string({ + description: 'websocket', }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-org': requiredOrgFlagWithDeprecations, }; public async run(): Promise { + const { flags } = await this.parse(SourceRetrieve); + uxLog("error", this, c.red('This command will be removed by Salesforce in November 2024.')); + uxLog("error", this, c.red('Please migrate to command sf hardis project retrieve start')); + uxLog( + "error", + this, + c.red( + 'See https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_mig_deploy_retrieve.htm' + ) + ); const args = this.argv; // Manage user selection for metadatas - if (!isCI && !this.flags.sourcepath && !this.flags.manifest && !this.flags.metadata && !this.flags.packagenames) { + if (!isCI && !flags.sourcepath && !flags.manifest && !flags.metadata && !flags.packagenames) { const metadatas = await MetadataUtils.promptMetadataTypes(); - const metadataArg = metadatas.map((metadataType: any) => metadataType.xmlName).join(","); - args.push(...["-m", `"${metadataArg}"`]); + const metadataArg = metadatas.map((metadataType: any) => metadataType.xmlName).join(','); + args.push(...['-m', `"${metadataArg}"`]); } // Manage user selection for org - if (!isCI && !this.flags.targetusername) { - let orgUsername = this.org.getUsername(); + if (!isCI && !flags['target-org']) { + let orgUsername = (flags['target-org'] as any).getUsername(); orgUsername = await 
promptOrgUsernameDefault(this, orgUsername, { devHub: false, setDefault: false }); if (orgUsername) { - args.push(...["--targetusername", `"${orgUsername}"`]); + args.push(...['--target-org', `"${orgUsername}"`]); } else { - throw new SfdxError(c.yellow("For technical reasons, run again this command and select your org in the list :)")); + throw new SfError(c.yellow('For technical reasons, run again this command and select your org in the list :)')); } } - return await wrapSfdxCoreCommand("sfdx force:source:retrieve", args, this, this.flags.debug); + return await wrapSfdxCoreCommand('sfdx force:source:retrieve', args, this, flags.debug); } } +/* jscpd:ignore-end */ \ No newline at end of file diff --git a/src/commands/hardis/work/new.ts b/src/commands/hardis/work/new.ts index 32ff4a6f2..af4d9286e 100644 --- a/src/commands/hardis/work/new.ts +++ b/src/commands/hardis/work/new.ts @@ -1,86 +1,100 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as path from "path"; -import { MetadataUtils } from "../../../common/metadata-utils"; -import { checkGitClean, ensureGitBranch, execCommand, execSfdxJson, git, gitCheckOutRemote, uxLog } from "../../../common/utils"; -import { selectTargetBranch } from "../../../common/utils/gitUtils"; +import { SfCommand, Flags, optionalHubFlagWithDeprecations, optionalOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import * as path from 'path'; +import { MetadataUtils } from '../../../common/metadata-utils/index.js'; +import { + checkGitClean, + ensureGitBranch, + execCommand, + execSfdxJson, + getGitRepoUrl, + git, + gitCheckOutRemote, + uxLog, +} from '../../../common/utils/index.js'; +import { selectTargetBranch } from 
'../../../common/utils/gitUtils.js'; import { initApexScripts, initOrgData, initOrgMetadatas, initPermissionSetAssignments, installPackages, + makeSureOrgIsConnected, promptOrg, -} from "../../../common/utils/orgUtils"; -import { prompts } from "../../../common/utils/prompts"; -import { WebSocketClient } from "../../../common/websocketClient"; -import { getConfig, setConfig } from "../../../config"; -import SandboxCreate from "../org/create"; -import ScratchCreate from "../scratch/create"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class NewTask extends SfdxCommand { - public static title = "New work task"; - - public static description = `Assisted menu to start working on a Salesforce task. - -Advanced instructions in [Create New Task documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/) - -At the end of the command, it will allow you to work on either a scratch org or a sandbox, depending on your choices. 
- -Under the hood, it can: - -- Make **git pull** to be up to date with target branch -- Create **new git branch** with formatted name (you can override the choices using .sfdx-hardis.yml property **branchPrefixChoices**) -- Create and initialize a scratch org or a source-tracked sandbox (config can be defined using \`config/.sfdx-hardis.yml\`): -- (and for scratch org only for now): - - **Install packages** - - Use property \`installedPackages\` - - **Push sources** - - **Assign permission sets** - - Use property \`initPermissionSets\` - - **Run apex initialization scripts** - - Use property \`scratchOrgInitApexScripts\` - - **Load data** - - Use property \`dataPackages\` +} from '../../../common/utils/orgUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { WebSocketClient } from '../../../common/websocketClient.js'; +import { CONSTANTS, getConfig, setConfig } from '../../../config/index.js'; +import SandboxCreate from '../org/create.js'; +import ScratchCreate from '../scratch/create.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class NewTask extends SfCommand { + public static title = 'New User Story'; + + public static description = ` +## Command Behavior + +**Assisted menu to start working on a Salesforce User Story, streamlining the setup of your development environment.** + +This command guides you through the process of preparing your local environment and a Salesforce org for a new development or configuration based User Story. It automates several steps, ensuring consistency and adherence to project standards. + +Key features include: + +- **Git Branch Management:** Ensures your local Git repository is up-to-date with the target branch and creates a new Git branch with a formatted name based on your User Story details. 
Branch naming conventions can be customized via the \`branchPrefixChoices\` property in \`.sfdx-hardis.yml\`. + +- **Org Provisioning & Initialization:** Facilitates the creation and initialization of either a scratch org or a source-tracked sandbox. The configuration for org initialization (e.g., package installation, source push, permission set assignments, Apex script execution, data loading) can be defined in \`config/.sfdx-hardis.yml\ + +- **Project-Specific Configuration:** Supports defining multiple target branches (\`availableTargetBranches\`) and projects (\`availableProjects\`) in \`.sfdx-hardis.yml\`, allowing for tailored User Stories workflows. + +- **User Story Name Validation:** Enforces User Story name formatting using \`newTaskNameRegex\` and provides examples via \`newTaskNameRegexExample\ + +- **Shared Development Sandboxes:** Accounts for scenarios with shared development sandboxes, adjusting prompts to prevent accidental overwrites. + +Advanced instructions are available in the [Create New User Story documentation](${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-create-new-task/). + +
+Technical explanations + +The command's logic orchestrates various underlying processes: + +- **Git Operations:** Utilizes \`checkGitClean\`, \`ensureGitBranch\`, \`gitCheckOutRemote\`, and \`git().pull()\` to manage Git repository state and branches. +- **Interactive Prompts:** Leverages the \`prompts\` library to gather user input for User Story type, source types, and User Story names. +- **Configuration Management:** Reads and applies project-specific configurations from \`.sfdx-hardis.yml\` using \`getConfig\` and \`setConfig\ +- **Org Initialization Utilities:** Calls a suite of utility functions for org setup, including \`initApexScripts\`, \`initOrgData\`, \`initOrgMetadatas\`, \`initPermissionSetAssignments\`, \`installPackages\`, and \`makeSureOrgIsConnected\ +- **Salesforce CLI Interaction:** Executes Salesforce CLI commands (e.g., \`sf config set target-org\`, \`sf org open\`, \`sf project delete tracking\`) via \`execCommand\` and \`execSfdxJson\ +- **Dynamic Org Selection:** Presents choices for scratch orgs or sandboxes based on project configuration and existing orgs, dynamically calling \`ScratchCreate.run\` or \`SandboxCreate.run\` as needed. +- **WebSocket Communication:** Sends refresh status messages via \`WebSocketClient.sendRefreshStatusMessage()\` to update connected VS Code clients. +
`; - public static examples = ["$ sfdx hardis:work:task:new"]; + public static examples = ['$ sf hardis:work:new']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), + 'target-dev-hub': optionalHubFlagWithDeprecations, + 'target-org': optionalOrgFlagWithDeprecations }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - protected static supportsDevhubUsername = true; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + public static requiresProject = true; protected targetBranch: string; protected debugMode = false; @@ -88,164 +102,224 @@ Under the hood, it can: /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(NewTask); + this.debugMode = flags.debug || false; - uxLog(this, c.cyan("This tool will assist you to create a new task (dev or config) with Hardis CI/CD")); - uxLog(this, c.cyan("When you don't know what to answer, you can let the default value and push ENTER")); + uxLog("action", this, c.cyan('This tool will assist you to create a new User Story (dev or config) with 
SFDX Hardis CI/CD')); + uxLog("log", this, c.grey("When you don't know what to answer, you can let the default value and push ENTER")); // Make sure the git status is clean, to not delete uncommitted updates await checkGitClean({ allowStash: true }); - const config = await getConfig("project"); + const config = await getConfig('project'); this.targetBranch = await selectTargetBranch(); const defaultBranchPrefixChoices = [ { - title: "🏗️ Feature", - value: "features", + title: '🏗️ Feature', + value: 'features', description: "New feature, evolution of an existing feature... If you don't know, just select Feature", }, - { title: "🛠️ Debug", value: "fixes", description: "A bug has been identified and you are the right person to solve it !" }, + { + title: '🛠️ Debug', + value: 'fixes', + description: 'A bug has been identified and you are the right person to solve it !', + }, ]; const branchPrefixChoices = config.branchPrefixChoices || defaultBranchPrefixChoices; // Select project if multiple projects are defined in availableProjects .sfdx-hardis.yml property - let projectBranchPart = ""; + let projectBranchPart = ''; const availableProjects = config.availableProjects || []; if (availableProjects.length > 1) { const projectResponse = await prompts({ - type: "select", - name: "project", - message: c.cyanBright("Please select the project your task is for"), + type: 'select', + name: 'project', + message: c.cyanBright('Please select the project your User Story is for'), + description: 'Choose which project this new work item belongs to', + placeholder: 'Select a project', choices: availableProjects.map((project: string) => { - return { title: project, value: project }; + return { + title: project.includes(',') ? project.split(',').join(' - ') : project, + value: project.includes(',') ? 
project.split(',')[0] : project, + }; }), }); - projectBranchPart = projectResponse.project + "/"; + projectBranchPart = projectResponse.project + '/'; } // Request info to build branch name. ex features/config/MYTASK const response = await prompts([ { - type: "select", - name: "branch", - message: c.cyanBright("What is the type of the task you want to do ?"), + type: 'select', + name: 'branch', + message: c.cyanBright('What is the type of the User Story you want to do ?'), + description: 'Select the category of work that best describes your User Story', + placeholder: 'Select User Story type', initial: 0, choices: branchPrefixChoices, }, { - type: "select", - name: "sources", - message: c.cyanBright("What type(s) of Salesforce updates will you have to perform for this task ?"), + type: 'select', + name: 'sources', + message: c.cyanBright('What type(s) of Salesforce updates will you implement to perform this User Story ?'), + description: 'Choose the type of changes you will make to help set up the appropriate development environment', + placeholder: 'Select update type', initial: 0, choices: [ - { title: "🥸 Configuration", value: "config", description: "You will update anything in the setup except apex code :)" }, - { title: "🤓 Development", value: "dev", description: "You are a developer who will do magic with Apex or Javascript !" 
}, { - title: "🥸🤓 Configuration + Development", - value: "dev", - description: "Like the unicorn you are, you will update configuration but also write code :)", + title: '🥸 Configuration', + value: 'config', + description: 'You will update anything in the setup except apex code :)', + }, + { + title: '🤓 Development', + value: 'dev', + description: 'You are a developer who will do magic with Apex or Javascript !', + }, + { + title: '🥸🤓 Configuration + Development', + value: 'dev', + description: 'Like the unicorn you are, you will update configuration but also write code :)', }, ], - }, - { - type: "text", - name: "taskName", - message: c.cyanBright( - "What is the name of your new task ? (examples: JIRA123-webservice-get-account, T1000-flow-process-opportunity...). Please avoid accents or special characters", - ), - }, + } ]); + // Request task name + const taskName = await this.promptTaskName(config.newTaskNameRegex || null, config.newTaskNameRegexExample || null); + // Checkout development main branch - const branchName = `${projectBranchPart}${response.branch || "features"}/${response.sources || "dev"}/${response.taskName.replace( - /[^a-zA-Z0-9 -]|\s/g, - "-", - )}`; - uxLog(this, c.cyan(`Checking out the most recent version of branch ${c.bold(this.targetBranch)} from git server...`)); + const branchName = `${projectBranchPart}${response.branch || 'features'}/${response.sources || 'dev'}/${taskName}`; + const repoUrl = await getGitRepoUrl() + uxLog( + "action", + this, + c.cyan(`Checking out the most recent version of git branch ${c.bold(this.targetBranch)} from ${repoUrl} ...`) + ); await gitCheckOutRemote(this.targetBranch); // Pull latest version of target branch await git().pull(); // Create new branch - uxLog(this, c.cyan(`Creating new git branch ${c.green(branchName)}...`)); + uxLog("action", this, c.cyan(`Creating new local git branch ${c.green(branchName)}...`)); await ensureGitBranch(branchName); // Update config if necessary if 
(config.developmentBranch !== this.targetBranch && (config.availableTargetBranches || null) == null) { const updateDefaultBranchRes = await prompts({ - type: "confirm", - name: "value", - message: c.cyanBright(`Do you want to update your default target git branch to ${c.green(this.targetBranch)} ?`), + type: 'confirm', + name: 'value', + message: c.cyanBright( + `Do you want to update your default target git branch to ${c.green(this.targetBranch)} ?` + ), + description: 'Set this branch as your default target for future work items', default: false, }); if (updateDefaultBranchRes.value === true) { - await setConfig("user", { developmentBranch: this.targetBranch }); + await setConfig('user', { developmentBranch: this.targetBranch }); } } // Update local user config files to store the target of the just created branch - const currentUserConfig = await getConfig("user"); + const currentUserConfig = await getConfig('user'); const localStorageBranchTargets = currentUserConfig.localStorageBranchTargets || {}; localStorageBranchTargets[branchName] = this.targetBranch; - await setConfig("user", { localStorageBranchTargets: localStorageBranchTargets }); + await setConfig('user', { localStorageBranchTargets: localStorageBranchTargets }); // Get allowed work org types from config if possible const allowedOrgTypes = config?.allowedOrgTypes || []; let selectedOrgType = allowedOrgTypes.length == 1 ? 
allowedOrgTypes[0] : null; // If necessary, Prompt if you want to use a scratch org or a tracked sandbox org, or no org - const orgTypeChoices = []; - if (allowedOrgTypes.includes("sandbox") || allowedOrgTypes.length === 0) { + const orgTypeChoices: any[] = []; + if (allowedOrgTypes.includes('sandbox') || allowedOrgTypes.length === 0) { + orgTypeChoices.push({ + title: '🌎 Sandbox org with source tracking', + value: 'sandbox', + description: + "Release manager told me that I can work on Sandboxes on my project so let's use fresh dedicated one", + }); + } + if (allowedOrgTypes.includes('scratch') || allowedOrgTypes.length === 0) { orgTypeChoices.push({ - title: "🌎 Sandbox org with source tracking", - value: "sandbox", - description: "Release manager told me that I can work on Sandboxes on my project so let's use fresh dedicated one", + title: '🪐 Scratch org', + value: 'scratch', + description: 'Scratch orgs are configured on my project so I want to create or reuse one', }); } - if (allowedOrgTypes.includes("scratch") || allowedOrgTypes.length === 0) { + if (flags['target-org'] && flags['target-org']?.getConnection()) { orgTypeChoices.push({ - title: "🪐 Scratch org", - value: "scratch", - description: "Scratch orgs are configured on my project so I want to create or reuse one", + title: `😎 Current org ${flags['target-org']?.getConnection().instanceUrl.replace("https://", "")}`, + value: 'currentOrg', + description: `Your default org with username ${flags['target-org']?.getUsername()} is already the org you want to use to work :)`, }); } orgTypeChoices.push({ title: "🤠 I'm hardcore, I don't need an org !", - value: "noOrg", - description: "You just want to play with XML and sfdx-hardis configuration, and you know what you are doing !", + value: 'noOrg', + description: 'You just want to play with XML and sfdx-hardis configuration, and you know what you are doing !', }); const orgTypeResponse = await prompts({ - type: "select", - name: "value", - message: 
c.cyanBright(`Do you want to use a scratch org or a tracked sandbox org ?`), + type: 'select', + name: 'value', + message: c.cyanBright(`In which Salesforce org do you want to work in ?`), + description: 'Choose the type of Salesforce org to use for your development work', + placeholder: 'Select org type', initial: 0, choices: orgTypeChoices, }); selectedOrgType = orgTypeResponse.value; // Select or create org that user will work in - if (selectedOrgType === "scratch") { + if (selectedOrgType === 'scratch') { // scratch org - await this.selectOrCreateScratchOrg(branchName); - } else if (selectedOrgType === "sandbox") { + await this.selectOrCreateScratchOrg(branchName, flags); + } else if (selectedOrgType === 'sandbox' || selectedOrgType === 'currentOrg') { // source tracked sandbox - await this.selectOrCreateSandbox(branchName, config); + await this.selectOrCreateSandbox(branchName, config, flags, selectedOrgType); } else { - uxLog(this, c.yellow(`No org selected... I hope you know what you are doing, don't break anything :)`)); + uxLog("warning", this, c.yellow(`No org selected... I hope you know what you are doing, don't break anything :)`)); } - uxLog(this, c.cyan(`You are now ready to work in branch ${c.green(branchName)} :)`)); + uxLog("action", this, c.cyan(`You are now ready to work in branch ${c.green(branchName)} :)`)); // Return an object to be displayed with --json - return { outputString: "Created new task" }; + return { outputString: 'Created new User Story' }; + } + + async promptTaskName(validationRegex: string | null, taskNameExample: string | null) { + if (taskNameExample == null) { + taskNameExample = 'MYPROJECT-123 Update account status validation rule'; + } + const taskResponse = await prompts({ + type: 'text', + name: 'taskName', + message: c.cyanBright( + `What is the name of your new User Story ? 
Please avoid accents or special characters` + ), + description: 'Enter a descriptive name for your User Story that will be used in the git branch name', + placeholder: `Ex: ${taskNameExample}`, + }); + const taskName = taskResponse.taskName.replace(/[^a-zA-Z0-9 -]|\s/g, '-'); + if (validationRegex != null && !new RegExp(validationRegex).test(taskName)) { + uxLog( + "warning", + this, + c.yellow( + `The User Story name ${c.bold(taskName)} does not match the expected pattern ${c.bold(validationRegex)}. Please try again` + ) + ); + return this.promptTaskName(validationRegex, taskNameExample); + } + return taskName; } // Select/Create scratch org - async selectOrCreateScratchOrg(branchName) { - const hubOrgUsername = this?.hubOrg?.getUsername(); - const scratchOrgList = await MetadataUtils.listLocalOrgs("scratch", { devHubUsername: hubOrgUsername }); + async selectOrCreateScratchOrg(branchName, flags) { + const hubOrgUsername = flags['target-dev-hub'].getUsername(); + const scratchOrgList = await MetadataUtils.listLocalOrgs('scratch', { devHubUsername: hubOrgUsername }); const currentOrg = await MetadataUtils.getCurrentOrg(); const baseChoices = [ { - title: c.yellow("🆕 Create new scratch org"), - value: "newScratchOrg", + title: c.yellow('🆕 Create new scratch org'), + value: 'newScratchOrg', description: "This will generate a new scratch org, and in a few minutes you'll be ready to work", }, ]; @@ -257,9 +331,11 @@ Under the hood, it can: }); } const scratchResponse = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', message: c.cyanBright(`Please select a scratch org to use for your branch ${c.green(branchName)}`), + description: 'Choose whether to create a new scratch org or reuse an existing one', + placeholder: 'Select scratch org option', initial: 0, choices: [ ...baseChoices, @@ -272,64 +348,204 @@ Under the hood, it can: }), ], }); - if (scratchResponse.value === "newScratchOrg") { - await setConfig("user", { + if 
(scratchResponse.value === 'newScratchOrg') { + await setConfig('user', { scratchOrgAlias: null, scratchOrgUsername: null, }); // Check if DevHub is connected - await this.config.runHook("auth", { + await this.config.runHook('auth', { Command: this, devHub: true, scratch: false, }); - this.assignHubOrg(); // Create scratch org const config = await getConfig(); - const createResult = await ScratchCreate.run(["--forcenew", "--targetdevhubusername", config.devHubAlias]); + const createResult = await ScratchCreate.run(['--forcenew', '--targetdevhubusername', config.devHubAlias]); if (createResult == null) { - throw new SfdxError("Unable to create scratch org"); + throw new SfError('Unable to create scratch org'); } } else { // Set selected org as default org - await execCommand(`sfdx config:set defaultusername=${scratchResponse.value.username}`, this, { + await execCommand(`sf config set target-org=${scratchResponse.value.username}`, this, { output: true, fail: true, }); uxLog( + "action", this, - c.cyan(`Selected and opening scratch org ${c.green(scratchResponse.value.instanceUrl)} with user ${c.green(scratchResponse.value.username)}`), + c.cyan( + `Selected and opening scratch org ${c.green(scratchResponse.value.instanceUrl)} with user ${c.green( + scratchResponse.value.username + )}` + ) ); // Open selected org - await execSfdxJson("sf org open", this, { + uxLog("action", this, c.cyan('Opening scratch org in browser...')); + await execSfdxJson('sf org open', this, { fail: true, output: false, debug: this.debugMode, }); // Trigger a status refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshStatus" }); + WebSocketClient.sendRefreshStatusMessage(); } } // Select or create sandbox - async selectOrCreateSandbox(branchName, config) { - const hubOrgUsername = this?.hubOrg?.getUsername(); - const sandboxOrgList = await MetadataUtils.listLocalOrgs("devSandbox", { devHubUsername: hubOrgUsername }); + async selectOrCreateSandbox(branchName, 
config, flags, selectedOrgType: "sandbox" | "currentOrg") { + let openOrg = false; + let orgUsername = ""; + if (selectedOrgType === "currentOrg") { + openOrg = true; + orgUsername = flags['target-org'].getUsername(); + await makeSureOrgIsConnected(orgUsername); + } + else { + const promptRes = await this.promptSandbox(flags, branchName); + orgUsername = promptRes.orgUsername; + openOrg = promptRes.openOrg; + } + + // Initialize / Update existing sandbox if available + if (!(config.sharedDevSandboxes === true)) { + const initSandboxResponse = await prompts({ + type: 'select', + name: 'value', + message: c.cyanBright( + `Do you want to update the sandbox according to git branch "${this.targetBranch}" current state ?` + ), + description: 'Choose whether to sync your sandbox with the latest changes from the target branch (packages, sources, permission sets, apex scripts, initial data)', + placeholder: 'Select sync option', + choices: [ + { + title: '🧑‍🤝‍🧑 No, continue working on my current sandbox state', + value: 'no', + description: 'Use if you are multiple users in the same SB, or have have uncommitted changes in your sandbox', + }, + { + title: '☢️ Yes, please try to update my sandbox !', + value: 'init', + description: `Integrate new updates from the parent branch "${this.targetBranch}" before working on your new User Story. WARNING: Will overwrite uncommitted changes in your org !`, + }, + ], + }); + let initSandbox = initSandboxResponse.value === 'init'; + // Ask the user if he's really sure of what he's doing ! + if (initSandbox) { + const promptConfirm = await prompts({ + type: 'confirm', + message: c.cyanBright( + `Are you really sure you want to update the dev sandbox with the state of git branch ${this.targetBranch} ? 
This will overwrite setup updates that you or other users have not committed yet` + ), + description: 'Confirm that you want to reset your sandbox to match the target branch state', + }); + initSandbox = promptConfirm.value === true; + } + if (initSandbox) { + let initSourcesErr: any = null; + let initSandboxErr: any = null; + try { + if (config.installedPackages) { + await installPackages(config.installedPackages || [], orgUsername); + } + try { + // Continue initialization even if push did not work... it could work and be not such a problem :) + uxLog("action", this, c.cyan('Resetting local SF Cli tracking...')); + await execCommand(`sf project delete tracking --no-prompt -o ${orgUsername}`, this, { + fail: false, + output: true, + }); + await initOrgMetadatas(config, orgUsername, orgUsername, {}, this.debugMode, { scratch: false }); + } catch (e1) { + initSourcesErr = e1; + } + await initPermissionSetAssignments(config.initPermissionSets || [], orgUsername); + await initApexScripts(config.scratchOrgInitApexScripts || [], orgUsername); + await initOrgData(path.join('.', 'scripts', 'data', 'ScratchInit'), orgUsername); + } catch (e) { + initSandboxErr = e; + } + if (initSandboxErr) { + uxLog( + "log", + this, + c.grey('Error(s) while initializing sandbox: ' + initSandboxErr.message + '\n' + initSandboxErr.stack) + ); + uxLog( + "warning", + this, + c.yellow( + 'Your sandbox may not be completely initialized from git. 
You can send the error above to your release manager' + ) + ); + } + if (initSourcesErr) { + uxLog( + "log", + this, + c.grey('Error(s) while pushing sources to sandbox: ' + initSourcesErr.message + '\n' + initSourcesErr.stack) + ); + uxLog( + "warning", + this, + c.yellow(`If you really want your sandbox to be up to date with branch ${c.bold(this.targetBranch)}, you may: + - ${c.bold( + 'Fix the errors' + )} (probably by manually updating the target sandbox in setup), then run "New User Story" again and select again the same sandbox + - ${c.bold('Refresh your sandbox')} (ask your release manager if you don't know how) + Else, you can start working now (but beware of conflicts ^^):) + `) + ); + } + } + } + // Open of if not already open + if (openOrg === true) { + const openOrgRes = await prompts({ + type: 'confirm', + name: 'value', + message: c.cyanBright(`Do you want to open org ${c.green(orgUsername)} in your browser ?`), + description: 'Open the sandbox org in your web browser to start working on it', + initial: true + }); + if (openOrgRes.value === true) { + uxLog("action", this, c.cyan(`Opening org ${c.green(orgUsername)} so you can work in it...`)); + await execSfdxJson('sf org open', this, { + fail: true, + output: false, + debug: this.debugMode, + }); + } + } + + // Trigger a status refresh on VsCode WebSocket Client + WebSocketClient.sendRefreshStatusMessage(); + } + + private async promptSandbox(flags: any, branchName: any) { + const hubOrgUsername = flags['target-dev-hub']?.getUsername(); + const sandboxOrgList = await MetadataUtils.listLocalOrgs('devSandbox', { devHubUsername: hubOrgUsername }); + const defaultSandbox = sandboxOrgList.find((org: any) => { + return org.username === flags['target-org']?.getUsername(); + }); const sandboxResponse = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', message: c.cyanBright( `Please select a sandbox org to use for your branch ${c.green( - branchName, - )} (if you want to 
avoid conflicts, you should often refresh your sandbox)`, + branchName + )} (if you want to avoid conflicts, you should often refresh your sandbox)` ), - initial: 0, + description: 'Choose an existing sandbox or connect to a new one for this branch', + placeholder: 'Select sandbox', + default: defaultSandbox ? defaultSandbox : undefined, choices: [ ...[ { - title: c.yellow("🌐 Connect to a sandbox not appearing in this list"), - description: "Login in web browser to your source-tracked sandbox", - value: "connectSandbox", + title: c.yellow('🌐 Connect to a sandbox not appearing in this list'), + description: 'Login in web browser to your source-tracked sandbox', + value: 'connectSandbox', }, /* { title: c.yellow("Create new sandbox from another sandbox or production org (ALPHA -> UNSTABLE, DO NOT USE YET)"), @@ -338,120 +554,50 @@ Under the hood, it can: ], ...sandboxOrgList.map((sandboxOrg: any) => { return { - title: `☁️ Use sandbox org ${c.yellow(sandboxOrg.username || sandboxOrg.alias)}`, - description: sandboxOrg.instanceUrl, + title: sandboxOrg.instanceUrl, + description: `☁️ Use sandbox org ${c.yellow(sandboxOrg.username || sandboxOrg.alias)}`, value: sandboxOrg, }; }), ], }); - // Remove scratch org info in user config - await setConfig("user", { - scratchOrgAlias: null, - scratchOrgUsername: null, - }); - let orgUsername = ""; - let openOrg = false; + // Remove scratch org info in user config if necessary + const config = await getConfig("user"); + if (config.scratchOrgAlias || config.scratchOrgUsername) { + await setConfig('user', { + scratchOrgAlias: null, + scratchOrgUsername: null, + }); + } + // Connect to a sandbox - if (sandboxResponse.value === "connectSandbox") { + let orgUsername = ''; + let openOrg = false; + if (sandboxResponse.value === 'connectSandbox') { const slctdOrg = await promptOrg(this, { setDefault: true, devSandbox: true }); orgUsername = slctdOrg.username; } + // Create a new sandbox ( NOT WORKING YET, DO NOT USE) - else if 
(sandboxResponse.value === "newSandbox") { + else if (sandboxResponse.value === 'newSandbox') { const createResult = await SandboxCreate.run(); if (createResult == null) { - throw new SfdxError("Unable to create sandbox org"); + throw new SfError('Unable to create sandbox org'); } - orgUsername = createResult.username; + orgUsername = (createResult as any).username; } + // Selected sandbox from list else { - await execCommand(`sfdx config:set defaultusername=${sandboxResponse.value.username}`, this, { + await makeSureOrgIsConnected(sandboxResponse.value); + uxLog("action", this, c.cyan(`Setting sandbox org ${c.green(sandboxResponse.value.instanceUrl)} (${sandboxResponse.value.username}) as default org...`)); + await execCommand(`sf config set target-org=${sandboxResponse.value.username}`, this, { output: true, fail: true, }); orgUsername = sandboxResponse.value.username; openOrg = true; } - // Initialize / Update existing sandbox if required - const initSandboxResponse = await prompts({ - type: "select", - name: "value", - message: c.cyanBright( - `Do you want to update the sandbox according to git branch "${this.targetBranch}" current state ? (packages,SOURCES,permission set assignments,apex scripts,initial data)`, - ), - choices: [ - { - title: "🧑‍🤝‍🧑 No, continue working on my current sandbox state", - value: "no", - description: "Use if you are multiple users in the same SB, or have have uncommitted changes in your sandbox", - }, - { - title: "☢️ Yes, please try to update my sandbox !", - value: "init", - description: `Integrate new updates from the parent branch "${this.targetBranch}" before working on your new task. WARNING: Will overwrite uncommitted changes in your org !`, - }, - ], - }); - let initSandbox = initSandboxResponse.value === "init"; - // Ask the user if he's really sure of what he's doing ! 
- if (initSandbox) { - const promptConfirm = await prompts({ - type: "confirm", - message: c.cyanBright( - `Are you really sure you want to update the dev sandbox with the state of git branch ${this.targetBranch} ? This will overwrite setup updates that you or other users have not committed yet`, - ), - }); - initSandbox = promptConfirm.value === true; - } - if (initSandbox) { - let initSourcesErr: any = null; - let initSandboxErr: any = null; - try { - if (config.installedPackages) { - await installPackages(config.installedPackages || [], orgUsername); - } - try { - // Continue initialization even if push did not work... it could work and be not such a problem :) - uxLog(this, c.cyan("Resetting local sfdx tracking...")); - await execCommand(`sfdx force:source:tracking:clear --noprompt -u ${orgUsername}`, this, { fail: false, output: true }); - await initOrgMetadatas(config, orgUsername, orgUsername, {}, this.debugMode, { scratch: false }); - } catch (e1) { - initSourcesErr = e1; - } - await initPermissionSetAssignments(config.initPermissionSets || [], orgUsername); - await initApexScripts(config.scratchOrgInitApexScripts || [], orgUsername); - await initOrgData(path.join(".", "scripts", "data", "ScratchInit"), orgUsername); - } catch (e) { - initSandboxErr = e; - } - if (initSandboxErr) { - uxLog(this, c.grey("Error(s) while initializing sandbox: " + initSandboxErr.message + "\n" + initSandboxErr.stack)); - uxLog(this, c.yellow("Your sandbox may not be completely initialized from git. 
You can send the error above to your release manager")); - } - if (initSourcesErr) { - uxLog(this, c.grey("Error(s) while pushing sources to sandbox: " + initSourcesErr.message + "\n" + initSourcesErr.stack)); - uxLog( - this, - c.yellow(`If you really want your sandbox to be up to date with branch ${c.bold(this.targetBranch)}, you may: - - ${c.bold("Fix the errors")} (probably by manually updating the target sandbox in setup), then run new task again and select again the same sandbox - - ${c.bold("Refresh your sandbox")} (ask your release manager if you don't know how) - Else, you can start working now (but beware of conflicts ^^):) - `), - ); - } - } - // Open of if not already open - if (openOrg === true) { - await execSfdxJson("sf org open", this, { - fail: true, - output: false, - debug: this.debugMode, - }); - } - - // Trigger a status refresh on VsCode WebSocket Client - WebSocketClient.sendMessage({ event: "refreshStatus" }); + return { orgUsername, openOrg }; } -} +} \ No newline at end of file diff --git a/src/commands/hardis/work/refresh.ts b/src/commands/hardis/work/refresh.ts index 37e3d704d..b7748d1c4 100644 --- a/src/commands/hardis/work/refresh.ts +++ b/src/commands/hardis/work/refresh.ts @@ -1,56 +1,82 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execCommand, getCurrentGitBranch, git, uxLog } from "../../../common/utils"; -import { forceSourcePull, forceSourcePush } from "../../../common/utils/deployUtils"; -import { prompts } from "../../../common/utils/prompts"; -import { getConfig } from "../../../config"; +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execCommand, 
getCurrentGitBranch, git, uxLog } from '../../../common/utils/index.js'; +import { forceSourcePull, forceSourcePush } from '../../../common/utils/deployUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { getConfig } from '../../../config/index.js'; -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class RefreshTask extends SfCommand { + public static title = 'Refresh User Story branch'; -export default class RefreshTask extends SfdxCommand { - public static title = "Refresh work task"; + public static description = ` +## Command Behavior - public static description = messages.getMessage("refreshWorkTask"); +**Refreshes your local Git branch and Salesforce org with the latest content from another Git branch.** - public static examples = ["$ sfdx hardis:work:refresh"]; +This command is designed to help developers keep their local development environment synchronized with changes made by other team members. It automates the process of pulling updates from a designated branch, merging them into your current working branch, and then pushing those changes to your scratch org or source-tracked sandbox. + +Key functionalities: + +- **Pre-Merge Check:** Prompts the user to confirm that they have saved their current work before proceeding with the merge, preventing accidental data loss. +- **Branch Selection:** Allows you to select a target Git branch (e.g., \`integration\`, \`preprod\`) from which to pull updates. 
+- **Git Operations:** Performs a series of Git operations: + - Pulls the latest version of the selected merge branch. + - Stashes your uncommitted local changes before merging. + - Merges the selected branch into your current local branch. + - Handles merge conflicts interactively, prompting the user to resolve them. + - Restores your stashed changes after the merge. +- **Org Synchronization:** Pushes the updated local branch content to your scratch org or source-tracked sandbox, ensuring your org reflects the latest merged code. + +
+Technical explanations + +The command's technical implementation involves: + +- **Configuration Loading:** It retrieves project configurations using \`getConfig\` to determine the default development branch. +- **Git Integration:** Extensively uses \`simple-git\` (\`git()\`) for various Git operations: + - \`git().branch()\`: Lists local and remote branches. + - \`git().stash()\`: Saves and restores uncommitted changes. + - \`git().fetch()\`: Fetches updates from remote repositories. + - \`git().checkout()\`: Switches between branches. + - \`git().pull()\`: Pulls changes from a remote branch. + - \`git().merge()\`: Merges one branch into another, handling conflicts. +- **Interactive Prompts:** Uses the \`prompts\` library to guide the user through confirmations (e.g., saving work) and branch selection. +- **Salesforce CLI Integration:** It uses \`forceSourcePull\` to pull changes from the scratch org and \`forceSourcePush\` to push changes to the scratch org. +- **Error Handling:** Includes robust error handling for Git operations (e.g., merge conflicts) and provides guidance to the user for resolution. +- **Environment Variable Check:** Checks for an \`EXPERIMENTAL\` environment variable to gate access to this command, indicating it might not be fully stable. +
+`; + + public static examples = ['$ sf hardis:work:refresh']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - nopull: flags.boolean({ - char: "n", + public static flags: any = { + nopull: Flags.boolean({ + char: 'n', default: false, - description: "No scratch pull before save (careful if you use that!)", + description: 'No scratch pull before save (careful if you use that!)', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; protected debugMode = false; protected noPull = false; @@ -58,26 +84,35 @@ export default class RefreshTask extends SfdxCommand { /* jscpd:ignore-end */ public async run(): Promise { - const config = await getConfig("project"); - if (config.get("EXPERIMENTAL", "") !== "true") { - const msg = "This command is not stable enough to be used. 
Use EXPERIMENTAL=true to use it anyway"; - uxLog(this, c.yellow(msg)); + const { flags } = await this.parse(RefreshTask); + const config = await getConfig('project'); + if (config.get('EXPERIMENTAL', '') !== 'true') { + const msg = 'This command is not stable enough to be used. Use EXPERIMENTAL=true to use it anyway'; + uxLog("warning", this, c.yellow(msg)); return { outputString: msg }; } - this.noPull = this.flags.nopull || false; - uxLog(this, c.cyan("This command will refresh your git branch and your org with the content of another git branch")); + this.noPull = flags.nopull || false; + uxLog( + "action", + this, + c.cyan('This command will refresh your git branch and your org with the content of another git branch') + ); // Verify that the user saved his/her work before merging another branch const savePromptRes = await prompts({ - type: "select", - message: c.cyanBright(`This is a SENSITIVE OPERATION. Did you run ${c.green("hardis:work:save")} BEFORE running this command ?`), - name: "value", + type: 'select', + message: c.cyanBright( + `This is a SENSITIVE OPERATION. 
Did you run ${c.green('hardis:work:save')} BEFORE running this command ?` + ), + name: 'value', + description: 'Confirm that you have saved your current work before proceeding with this sensitive operation', + placeholder: 'Select an option', choices: [ { - title: "Yes I did save my current updates before merging updates from others !", + title: 'Yes I did save my current updates before merging updates from others !', value: true, }, - { title: "No, I did not, I will do that right now", value: false }, + { title: 'No, I did not, I will do that right now', value: false }, ], }); if (savePromptRes.value !== true) { @@ -85,7 +120,7 @@ export default class RefreshTask extends SfdxCommand { } // Select branch to merge const localBranch = await getCurrentGitBranch(); - const branchSummary = await git().branch(["-r"]); + const branchSummary = await git().branch(['-r']); const branchChoices = [ { title: `${config.developmentBranch} (recommended)`, @@ -93,66 +128,74 @@ export default class RefreshTask extends SfdxCommand { }, ]; for (const branchName of Object.keys(branchSummary.branches)) { - const branchNameLocal = branchName.replace("origin/", ""); + const branchNameLocal = branchName.replace('origin/', ''); if (branchNameLocal !== config.developmentBranch) { branchChoices.push({ title: branchNameLocal, value: branchNameLocal }); } } const branchRes = await prompts({ - type: "select", + type: 'select', message: `Please select the branch that you want to merge in your current branch ${c.green(localBranch)}`, - name: "value", + name: 'value', + description: 'Choose which branch to merge into your current working branch', + placeholder: 'Select a branch to merge', choices: branchChoices, }); this.mergeBranch = branchRes.value; // Run refresh of local branch try { - return await this.runRefresh(localBranch); + return await this.runRefresh(localBranch, flags); } catch (e) { - uxLog(this, c.yellow("There has been a merge conflict or a technical error, please contact a 
Developer for help !")); + uxLog( + "warning", + this, + c.yellow('There has been a merge conflict or a technical error, please contact a Developer for help !') + ); throw e; } } - private async runRefresh(localBranch): Promise { - this.debugMode = this.flags.debug || false; + private async runRefresh(localBranch, flags): Promise { + this.debugMode = flags.debug || false; uxLog( + "action", this, c.cyan( `sfdx-hardis will refresh your local branch ${c.green(localBranch)} and your local scratch org ${c.green( - this.org.getUsername(), - )} with the latest state of ${c.green(this.mergeBranch)}`, - ), + flags['target-org'].getUsername() + )} with the latest state of ${c.green(this.mergeBranch)}` + ) ); if (localBranch === this.mergeBranch) { - throw new SfdxError("[sfdx-hardis] You can not refresh from the same branch"); + throw new SfError('[sfdx-hardis] You can not refresh from the same branch'); } // Pull from scratch org if (this.noPull) { - uxLog(this, c.cyan(`Skipped pull from scratch org`)); + uxLog("action", this, c.cyan(`Skipped pull from scratch org`)); } else { - uxLog(this, c.cyan(`Pulling sources from scratch org ${this.org.getUsername()}...`)); - await forceSourcePull(this.org.getUsername(), this.debugMode); + uxLog("action", this, c.cyan(`Pulling sources from scratch org ${flags['target-org'].getUsername()}...`)); + await forceSourcePull(flags['target-org'].getUsername(), this.debugMode); } // Stash uxLog( + "action", this, c.cyan( - `Stashing your uncommitted updates in ${c.green(localBranch)} before merging ${c.green(this.mergeBranch)} into your local branch ${c.green( - localBranch, - )}...`, - ), + `Stashing your uncommitted updates in ${c.green(localBranch)} before merging ${c.green( + this.mergeBranch + )} into your local branch ${c.green(localBranch)}...` + ) ); - const stashResult = await git({ output: true }).stash(["save", `[sfdx-hardis] Stash of ${localBranch}`]); - const stashed = stashResult.includes("Saved working directory"); + const 
stashResult = await git({ output: true }).stash(['save', `[sfdx-hardis] Stash of ${localBranch}`]); + const stashed = stashResult.includes('Saved working directory'); // Pull most recent version of development branch - uxLog(this, c.cyan(`Pulling most recent version of remote branch ${c.green(this.mergeBranch)}...`)); + uxLog("action", this, c.cyan(`Pulling most recent version of remote branch ${c.green(this.mergeBranch)}...`)); await git({ output: true }).fetch(); - await git({ output: true }).checkout(this.mergeBranch); + await git({ output: true }).checkout(this.mergeBranch || ''); const pullRes = await git({ output: true }).pull(); // Go back to current work branch await git({ output: true }).checkout(localBranch); @@ -162,21 +205,24 @@ export default class RefreshTask extends SfdxCommand { output: true, }) ).stdout; - const localRef = (await execCommand(`git merge-base ${this.mergeBranch} ${localBranch}`, this, { output: true })).stdout; + const localRef = (await execCommand(`git merge-base ${this.mergeBranch} ${localBranch}`, this, { output: true })) + .stdout; // Merge into current branch if necessary if (pullRes.summary.changes > 0 || mergeRef !== localRef) { // Create new commit from merge - uxLog(this, c.cyan(`Creating a merge commit of ${c.green(this.mergeBranch)} within ${c.green(localBranch)}...`)); - let mergeSummary = await git({ output: true }).merge([this.mergeBranch]); + uxLog("action", this, c.cyan(`Creating a merge commit of ${c.green(this.mergeBranch)} within ${c.green(localBranch)}...`)); + let mergeSummary = await git({ output: true }).merge([this.mergeBranch || '']); while (mergeSummary.failed) { const mergeResult = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', message: c.cyanBright( - "There are merge conflicts, please solve them, then select YES here. Otherwise, exit the script and call a developer for help :)", + 'There are merge conflicts, please solve them, then select YES here. 
Otherwise, exit the script and call a developer for help :)' ), + description: 'Choose your next action after attempting to resolve merge conflicts', + placeholder: 'Select an option', choices: [ - { value: true, title: "If finished to merge conflicts" }, + { value: true, title: 'If finished to merge conflicts' }, { value: false, title: "I can't merge conflicts, I give up for now", @@ -184,24 +230,30 @@ export default class RefreshTask extends SfdxCommand { ], }); if (mergeResult.value === false) { - uxLog(this, "Refresh script stopped by user"); + uxLog("other", this, 'Refresh script stopped by user'); process.exit(0); } - mergeSummary = await git({ output: true }).merge(["--continue"]); + mergeSummary = await git({ output: true }).merge(['--continue']); } } else { - uxLog(this, c.cyan(`Local branch ${c.green(localBranch)} is already up to date with ${c.green(this.mergeBranch)}`)); + uxLog( + "action", + this, + c.cyan(`Local branch ${c.green(localBranch)} is already up to date with ${c.green(this.mergeBranch)}`) + ); } // Restoring stash if (stashed) { - uxLog(this, c.cyan(`Restoring stash into your local branch ${c.green(localBranch)}...`)); - await git({ output: true }).stash(["pop"]); + uxLog("action", this, c.cyan(`Restoring stash into your local branch ${c.green(localBranch)}...`)); + await git({ output: true }).stash(['pop']); } // Push new branch state to scratch org - await forceSourcePush(this.org.getUsername(), this, this.debugMode, { conn: this.org.getConnection() }); + await forceSourcePush(flags['target-org'].getUsername(), this, this.debugMode, { + conn: flags['target-org'].getConnection(), + }); // Return an object to be displayed with --json - return { outputString: "Refreshed the task & org" }; + return { outputString: 'Refreshed the User Story branch & org' }; } } diff --git a/src/commands/hardis/work/resetselection.ts b/src/commands/hardis/work/resetselection.ts index 7d688a04f..a585270c3 100644 --- a/src/commands/hardis/work/resetselection.ts 
+++ b/src/commands/hardis/work/resetselection.ts @@ -1,78 +1,97 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages, SfdxError } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import { execCommand, getCurrentGitBranch, git, uxLog } from "../../../common/utils"; -import { selectTargetBranch } from "../../../common/utils/gitUtils"; -import { setConfig } from "../../../config"; -import { prompts } from "../../../common/utils/prompts"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class RebuildSelection extends SfdxCommand { - public static title = "Select again"; - - public static description = `Resets the selection that we want to add in the merge request - -Calls a soft git reset behind the hood +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages, SfError } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import { execCommand, getCurrentGitBranch, git, uxLog } from '../../../common/utils/index.js'; +import { selectTargetBranch } from '../../../common/utils/gitUtils.js'; +import { setConfig } from '../../../config/index.js'; +import { prompts } from '../../../common/utils/prompts.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class RebuildSelection extends SfCommand { + public static title = 'Select again'; + + public static description = ` +## Command Behavior + +**Resets the local Git repository to allow for a new 
selection of files to be included in a merge request.** + +This command is designed to be used when you need to re-evaluate which changes should be part of your next merge request. It performs a soft Git reset, effectively unstaging all committed changes since the last merge with the target branch, and then cleans up any generated files. + +Key functionalities: + +- **Target Branch Selection:** Prompts you to select the target branch of your current or future merge request. +- **Soft Git Reset:** Performs a \`git reset --soft\` operation to uncommit changes, moving the HEAD pointer back but keeping the changes in your working directory. +- **Generated File Cleanup:** Resets and checks out \`manifest/package.xml\` and \`manifest/destructiveChanges.xml\` to their state before the reset, ensuring a clean slate for new selections. +- **Force Push Authorization:** Sets a flag in your user configuration (\`canForcePush: true\`) to allow a force push in the subsequent \`hardis:work:save\` command, as the history will have been rewritten. + +
+Technical explanations + +The command's technical implementation involves: + +- **Git Integration:** Uses \`simple-git\` (\`git()\`) to interact with the Git repository: + - \`git().branch()\`: Retrieves information about local and remote branches. + - \`git().log()\`: Fetches the commit history to determine which commits to reset. + - \`git().reset()\`: Performs the soft reset operation. + - \`git().checkout()\`: Resets specific files (\`package.xml\`, \`destructiveChanges.xml\`) to their previous state. + - \`git().status()\`: Displays the current status of the Git repository after the reset. +- **Interactive Prompts:** Uses the \`prompts\` library to confirm the reset operation with the user and to select the target branch. +- **Configuration Management:** Updates the user's configuration (\`.sfdx-hardis.yml\`) using \`setConfig\` to set the \`canForcePush\` flag. +- **Error Handling:** Includes a check to prevent resetting protected branches. +
`; - public static examples = ["$ sfdx hardis:work:resetsave"]; + public static examples = ['$ sf hardis:work:resetsave']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - debug: flags.boolean({ - char: "d", + public static flags: any = { + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; protected debugMode = false; /* jscpd:ignore-end */ public async run(): Promise { - this.debugMode = this.flags.debug || false; + const { flags } = await this.parse(RebuildSelection); + this.debugMode = flags.debug || false; - const targetBranch = await selectTargetBranch({ message: "Please select the target branch of your current or future merge request" }); + const targetBranch = await selectTargetBranch({ + message: 'Please select the target branch of your current or future merge request', + }); - uxLog(this, c.cyan(`This script will rebuild selection that you will want to merge into 
${c.green(targetBranch)}`)); + uxLog("action", this, c.cyan(`This script will rebuild selection that you will want to merge into ${c.green(targetBranch)}`)); const currentGitBranch = await getCurrentGitBranch(); if (currentGitBranch === targetBranch) { - throw new SfdxError(c.red("[sfdx-hardis] You can not revert commits of a protected branch !")); + throw new SfError(c.red('[sfdx-hardis] You can not revert commits of a protected branch !')); } // Ask user to confirm const confirm = await prompts({ - type: "confirm", + type: 'confirm', message: `This command will git reset (soft) your branch ${currentGitBranch}. You will need to select and commit again your files. Are you sure ?`, + description: 'Confirm that you want to perform a soft git reset on your current branch', }); if (confirm.value === false) { - throw new SfdxError(c.red("[sfdx-hardis] Cancelled by user")); + throw new SfError(c.red('[sfdx-hardis] Cancelled by user')); } // List all commits since the branch creation @@ -80,20 +99,20 @@ Calls a soft git reset behind the hood const commitstoReset = logResult.all; const commitsToResetNumber = commitstoReset.length; // Reset commits - await git({ output: true }).reset(["--soft", `HEAD~${commitsToResetNumber}`]); - await setConfig("user", { canForcePush: true }); + await git({ output: true }).reset(['--soft', `HEAD~${commitsToResetNumber}`]); + await setConfig('user', { canForcePush: true }); // unstage files - await execCommand("git reset", this, { + await execCommand('git reset', this, { output: true, fail: true, debug: this.debugMode, }); // await git({output:true}).reset(); does not work, let's use direct command - await git({ output: true }).checkout(["--", "manifest/package.xml"]); - await git({ output: true }).checkout(["--", "manifest/destructiveChanges.xml"]); + await git({ output: true }).checkout(['--', 'manifest/package.xml']); + await git({ output: true }).checkout(['--', 'manifest/destructiveChanges.xml']); await git({ output: true }).status(); 
- uxLog(this, c.cyan("The following items are not available for selection")); - uxLog(this, c.cyan("Selection has been reset")); + uxLog("action", this, c.cyan('The following items are now available for selection')); + uxLog("action", this, c.cyan('Selection has been reset')); // Return an object to be displayed with --json - return { outputString: "Reset selection pocessed" }; + return { outputString: 'Reset selection pocessed' }; } } diff --git a/src/commands/hardis/work/save.ts b/src/commands/hardis/work/save.ts index 443ca74be..986ec3db9 100644 --- a/src/commands/hardis/work/save.ts +++ b/src/commands/hardis/work/save.ts @@ -1,43 +1,62 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as open from "open"; -import * as path from "path"; -import { createTempDir, execCommand, getCurrentGitBranch, git, gitHasLocalUpdates, normalizeFileStatusPath, uxLog } from "../../../common/utils"; -import { exportData } from "../../../common/utils/dataUtils"; -import { forceSourcePull } from "../../../common/utils/deployUtils"; -import { callSfdxGitDelta, getGitDeltaScope, selectTargetBranch } from "../../../common/utils/gitUtils"; -import { prompts } from "../../../common/utils/prompts"; -import { parseXmlFile, writeXmlFile } from "../../../common/utils/xmlUtils"; -import { WebSocketClient } from "../../../common/websocketClient"; -import { CONSTANTS, getConfig, setConfig } from "../../../config"; -import CleanReferences from "../project/clean/references"; -import CleanXml from "../project/clean/xml"; - -// Initialize Messages with the current plugin directory -Messages.importMessagesDirectory(__dirname); - -// Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); - -export default class SaveTask extends SfdxCommand { - public static title = "Save work task"; - - public static description = `When a work task is completed, guide user to create a merge request - -Advanced instructions in [Publish a task](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) - -- Generate package-xml diff using sfdx-git-delta -- Automatically update \`manifest/package.xml\` and \`manifest/destructiveChanges.xml\` according to the committed updates -- Automatically Clean XML files using \`.sfdx-hardis.yml\` properties - - \`autocleantypes\`: List of auto-performed sources cleanings, available on command [hardis:project:clean:references](https://sfdx-hardis.cloudity.com/hardis/project/clean/references/) - - \`autoRemoveUserPermissions\`: List of userPermission to automatically remove from profile metadatas - -Example: +import { SfCommand, Flags, requiredOrgFlagWithDeprecations } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import c from 'chalk'; +import fs from 'fs-extra'; +import open from 'open'; +import * as path from 'path'; +import { + createTempDir, + execCommand, + getCurrentGitBranch, + git, + gitHasLocalUpdates, + normalizeFileStatusPath, + uxLog, +} from '../../../common/utils/index.js'; +import { exportData } from '../../../common/utils/dataUtils.js'; +import { forceSourcePull } from '../../../common/utils/deployUtils.js'; +import { callSfdxGitDelta, getGitDeltaScope, selectTargetBranch } from '../../../common/utils/gitUtils.js'; +import { prompts } from '../../../common/utils/prompts.js'; +import { + appendPackageXmlFilesContent, + parseXmlFile, + removePackageXmlFilesContent, + writeXmlFile, +} from '../../../common/utils/xmlUtils.js'; 
+import { WebSocketClient } from '../../../common/websocketClient.js'; +import { CONSTANTS, getApiVersion, getConfig, setConfig } from '../../../config/index.js'; +import CleanReferences from '../project/clean/references.js'; +import CleanXml from '../project/clean/xml.js'; +import { GitProvider } from '../../../common/gitProvider/index.js'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); + +export default class SaveTask extends SfCommand { + public static title = 'Save User Story'; + + public static description = ` +## Command Behavior + +**Guides the user through the process of saving their work, preparing it for a Merge Request (also named Pull Request), and pushing changes to the remote Git repository.** + +This command automates several critical steps involved in finalizing a development User Story and integrating it into the main codebase. It ensures that your local changes are properly synchronized, cleaned, and committed before being pushed. + +Key functionalities include: + +- **Git Status Management:** Ensures a clean Git working directory by handling ongoing merges and unstaging files. +- **Org Synchronization (Optional):** Prompts the user to pull the latest metadata updates from their scratch org or source-tracked sandbox, ensuring local files reflect the org's state. +- **Package.xml Updates:** Automatically generates \`package.xml\` and \`destructiveChanges.xml\` files based on the Git delta between your current branch and the target branch, reflecting added, modified, and deleted metadata. +- **Automated Source Cleaning:** Applies predefined cleaning operations to your local Salesforce sources, such as removing unwanted references, minimizing profiles, or cleaning XML files based on configurations in your \`.sfdx-hardis.yml\`. 
+ - \`autoCleanTypes\`: A list of automated source cleanings, configurable via [hardis:project:clean:references]($\{CONSTANTS.DOC_URL_ROOT}/hardis/project/clean/references/). + - \`autoRemoveUserPermissions\`: A list of user permissions to automatically remove from profile metadata. +- **Deployment Plan Generation:** Builds an automated deployment plan based on the updated \`package.xml\` and configured deployment splits. +- **Commit and Push:** Guides the user to commit the changes and push them to the remote Git repository, optionally handling force pushes if a branch reset occurred. +- **Merge Request Guidance:** Provides information and links to facilitate the creation of a merge request after the changes are pushed. + +Example \`.sfdx-hardis.yml\` configuration: \`\`\`yaml autoCleanTypes: @@ -58,60 +77,69 @@ autoRemoveUserPermissions: - WorkCalibrationUser \`\`\` -- Push commit to server - `; +Advanced instructions are available in the [Publish a User Story documentation]($\{CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-publish-task/). - public static examples = ["$ sfdx hardis:work:task:save", "$ sfdx hardis:work:task:save --nopull --nogit --noclean"]; +
+Technical explanations + +The command's technical implementation involves a series of orchestrated steps: + +- **Git Integration:** Extensively uses the \`git\` utility for status checks, adding files, committing, and pushing. It also leverages \`sfdx-git-delta\` for generating metadata differences between Git revisions. +- **Interactive Prompts:** Employs the \`prompts\` library to interact with the user for decisions like pulling sources or pushing commits. +- **Configuration Management:** Reads and updates project and user configurations using \`getConfig\` and \`setConfig\` to store preferences and deployment plans. +- **Metadata Synchronization:** Calls \`forceSourcePull\` to retrieve metadata from the org and \`callSfdxGitDelta\` to generate \`package.xml\` and \`destructiveChanges.xml\` based on Git changes. +- **XML Manipulation:** Utilizes \`appendPackageXmlFilesContent\`, \`removePackageXmlFilesContent\`, \`parseXmlFile\`, and \`writeXmlFile\` for modifying \`package.xml\` and \`destructiveChanges.xml\` files. +- **Automated Cleaning:** Integrates with \`CleanReferences.run\` and \`CleanXml.run\` commands to perform automated cleaning operations on the Salesforce source files. +- **Deployment Plan Building:** Dynamically constructs a deployment plan by analyzing the \`package.xml\` content and applying configured deployment splits. +- **WebSocket Communication:** Uses \`WebSocketClient.sendRefreshStatusMessage\` to notify connected VS Code clients about status updates. +- **External Tool Integration:** Requires the \`sfdx-git-delta\` plugin to be installed for its core functionality. +
+`; + + public static examples = ['$ sf hardis:work:task:save', '$ sf hardis:work:task:save --nopull --nogit --noclean']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - nopull: flags.boolean({ - char: "n", + public static flags: any = { + nopull: Flags.boolean({ + char: 'n', default: false, - description: "No scratch pull before save", + description: 'No scratch pull before save', }), - nogit: flags.boolean({ - char: "g", + nogit: Flags.boolean({ + char: 'g', default: false, - description: "No automated git operations", + description: 'No automated git operations', }), - noclean: flags.boolean({ - char: "c", + noclean: Flags.boolean({ + char: 'c', default: false, - description: "No cleaning of local sources", + description: 'No cleaning of local sources', }), - auto: flags.boolean({ + auto: Flags.boolean({ default: false, - description: "No user prompts (when called from CI for example)", + description: 'No user prompts (when called from CI for example)', }), - targetbranch: flags.string({ - description: "Name of the Merge Request target branch. Will be guessed or prompted if not provided.", + targetbranch: Flags.string({ + description: 'Name of the Merge Request target branch. 
Will be guessed or prompted if not provided.', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), - }; - - // Comment this out if your command does not require an org username - protected static requiresUsername = true; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = true; + 'target-org': requiredOrgFlagWithDeprecations, + }; // Set this to true if your command requires a project workspace; 'requiresProject' is false by default + public static requiresProject = true; // List required plugins, their presence will be tested before running the command - protected static requiresSfdxPlugins = ["sfdx-essentials", "sfdx-git-delta"]; + protected static requiresSfdxPlugins = ['sfdx-git-delta']; protected debugMode = false; protected noPull = false; @@ -120,43 +148,51 @@ autoRemoveUserPermissions: protected auto = false; protected gitUrl: string; protected currentBranch: string; - protected targetBranch: string; + protected targetBranch: string | null; /* jscpd:ignore-end */ public async run(): Promise { - this.noPull = this.flags.nopull || false; - this.noGit = this.flags.nogit || false; - this.noClean = this.flags.noclean || false; - this.auto = this.flags.auto || false; - this.targetBranch = this.flags.targetbranch || null; - 
this.debugMode = this.flags.debug || false; - const localBranch = await getCurrentGitBranch(); + const { flags } = await this.parse(SaveTask); + this.noPull = flags.nopull || false; + this.noGit = flags.nogit || false; + this.noClean = flags.noclean || false; + this.auto = flags.auto || false; + this.targetBranch = flags.targetbranch || null; + this.debugMode = flags.debug || false; + const localBranch = (await getCurrentGitBranch()) || ''; // Define current and target branches - this.gitUrl = await git().listRemote(["--get-url"]); - this.currentBranch = await getCurrentGitBranch(); + this.gitUrl = (await git().listRemote(['--get-url']))?.trim() || ''; + this.currentBranch = (await getCurrentGitBranch()) || ''; if (this.targetBranch == null) { - const userConfig = await getConfig("user"); + const userConfig = await getConfig('user'); if (userConfig?.localStorageBranchTargets && userConfig?.localStorageBranchTargets[localBranch]) { this.targetBranch = userConfig?.localStorageBranchTargets[localBranch]; } } if (this.targetBranch == null) { - this.targetBranch = await selectTargetBranch({ message: "Please select the target branch of your Merge Request" }); + this.targetBranch = await selectTargetBranch({ + message: `Please select the target branch of your future ${GitProvider.getMergeRequestName(this.gitUrl)}`, + }); } // User log info uxLog( + "action", this, - c.cyan(`This script will prepare the merge request from your local branch ${c.green(localBranch)} to remote ${c.green(this.targetBranch)}`), + c.cyan( + `This command will help to prepare the ${GitProvider.getMergeRequestName(this.gitUrl)} from your branch ${c.green(localBranch)} to major branch ${c.green( + this.targetBranch + )}` + ) ); // Make sure git is clean before starting operations await this.cleanGitStatus(); // Make sure commit is ready before starting operations - const orgPullStateRes = await this.ensureCommitIsReady(); + const orgPullStateRes = await this.ensureCommitIsReady(flags); if 
(orgPullStateRes && orgPullStateRes.outputString) { return orgPullStateRes; } - // Update package.xml files using sfdx git delta + // Update package.xml files using sfdx-git-delta const gitStatusWithConfig = await this.upgradePackageXmlFilesWithDelta(); // Apply cleaning on sources await this.applyCleaningOnSources(); @@ -166,128 +202,141 @@ autoRemoveUserPermissions: // Push new commit(s) await this.manageCommitPush(gitStatusWithConfig, gitStatusAfterDeployPlan); + + let mergeRequestUrl = GitProvider.getMergeRequestCreateUrl(this.gitUrl, this.targetBranch || '', this.currentBranch); + mergeRequestUrl = mergeRequestUrl || this.gitUrl.replace('.git', ''); + // Merge request - uxLog(this, c.cyan(`If your work is ${c.bold("completed")}, you can create a ${c.bold("merge request")}:`)); - uxLog( - this, - c.cyan(`- click on the link in the upper text, below ${c.italic("To create a merge request for " + this.currentBranch + ", visit")}`), - ); - uxLog(this, c.cyan(`- or manually create the merge request on repository UI: ${c.green(this.gitUrl)}`)); - // const remote = await git().listRemote(); - // const remoteMergeRequest = `${remote.replace('.git','-/merge_requests/new')}`; - // await open(remoteMergeRequest, {wait: true}); - uxLog( - this, - c.cyan( - c.bold( - `${c.yellow("When your Merge Request will have been merged:")} - - ${c.yellow("DO NOT REUSE THE SAME BRANCH")} - - Use New task menu (sfdx hardis:work:new), even if you work in the same sandbox or scratch org :)`, - ), - ), - ); - uxLog( - this, - c.cyan( - `If you are working with a ticketing system like JIRA, try to add the FULL URL of the tickets in the MR/PR description -- Good example: https://sfdx-hardis.atlassian.net/browse/CLOUDITY-4 -- Less good example but will work anyway on most cases: CLOUDITY-4 -`, - ), - ); - uxLog( - this, - c.cyan( - `Merge request documentation is available here -> ${c.bold( - "https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/#create-merge-request", - )}`, - ), - 
); + uxLog("action", this, c.cyan(`If your work is ${c.bold('completed')}, you can create a ${c.bold(GitProvider.getMergeRequestName(this.gitUrl))}, otherwise you can push new commits on ${c.green(this.currentBranch)} branch.`)); + let summaryMsg = c.grey(""); + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(mergeRequestUrl, `Create ${GitProvider.getMergeRequestName(this.gitUrl)}`, 'actionUrl'); + } + else { + summaryMsg += c.grey(`- New ${GitProvider.getMergeRequestName(this.gitUrl)} URL: ${c.green(mergeRequestUrl)}\n`); + } + const mergeRequestDoc = `${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-publish-task/#create-merge-request`; + summaryMsg += c.grey(`- Repository: ${c.green(this.gitUrl.replace('.git', ''))}\n`); + summaryMsg += c.grey(`- Source branch: ${c.green(this.currentBranch)}\n`); + summaryMsg += c.grey(`- Target branch: ${c.green(this.targetBranch)}`); + uxLog("log", this, summaryMsg); + uxLog("log", this, `${c.yellow(`When your ${GitProvider.getMergeRequestName(this.gitUrl)} will have been merged:`)} +- ${c.yellow('DO NOT REUSE THE SAME BRANCH')} +- Use New User Story menu (sf hardis:work:new), even if you work in the same sandbox or scratch org :)`); + // Manual actions file + const config = await getConfig('project'); + if (config.manualActionsFileUrl && config.manualActionsFileUrl !== '') { + uxLog("warning", this, c.yellow(`If you have pre-deployment or post-deployment manual actions, make sure to write them in the file ${c.green(config.manualActionsFileUrl)}`)); + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(config.manualActionsFileUrl, `Update Manual Actions file`, 'actionUrl'); + } + } + else { + uxLog("warning", this, c.yellow(`You should have a manual actions file defined. 
Ask your release manager to create one for the project and set its link in your .sfdx-hardis.yml file under manualActionsFileUrl property.`)); + } + if (!WebSocketClient.isAliveWithLwcUI()) { + uxLog("log", this, c.grey(`${GitProvider.getMergeRequestName(this.gitUrl)} documentation is available here -> ${c.bold(mergeRequestDoc)}`)); + } + WebSocketClient.sendReportFileMessage(mergeRequestDoc, `View ${GitProvider.getMergeRequestName(this.gitUrl)} documentation`, 'docUrl'); // Return an object to be displayed with --json - return { outputString: "Saved the task" }; + return { outputString: 'Saved the User Story' }; } // Clean git status private async cleanGitStatus() { // Skip git stuff if requested if (this.noGit) { - uxLog(this, c.cyan(`[Expert mode] Skipped git reset`)); + uxLog("action", this, c.cyan(`[Expert mode] Skipped git reset`)); return; } let gitStatusInit = await git().status(); // Cancel merge if ongoing merge if (gitStatusInit.conflicted.length > 0) { - await git({ output: true }).merge(["--abort"]); + await git({ output: true }).merge(['--abort']); gitStatusInit = await git().status(); } // Unstage files if (gitStatusInit.staged.length > 0) { - await execCommand("git reset", this, { output: true, fail: true }); + await execCommand('git reset', this, { output: true, fail: true }); } } - private async ensureCommitIsReady() { - // Manage force:source:pull from scratch org + private async ensureCommitIsReady(flags) { + // Manage project deploy start from scratch org if (this.noPull || this.auto) { // Skip pull - uxLog(this, c.cyan(`Skipped force:source:pull from scratch org`)); + uxLog("action", this, c.cyan(`Skipped sf project:retrieve:start from scratch org`)); return; } // Request user if commit is ready const commitReadyRes = await prompts({ - type: "select", - name: "value", - message: c.cyanBright("Have you already committed the updated metadata you want to deploy ?"), + type: 'select', + name: 'value', + message: c.cyanBright('Have you already 
committed the updated metadata you want to deploy ?'), + description: 'Select your current state regarding git commits and metadata updates', + placeholder: 'Select commit status', choices: [ { - title: "😎 Yes, my commit(s) is ready ! I staged my files then created one or multiple commits !", - value: "commitReady", + title: '😎 Yes, my commit(s) is ready ! I staged my files then created one or multiple commits !', + value: 'commitReady', description: "You have already pulled updates from your org (or locally updated the files if you're a nerd) then staged your files and created a commit", }, { - title: "😐 No, please pull my latest updates from my org so I can commit my metadatas", - value: "pleasePull", - description: "Pull latest updates from org so then you can stage files and create your commit", + title: '😐 No, please pull my latest updates from my org so I can commit my metadatas', + value: 'pleasePull', + description: 'Pull latest updates from org so then you can stage files and create your commit', }, { - title: "😱 What is a commit ? What does mean pull ? Help !", - value: "help", - description: "Don't panic, just click on the link that will appear in the console (CTRL + Click) and then you will know :)", + title: '😱 What is a commit ? What does mean pull ? 
Help !', + value: 'help', + description: + "Don't panic, just click on the link that will appear in the console (CTRL + Click) and then you will know :)", }, ], }); - if (commitReadyRes.value === "pleasePull") { - // Process force:source:pull - uxLog(this, c.cyan(`Pulling sources from scratch org ${this.org.getUsername()}...`)); - await forceSourcePull(this.org.getUsername(), this.debugMode); - uxLog(this, c.cyan(`Sources has been pulled from ${this.org.getUsername()}, now you can stage and commit your updates !`)); - return { outputString: "Pull performed" }; - } else if (commitReadyRes.value === "help") { + if (commitReadyRes.value === 'pleasePull') { + // Process sf project retrieve start + uxLog("action", this, c.cyan(`Pulling sources from scratch org ${flags['target-org'].getUsername()}...`)); + await forceSourcePull(flags['target-org'].getUsername(), this.debugMode); + uxLog( + "action", + this, + c.cyan( + `Sources has been pulled from ${flags[ + 'target-org' + ].getUsername()}, now you can stage and commit your updates !` + ) + ); + WebSocketClient.sendReportFileMessage("workbench.view.scm", "Commit your retrieved files", "actionCommand"); + WebSocketClient.sendReportFileMessage(`${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-publish-task/#commit-your-updates`, "Retrieve and Commit documentation", 'docUrl'); + return { outputString: 'Pull performed' }; + } else if (commitReadyRes.value === 'help') { // Show pull commit stage help - const commitHelpUrl = "https://sfdx-hardis.cloudity.com/hardis/scratch/pull/"; - uxLog(this, c.cyan(`Opening help at ${commitHelpUrl} ...`)); + const commitHelpUrl = `${CONSTANTS.DOC_URL_ROOT}/hardis/scratch/pull/`; + uxLog("action", this, c.cyan(`Opening help at ${commitHelpUrl} ...`)); await open(commitHelpUrl, { wait: true }); - return { outputString: "Help displayed at " }; + return { outputString: 'Help displayed at ' }; } // Extract data from org const dataSources = [ { - label: "Email templates", - dataPath: 
"./scripts/data/EmailTemplate", + label: 'Email templates', + dataPath: './scripts/data/EmailTemplate', }, ]; for (const dataSource of dataSources) { if (fs.existsSync(dataSource.dataPath)) { const exportDataRes = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', message: c.cyan(`Did you update ${c.green(dataSource.label)} and want to export related data ?`), + description: 'Confirm if you want to export data that may have been updated for this data source', }); if (exportDataRes.value === true) { await exportData(dataSource.dataPath, this, { - sourceUsername: this.org.getUsername(), + sourceUsername: flags['target-org'].getUsername(), }); } } @@ -295,88 +344,101 @@ autoRemoveUserPermissions: } private async upgradePackageXmlFilesWithDelta() { + uxLog("action", this, c.cyan('Updating manifest/package.xml and manifest/destructiveChanges.xml using sfdx-git-delta...')); // Retrieving info about current branch latest commit and master branch latest commit - const gitDeltaScope = await getGitDeltaScope(this.currentBranch, this.targetBranch); + const gitDeltaScope = await getGitDeltaScope(this.currentBranch, this.targetBranch || ''); // Build package.xml delta between most recent commit and developpement - const localPackageXml = path.join("manifest", "package.xml"); - const toCommitMessage = gitDeltaScope.toCommit ? gitDeltaScope.toCommit.message : ""; + const localPackageXml = path.join('manifest', 'package.xml'); + const toCommitMessage = gitDeltaScope.toCommit ? 
gitDeltaScope.toCommit.message : ''; uxLog( + "log", this, - c.cyan(`Calculating package.xml diff from [${c.green(this.targetBranch)}] to [${c.green(this.currentBranch)} - ${c.green(toCommitMessage)}]`), + c.grey( + `Calculating package.xml diff from [${c.green(this.targetBranch)}] to [${c.green( + this.currentBranch + )} - ${c.green(toCommitMessage)}]` + ) ); const tmpDir = await createTempDir(); const packageXmlResult = await callSfdxGitDelta( gitDeltaScope.fromCommit, gitDeltaScope.toCommit ? gitDeltaScope.toCommit.hash : gitDeltaScope.fromCommit, - tmpDir, + tmpDir ); if (packageXmlResult.status === 0) { // Upgrade local destructivePackage.xml - const localDestructiveChangesXml = path.join("manifest", "destructiveChanges.xml"); + const localDestructiveChangesXml = path.join('manifest', 'destructiveChanges.xml'); if (!fs.existsSync(localDestructiveChangesXml)) { // Create default destructiveChanges.xml if not defined const blankDestructiveChanges = ` - ${CONSTANTS.API_VERSION} + ${getApiVersion()} `; await fs.writeFile(localDestructiveChangesXml, blankDestructiveChanges); } - const diffDestructivePackageXml = path.join(tmpDir, "destructiveChanges", "destructiveChanges.xml"); - const destructivePackageXmlDiffStr = await fs.readFile(diffDestructivePackageXml, "utf8"); + const diffDestructivePackageXml = path.join(tmpDir, 'destructiveChanges', 'destructiveChanges.xml'); + const destructivePackageXmlDiffStr = await fs.readFile(diffDestructivePackageXml, 'utf8'); uxLog( + "log", this, - c.bold(c.cyan(`destructiveChanges.xml diff to be merged within ${c.green(localDestructiveChangesXml)}:\n`)) + - c.red(destructivePackageXmlDiffStr), + c.grey(c.bold(`Delta destructiveChanges.xml diff to be merged within ${c.green(localDestructiveChangesXml)}:\n`)) + + c.red(destructivePackageXmlDiffStr) + ); + await appendPackageXmlFilesContent( + [localDestructiveChangesXml, diffDestructivePackageXml], + localDestructiveChangesXml ); - const appendDestructivePackageXmlCommand = - 
"sfdx essentials:packagexml:append" + - ` --packagexmls ${localDestructiveChangesXml},${diffDestructivePackageXml}` + - ` --outputfile ${localDestructiveChangesXml}`; - await execCommand(appendDestructivePackageXmlCommand, this, { - fail: true, - debug: this.debugMode, - }); if ((await gitHasLocalUpdates()) && !this.noGit) { await git().add(localDestructiveChangesXml); } // Upgrade local package.xml - const diffPackageXml = path.join(tmpDir, "package", "package.xml"); - const packageXmlDiffStr = await fs.readFile(diffPackageXml, "utf8"); - uxLog(this, c.bold(c.cyan(`package.xml diff to be merged within ${c.green(localPackageXml)}:\n`)) + c.green(packageXmlDiffStr)); - const appendPackageXmlCommand = - "sfdx essentials:packagexml:append" + ` --packagexmls ${localPackageXml},${diffPackageXml}` + ` --outputfile ${localPackageXml}`; - await execCommand(appendPackageXmlCommand, this, { - fail: true, - debug: this.debugMode, - }); - const removePackageXmlCommand = - "sfdx essentials:packagexml:remove" + - ` --packagexml ${localPackageXml}` + - ` --removepackagexml ${localDestructiveChangesXml}` + - ` --outputfile ${localPackageXml}`; - await execCommand(removePackageXmlCommand, this, { - fail: true, - debug: this.debugMode, + const diffPackageXml = path.join(tmpDir, 'package', 'package.xml'); + const packageXmlDiffStr = await fs.readFile(diffPackageXml, 'utf8'); + uxLog( + "log", + this, + c.grey(c.bold(`Delta package.xml diff to be merged within ${c.green(localPackageXml)}:\n`)) + + c.green(packageXmlDiffStr) + ); + await appendPackageXmlFilesContent([localPackageXml, diffPackageXml], localPackageXml); + await removePackageXmlFilesContent(localPackageXml, localDestructiveChangesXml, { + outputXmlFile: localPackageXml, }); if ((await gitHasLocalUpdates()) && !this.noGit) { await git().add(localPackageXml); } } else { - uxLog(this, `[error] ${c.grey(JSON.stringify(packageXmlResult))}`); - uxLog(this, c.red(`Unable to build git diff.${c.yellow(c.bold("Please update 
package.xml and destructiveChanges.xml manually"))}`)); + uxLog("log", this, `[error] ${c.grey(JSON.stringify(packageXmlResult))}`); + uxLog( + "error", + this, + c.red( + `Unable to build git diff.${c.yellow( + c.bold('Please update package.xml and destructiveChanges.xml manually') + )}` + ) + ); } // Commit updates let gitStatusWithConfig = await git().status(); if (gitStatusWithConfig.staged.length > 0 && !this.noGit) { - uxLog(this, `Committing files in local git branch ${c.green(this.currentBranch)}...`); + uxLog("log", this, c.grey(`Committing updated files in local git branch ${c.green(this.currentBranch)}...`)); try { - await git({ output: true }).commit("[sfdx-hardis] Update package content"); + await git({ output: true }).commit('[sfdx-hardis] Update package content'); } catch (e) { - uxLog(this, c.yellow(`There may be an issue while committing files but it can be ok to ignore it\n${c.grey(e.message)}`)); + uxLog( + "warning", + this, + c.yellow( + `There may be an issue while committing files but it can be ok to ignore it\n${c.grey( + (e as Error).message + )}` + ) + ); gitStatusWithConfig = await git().status(); } } @@ -385,34 +447,48 @@ autoRemoveUserPermissions: // Apply automated cleaning to avoid to have to do it manually private async applyCleaningOnSources() { - const config = await getConfig("branch"); + const config = await getConfig('branch'); if (!this.noClean) { const gitStatusFilesBeforeClean = (await git().status()).files.map((file) => file.path); - uxLog(this, JSON.stringify(gitStatusFilesBeforeClean, null, 2)); + uxLog("other", this, JSON.stringify(gitStatusFilesBeforeClean, null, 2)); // References cleaning - uxLog(this, c.cyan("Cleaning sfdx project from obsolete references...")); - // User defined cleaning - await CleanReferences.run(["--type", "all"]); + await CleanReferences.run(['--type', 'all']); if (globalThis?.displayProfilesWarning === true) { - uxLog(this, c.yellow(c.bold("Please make sure the attributes removed from Profiles 
are defined on Permission Sets :)"))); + uxLog( + "warning", + this, + c.yellow(c.bold('Please make sure the attributes removed from Profiles are defined on Permission Sets :)')) + ); + } + + // Xml cleaning + if (config.cleanXmlPatterns && config.cleanXmlPatterns.length > 0) { + uxLog("action", this, c.cyan('Cleaning sfdx project using patterns and xpaths defined in cleanXmlPatterns...')); + await CleanXml.run([]); } - uxLog(this, c.cyan("Cleaning sfdx project using patterns and xpaths defined in cleanXmlPatterns...")); - await CleanXml.run([]); // Manage git after cleaning const gitStatusAfterClean = await git().status(); - uxLog(this, JSON.stringify(gitStatusAfterClean, null, 2)); + uxLog("other", this, JSON.stringify(gitStatusAfterClean, null, 2)); const cleanedFiles = gitStatusAfterClean.files .filter((file) => !gitStatusFilesBeforeClean.includes(file.path)) .map((file) => normalizeFileStatusPath(file.path, config)); if (cleanedFiles.length > 0) { - uxLog(this, c.cyan(`Cleaned the following list of files:\n${cleanedFiles.join("\n")}`)); + uxLog("log", this, c.grey(`Cleaned the following list of files:\n${cleanedFiles.join('\n')}`)); if (!this.noGit) { try { await git().add(cleanedFiles); - await git({ output: true }).commit("[sfdx-hardis] Clean sfdx project"); + await git({ output: true }).commit('[sfdx-hardis] Clean sfdx project'); } catch (e) { - uxLog(this, c.yellow(`There may be an issue while adding cleaned files but it can be ok to ignore it\n${c.grey(e.message)}`)); + uxLog( + "warning", + this, + c.yellow( + `There may be an issue while adding cleaned files but it can be ok to ignore it\n${c.grey( + (e as Error).message + )}` + ) + ); } } } @@ -422,19 +498,19 @@ autoRemoveUserPermissions: private async buildDeploymentPlan() { // Build deployment plan splits let splitConfig = await this.getSeparateDeploymentsConfig(); - const localPackageXml = path.join("manifest", "package.xml"); + const localPackageXml = path.join('manifest', 'package.xml'); const 
packageXml = await parseXmlFile(localPackageXml); for (const type of packageXml.Package.types || []) { const typeName = type.name[0]; splitConfig = splitConfig.map((split) => { - if (split.types.includes(typeName) && type.members[0] !== "*") { + if (split.types.includes(typeName) && type.members[0] !== '*') { split.content[typeName] = type.members; } return split; }); } // Generate deployment plan items - const config = await getConfig("project"); + const config = await getConfig('project'); const deploymentPlan = config?.deploymentPlan || {}; let packages = deploymentPlan?.packages || []; const blankPackageXml = packageXml; @@ -443,7 +519,7 @@ autoRemoveUserPermissions: if (Object.keys(split.content).length > 0) { // data case if (split.data) { - const label = `Import ${split.types.join("-")} records`; + const label = `Import ${split.types.join('-')} records`; packages = this.addToPlan(packages, { label: label, dataPath: split.data, @@ -462,7 +538,7 @@ autoRemoveUserPermissions: }); } await writeXmlFile(split.file, splitPackageXml); - const label = `Deploy ${split.types.join("-")}`; + const label = `Deploy ${split.types.join('-')}`; packages = this.addToPlan(packages, { label: label, packageXmlFile: split.file, @@ -505,17 +581,25 @@ autoRemoveUserPermissions: } // Update deployment plan in config deploymentPlan.packages = packages.sort((a, b) => (a.order > b.order ? 
1 : -1)); - await setConfig("project", { deploymentPlan: deploymentPlan }); + await setConfig('project', { deploymentPlan: deploymentPlan }); if (!this.noGit) { - await git({ output: true }).add(["./config"]); - await git({ output: true }).add(["./manifest"]); + await git({ output: true }).add(['./config']); + await git({ output: true }).add(['./manifest']); } let gitStatusAfterDeployPlan = await git().status(); if (gitStatusAfterDeployPlan.staged.length > 0 && !this.noGit) { try { - await git({ output: true }).commit("[sfdx-hardis] Update deployment plan"); + await git({ output: true }).commit('[sfdx-hardis] Update deployment plan'); } catch (e) { - uxLog(this, c.yellow(`There may be an issue while committing files but it can be ok to ignore it\n${c.grey(e.message)}`)); + uxLog( + "warning", + this, + c.yellow( + `There may be an issue while committing files but it can be ok to ignore it\n${c.grey( + (e as Error).message + )}` + ) + ); gitStatusAfterDeployPlan = await git().status(); } } @@ -533,21 +617,26 @@ autoRemoveUserPermissions: !this.auto ) { const pushResponse = await prompts({ - type: "confirm", - name: "push", + type: 'confirm', + name: 'push', default: true, - message: c.cyanBright(`Do you want to push your commit(s) on git server ? (git push in remote git branch ${c.green(this.currentBranch)})`), + message: c.cyanBright( + `Do you want to push your commit(s) on git server ? 
(git push in remote git branch ${c.green( + this.currentBranch + )})` + ), + description: 'Choose whether to push your commits to the remote git repository', }); if (pushResponse.push === true) { - uxLog(this, c.cyan(`Pushing new commit(s) in remote git branch ${c.green(`origin/${this.currentBranch}`)}...`)); - const configUSer = await getConfig("user"); + uxLog("action", this, c.cyan(`Pushing new commit(s) in remote git branch ${c.green(`origin/${this.currentBranch}`)}...`)); + const configUSer = await getConfig('user'); let pushResult: any; if (configUSer.canForcePush === true) { // Force push if hardis:work:resetselection has been called before - pushResult = await git({ output: true }).push(["-u", "origin", this.currentBranch, "--force"]); - await setConfig("user", { canForcePush: false }); + pushResult = await git({ output: true }).push(['-u', 'origin', this.currentBranch, '--force']); + await setConfig('user', { canForcePush: false }); } else { - pushResult = await git({ output: true }).push(["-u", "origin", this.currentBranch]); + pushResult = await git({ output: true }).push(['-u', 'origin', this.currentBranch]); } // Update merge request info if (pushResult && pushResult.remoteMessages) { @@ -562,8 +651,8 @@ autoRemoveUserPermissions: mergeRequestsStored.push(this.updateMergeRequestInfo({ branch: this.currentBranch }, pushResult)); } // Update user config file & send Websocket event - await setConfig("user", { mergeRequests: mergeRequestsStored.filter((mr: any) => mr !== null) }); - WebSocketClient.sendMessage({ event: "refreshStatus" }); + await setConfig('user', { mergeRequests: mergeRequestsStored.filter((mr: any) => mr !== null) }); + WebSocketClient.sendRefreshStatusMessage(); } } } @@ -571,7 +660,7 @@ autoRemoveUserPermissions: private updateMergeRequestInfo(mergeRequestStored, mergeRequestInfo) { if (this.debugMode) { - uxLog(this, c.grey(JSON.stringify(mergeRequestInfo, null, 2))); + uxLog("log", this, c.grey(JSON.stringify(mergeRequestInfo, null, 
2))); } if (mergeRequestInfo?.remoteMessages?.id) { mergeRequestStored.id = mergeRequestInfo.remoteMessages.id; @@ -583,7 +672,10 @@ autoRemoveUserPermissions: } else { delete mergeRequestStored.urlCreate; } - if (mergeRequestInfo?.remoteMessages?.all[0] && mergeRequestInfo?.remoteMessages?.all[0].includes("View merge request")) { + if ( + mergeRequestInfo?.remoteMessages?.all[0] && + mergeRequestInfo?.remoteMessages?.all[0].includes('View merge request') + ) { mergeRequestStored.url = mergeRequestInfo?.remoteMessages?.all[1]; } else { delete mergeRequestStored.url; @@ -592,7 +684,7 @@ autoRemoveUserPermissions: } private async getSeparateDeploymentsConfig() { - const config = await getConfig("project"); + const config = await getConfig('project'); if (config.separateDeploymentsConfig || config.separateDeploymentsConfig === false) { return config.separateDeploymentConfig || []; } @@ -613,10 +705,10 @@ autoRemoveUserPermissions: content: {}, }, */ { - types: ["SharingRules", "SharingOwnerRule"], - files: "manifest/splits/packageXmlSharingRules{{name}}.xml", + types: ['SharingRules', 'SharingOwnerRule'], + files: 'manifest/splits/packageXmlSharingRules{{name}}.xml', filePos: 30, - mainType: "SharingRules", + mainType: 'SharingRules', waitAfter: 30, content: {}, }, diff --git a/src/commands/hardis/work/ws.ts b/src/commands/hardis/work/ws.ts index c024f3c11..059a95e4a 100644 --- a/src/commands/hardis/work/ws.ts +++ b/src/commands/hardis/work/ws.ts @@ -1,64 +1,78 @@ /* jscpd:ignore-start */ -import { flags, SfdxCommand } from "@salesforce/command"; -import { Messages } from "@salesforce/core"; -import { AnyJson } from "@salesforce/ts-types"; -import { WebSocketClient } from "../../../common/websocketClient"; +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; +import { AnyJson } from '@salesforce/ts-types'; +import { WebSocketClient } from '../../../common/websocketClient.js'; -// Initialize Messages with the 
current plugin directory -Messages.importMessagesDirectory(__dirname); +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'org'); -// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, -// or any library that is using the messages framework can also be loaded this way. -const messages = Messages.loadMessages("sfdx-hardis", "org"); +export default class WebSocketAction extends SfCommand { + public static title = 'WebSocket operations'; -export default class WebSocketAction extends SfdxCommand { - public static title = "WebSocket operations"; + public static description = ` +## Command Behavior - public static description = "Technical calls to WebSocket functions"; +**Performs technical operations related to WebSocket communication, primarily for internal use by the sfdx-hardis VS Code Extension.** - public static examples = ["$ sfdx hardis:work:ws --event refreshStatus"]; +This command is not intended for direct end-user interaction. It facilitates communication between the sfdx-hardis CLI and the VS Code Extension, enabling features like real-time status updates and plugin refreshes. + +Key functionalities: + +- **Refresh Status (\`--event refreshStatus\`):** Sends a message to the VS Code Extension to refresh its displayed status, ensuring that the UI reflects the latest state of Salesforce orgs or project activities. +- **Refresh Plugins (\`--event refreshPlugins\`):** Sends a message to the VS Code Extension to refresh its loaded plugins, useful after installing or updating sfdx-hardis or other related extensions. + +
+Technical explanations + +The command's technical implementation involves: + +- **WebSocketClient:** It utilizes the \`WebSocketClient\` utility to establish and manage WebSocket connections. +- **Event-Driven Communication:** It listens for specific events (e.g., \`refreshStatus\`, \`refreshPlugins\`) and triggers corresponding actions on the connected WebSocket client. +- **Internal Use:** This command is primarily called programmatically by the VS Code Extension to maintain synchronization and provide a seamless user experience. +
+`; + + public static uiConfig = { hide: true }; + + public static examples = ['$ sf hardis:work:ws --event refreshStatus']; // public static args = [{name: 'file'}]; - protected static flagsConfig = { - event: flags.string({ - char: "e", - description: "WebSocket event", + public static flags: any = { + event: Flags.string({ + char: 'e', + description: 'WebSocket event', }), - debug: flags.boolean({ - char: "d", + debug: Flags.boolean({ + char: 'd', default: false, - description: messages.getMessage("debugMode"), + description: messages.getMessage('debugMode'), }), - websocket: flags.string({ - description: messages.getMessage("websocket"), + websocket: Flags.string({ + description: messages.getMessage('websocket'), }), - skipauth: flags.boolean({ - description: "Skip authentication check when a default username is required", + skipauth: Flags.boolean({ + description: 'Skip authentication check when a default username is required', }), }; - // Comment this out if your command does not require an org username - protected static requiresUsername = false; - - // Comment this out if your command does not support a hub org username - protected static requiresDevhubUsername = false; - // Set this to true if your command requires a project workspace; 'requiresProject' is false by default - protected static requiresProject = false; + public static requiresProject = false; protected debugMode = false; - protected event = ""; + protected event = ''; /* jscpd:ignore-end */ public async run(): Promise { - this.event = this.flags.event || ""; + const { flags } = await this.parse(WebSocketAction); + this.event = flags.event || ''; if (WebSocketClient.isAlive()) { - if (this.event === "refreshStatus") { - WebSocketClient.sendMessage({ event: "refreshStatus" }); - } else if (this.event === "refreshPlugins") { - WebSocketClient.sendMessage({ event: "refreshPlugins" }); + if (this.event === 'refreshStatus') { + WebSocketClient.sendRefreshStatusMessage(); + } else if (this.event === 
'refreshPlugins') { + WebSocketClient.sendRefreshPluginsMessage(); } } diff --git a/src/commands/hello/world.ts b/src/commands/hello/world.ts new file mode 100644 index 000000000..409a9f8fc --- /dev/null +++ b/src/commands/hello/world.ts @@ -0,0 +1,56 @@ +import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; +import { Messages } from '@salesforce/core'; + +Messages.importMessagesDirectoryFromMetaUrl(import.meta.url); +const messages = Messages.loadMessages('sfdx-hardis', 'hello.world'); + +export type HelloWorldResult = { + name: string; + time: string; +}; + +export default class World extends SfCommand { + public static readonly summary = messages.getMessage('summary'); + public static readonly description = ` +## Command Behavior + +**Says hello to the world or a specified person.** + +This is a simple command used for demonstration purposes. It outputs a greeting message to the console. + +Key functionalities: + +- **Customizable Greeting:** You can specify a name using the \`--name\` flag to personalize the greeting. +- **Timestamp:** The greeting includes the current date. + +
+Technical explanations + +The command's technical implementation involves: + +- **Flag Parsing:** It parses the \`--name\` flag to get the recipient of the greeting. +- **Date Retrieval:** It gets the current date using \`new Date().toDateString()\`. +- **Console Output:** It constructs the greeting message using the provided name and the current date, and then logs it to the console using \`this.log()\`. +
+`; + public static readonly examples = messages.getMessages('examples'); + + public static readonly flags: any = { + name: Flags.string({ + char: 'n', + summary: messages.getMessage('flags.name.summary'), + description: messages.getMessage('flags.name.description'), + default: 'World', + }), + }; + + public async run(): Promise { + const { flags } = await this.parse(World); + const time = new Date().toDateString(); + this.log(messages.getMessage('info.hello', [flags.name, time])); + return { + name: flags.name, + time, + }; + } +} diff --git a/src/common/aiProvider/agentforceProvider.ts b/src/common/aiProvider/agentforceProvider.ts new file mode 100644 index 000000000..e65146ba8 --- /dev/null +++ b/src/common/aiProvider/agentforceProvider.ts @@ -0,0 +1,73 @@ + +import { AiResponse } from "./index.js"; +import { AiProviderRoot } from "./aiProviderRoot.js"; +import c from "chalk"; +import { uxLog } from "../utils/index.js"; +import { PromptTemplate } from "./promptTemplates.js"; +import { Connection } from "@salesforce/core"; +import { UtilsAi } from "./utils.js"; + +export class AgentforceProvider extends AiProviderRoot { + protected conn: Connection; + + constructor() { + super(); + this.conn = globalThis.jsForceConnTechnical || globalThis.jsForceConn; + } + + public getLabel(): string { + return "Agentforce connector"; + } + + public async promptAi(promptText: string, template: PromptTemplate): Promise { + if (!this.checkMaxAiCallsNumber()) { + const maxCalls = this.getAiMaxCallsNumber(); + uxLog("warning", this, c.yellow(`[Agentforce] Already performed maximum ${maxCalls} calls. Increase it by defining AI_MAXIMUM_CALL_NUMBER env variable`)); + return null; + } + if (process.env?.DEBUG_PROMPTS === "true") { + uxLog("log", this, c.grey(`[Agentforce] Requesting the following prompt${template ? (' using template ' + template) : ''}:\n${promptText}`)); + } + else { + uxLog("log", this, c.grey(`[Agentforce] Requesting prompt${template ? 
(' using template ' + template) : ''} (define DEBUG_PROMPTS=true to see details)`)); + } + this.incrementAiCallsNumber(); + const genericPromptTemplate = process.env.GENERIC_AGENTFORCE_PROMPT_TEMPLATE || "SfdxHardisGenericPrompt"; + const promptUrl = process.env.GENERIC_AGENTFORCE_PROMPT_URL || `/services/data/v${this.conn.getApiVersion()}/einstein/prompt-templates/${genericPromptTemplate}/generations` + const payload = { + "isPreview": "false", + "inputParams": { + "valueMap": { + "Input:PromptText": { + "value": promptText + } + } + }, + "outputLanguage": UtilsAi.getPromptsLanguage(), + "additionalConfig": { + /* "numGenerations": 1, + "temperature": 0, + "frequencyPenalty": 0.0, + "presencePenalty": 0.0, + "additionalParameters": {},*/ + "applicationName": "PromptTemplateGenerationsInvocable" + } + } + const agentforceResponse: any = await this.conn.requestPost(promptUrl, payload); + if (process.env?.DEBUG_PROMPTS === "true") { + uxLog("log", this, c.grey("[Agentforce] Received prompt response\n" + JSON.stringify(agentforceResponse, null, 2))); + } + else { + uxLog("log", this, c.grey("[Agentforce] Received prompt response")); + } + const aiResponse: AiResponse = { + success: false, + model: "Agentforce", + }; + if (agentforceResponse?.generations?.length > 0 && agentforceResponse.generations[0]?.text) { + aiResponse.success = true; + aiResponse.promptResponse = agentforceResponse.generations[0]?.text; + } + return aiResponse; + } +} diff --git a/src/common/aiProvider/aiProviderRoot.ts b/src/common/aiProvider/aiProviderRoot.ts index 5e1f27d20..7702d4be7 100644 --- a/src/common/aiProvider/aiProviderRoot.ts +++ b/src/common/aiProvider/aiProviderRoot.ts @@ -1,22 +1,23 @@ -import { SfdxError } from "@salesforce/core"; -import { AiResponse } from "."; -import { getEnvVar } from "../../config"; +import { SfError } from "@salesforce/core"; +import { AiResponse } from "./index.js"; +import { getEnvVar } from "../../config/index.js"; +import { PromptTemplate } from 
"./promptTemplates.js"; export abstract class AiProviderRoot { protected token: string; public getLabel(): string { - throw new SfdxError("getLabel should be implemented on this call"); + throw new SfError("getLabel should be implemented on this call"); } // eslint-disable-next-line @typescript-eslint/no-unused-vars - public async promptAi(prompt: string): Promise { - throw new SfdxError("promptAi should be implemented on this call"); + public async promptAi(prompt: string, template: PromptTemplate | null = null): Promise { + throw new SfError("promptAi should be implemented on this call"); } - // Get user defined maximum number of calls during an sfdx hardis command + // Get user defined maximum number of calls during an sfdx-hardis command getAiMaxCallsNumber() { - return parseInt(getEnvVar("AI_MAXIMUM_CALL_NUMBER") || "10"); + return parseInt(getEnvVar("AI_MAXIMUM_CALL_NUMBER") || "10000"); } // Increment number of api calls performed diff --git a/src/common/aiProvider/index.ts b/src/common/aiProvider/index.ts index f292b668b..388288c69 100644 --- a/src/common/aiProvider/index.ts +++ b/src/common/aiProvider/index.ts @@ -1,28 +1,117 @@ -import { UtilsAi } from "./utils"; -import { AiProviderRoot } from "./aiProviderRoot"; -import { OpenApiProvider } from "./openapiProvider"; +import { UtilsAi } from "./utils.js"; +import c from 'chalk'; +import { AiProviderRoot } from "./aiProviderRoot.js"; +import { OpenAiProvider } from "./openaiProvider.js"; +import { SfError } from "@salesforce/core"; +import { buildPromptFromTemplate, PromptTemplate } from "./promptTemplates.js"; +import { isCI, uxLog } from "../utils/index.js"; +import { prompts } from "../utils/prompts.js"; +import { AgentforceProvider } from "./agentforceProvider.js"; +import { LangChainProvider } from "./langchainProvider.js"; +import { formatMarkdownForMkDocs } from "../utils/markdownUtils.js"; + +let IS_AI_AVAILABLE: boolean | null = null; export abstract class AiProvider { static isAiAvailable(): 
boolean { + if (process.env?.DISABLE_AI === "true") { + uxLog("warning", this, c.yellow("[AI Provider] AI calls have been disabled using env var DISABLE_AI=true")) + return false; + } return this.getInstance() != null; } - static getInstance(): AiProviderRoot { + static async isAiAvailableWithUserPrompt() { + if (IS_AI_AVAILABLE !== null) { + return IS_AI_AVAILABLE; + } + if (this.isAiAvailable()) { + IS_AI_AVAILABLE = true; + return IS_AI_AVAILABLE; + } + if (!isCI) { + const promptRes = await prompts({ + type: 'text', + name: 'token', + message: 'Input your OpenAi API token if you want to use it. Leave empty to skip.', + description: 'Provide your OpenAI API key to enable AI-powered features in sfdx-hardis', + }); + if (promptRes.token) { + process.env.OPENAI_API_KEY = promptRes.token; + } + } + IS_AI_AVAILABLE = this.isAiAvailable(); + return IS_AI_AVAILABLE; + } + + static getInstance(): AiProviderRoot | null { + // LangChain + if (UtilsAi.isLangChainAvailable()) { + return new LangChainProvider(); + } // OpenAi - if (UtilsAi.isOpenApiAvailable()) { - return new OpenApiProvider(); + else if (UtilsAi.isOpenAiAvailable()) { + return new OpenAiProvider(); + } + else if (UtilsAi.isAgentforceAvailable()) { + return new AgentforceProvider(); } return null; } - static async promptAi(prompt: string): Promise { + static async promptAi(prompt: string, template: PromptTemplate): Promise { const aiInstance = this.getInstance(); - return await aiInstance.promptAi(prompt); + if (!aiInstance) { + throw new SfError("aiInstance should be set"); + } + // Stop calling AI if a timeout has been reached + const aiMaxTimeoutMinutes = parseInt(process.env.AI_MAX_TIMEOUT_MINUTES || (isCI ? 
"30" : "0"), 10); + if (aiMaxTimeoutMinutes > 0) { + globalThis.currentAiStartTime = globalThis.currentAiStartTime || Date.now(); + const elapsedMinutes = (Date.now() - globalThis.currentAiStartTime) / 60000; // Convert milliseconds to minutes + if (elapsedMinutes >= aiMaxTimeoutMinutes) { + uxLog("warning", this, c.yellow(`AI calls reached maximum time allowed of ${aiMaxTimeoutMinutes} minutes. You can either: +- Run command locally then commit + push +- Increase using variable \`AI_MAX_TIMEOUT_MINUTES\` in your CI config (ex: AI_MAX_TIMEOUT_MINUTES=120) after making sure than your CI job timeout can handle it :)`)); + return { success: false, model: "none", forcedTimeout: true }; + } + } + // Call AI using API + try { + const aiResponse = await aiInstance.promptAi(prompt, template); + if (aiResponse?.success && aiResponse?.promptResponse) { + aiResponse.promptResponse = formatMarkdownForMkDocs(aiResponse.promptResponse); + } + return aiResponse; + } catch (e: any) { + if (e.message.includes("on tokens per min (TPM)")) { + try { + uxLog("warning", this, c.yellow(`Error while calling AI provider: ${e.message}`)); + uxLog("warning", this, c.yellow(`Trying again in 60 seconds...`)); + await new Promise((resolve) => setTimeout(resolve, 60000)); + return await aiInstance.promptAi(prompt, template); + } catch (e2: any) { + uxLog("error", this, c.red(`Error while calling AI provider: ${e2.message}`)); + return null; + } + } + uxLog("error", this, c.red(`Error while calling AI provider: ${e.message}`)); + return null; + } + } + + static buildPrompt(template: PromptTemplate, variables: object): string { + return buildPromptFromTemplate(template, variables); } + } export interface AiResponse { success: boolean; model: string; promptResponse?: string; + forcedTimeout?: boolean // In case AI_MAX_TIMEOUT_MINUTES has been set } + + + diff --git a/src/common/aiProvider/langChainProviders/langChainAnthropicProvider.ts 
b/src/common/aiProvider/langChainProviders/langChainAnthropicProvider.ts new file mode 100644 index 000000000..9cb4df645 --- /dev/null +++ b/src/common/aiProvider/langChainProviders/langChainAnthropicProvider.ts @@ -0,0 +1,25 @@ +import { ChatAnthropic } from "@langchain/anthropic"; +import { BaseChatModel } from "@langchain/core/language_models/chat_models"; +import { AbstractLLMProvider, ModelConfig } from "./langChainBaseProvider.js"; + +export class LangChainAnthropicProvider extends AbstractLLMProvider { + constructor(modelName: string, config: ModelConfig) { + if (!config.apiKey) { + throw new Error("API key is required for Anthropic provider. Define it in a secured env var LANGCHAIN_LLM_MODEL_API_KEY"); + } + super(modelName, config); + this.model = this.getModel(); + } + + getModel(): BaseChatModel { + const config = { + modelName: this.modelName, + anthropicApiKey: this.config.apiKey!, + temperature: this.config.temperature, + maxTokens: this.config.maxTokens, + maxRetries: this.config.maxRetries + }; + + return new ChatAnthropic(config) as BaseChatModel; + } +} \ No newline at end of file diff --git a/src/common/aiProvider/langChainProviders/langChainBaseProvider.ts b/src/common/aiProvider/langChainProviders/langChainBaseProvider.ts new file mode 100644 index 000000000..2f1bb55d8 --- /dev/null +++ b/src/common/aiProvider/langChainProviders/langChainBaseProvider.ts @@ -0,0 +1,39 @@ +import { BaseChatModel } from "@langchain/core/language_models/chat_models"; + +export interface ModelConfig { + temperature?: number; + timeout?: number; + maxTokens?: number; + maxRetries?: number; + baseUrl?: string; + apiKey?: string; +} + +export type ProviderType = "ollama" | "openai" | "anthropic"; + +export interface BaseLLMProvider { + getModel(): BaseChatModel; + getModelName(): string; + getLabel(): string; +} + +export abstract class AbstractLLMProvider implements BaseLLMProvider { + protected model: BaseChatModel; + protected modelName: string; + protected config: 
ModelConfig; + + constructor(modelName: string, config: ModelConfig) { + this.modelName = modelName; + this.config = config; + } + + abstract getModel(): BaseChatModel; + + getModelName(): string { + return this.modelName; + } + + getLabel(): string { + return "LangChain connector"; + } +} \ No newline at end of file diff --git a/src/common/aiProvider/langChainProviders/langChainGoogleGenAi.ts b/src/common/aiProvider/langChainProviders/langChainGoogleGenAi.ts new file mode 100644 index 000000000..19316c30e --- /dev/null +++ b/src/common/aiProvider/langChainProviders/langChainGoogleGenAi.ts @@ -0,0 +1,25 @@ +import { ChatGoogleGenerativeAI } from "@langchain/google-genai"; +import { BaseChatModel } from "@langchain/core/language_models/chat_models"; +import { AbstractLLMProvider, ModelConfig } from "./langChainBaseProvider.js"; + +export class LangChainGoogleGenAiProvider extends AbstractLLMProvider { + constructor(modelName: string, config: ModelConfig) { + if (!config.apiKey) { + throw new Error("API key is required for Google GenAI provider. 
Define it in a secured env var LANGCHAIN_LLM_MODEL_API_KEY"); + } + super(modelName, config); + this.model = this.getModel(); + } + + getModel(): BaseChatModel { + const config = { + model: this.modelName, + apiKey: this.config.apiKey!, + temperature: this.config.temperature, + maxTokens: this.config.maxTokens, + maxRetries: this.config.maxRetries + }; + + return new ChatGoogleGenerativeAI(config) as BaseChatModel; + } +} \ No newline at end of file diff --git a/src/common/aiProvider/langChainProviders/langChainOllamaProvider.ts b/src/common/aiProvider/langChainProviders/langChainOllamaProvider.ts new file mode 100644 index 000000000..467973987 --- /dev/null +++ b/src/common/aiProvider/langChainProviders/langChainOllamaProvider.ts @@ -0,0 +1,21 @@ +import { ChatOllama } from "@langchain/ollama"; +import { BaseChatModel } from "@langchain/core/language_models/chat_models"; +import { AbstractLLMProvider, ModelConfig } from "./langChainBaseProvider.js"; + +export class LangChainOllamaProvider extends AbstractLLMProvider { + constructor(modelName: string, config: ModelConfig) { + super(modelName, config); + this.model = this.getModel(); + } + + getModel(): BaseChatModel { + const config = { + model: this.modelName, + baseUrl: this.config.baseUrl || "http://localhost:11434", + temperature: this.config.temperature, + maxRetries: this.config.maxRetries + }; + + return new ChatOllama(config) as BaseChatModel; + } +} \ No newline at end of file diff --git a/src/common/aiProvider/langChainProviders/langChainOpenAIProvider.ts b/src/common/aiProvider/langChainProviders/langChainOpenAIProvider.ts new file mode 100644 index 000000000..1f95407a6 --- /dev/null +++ b/src/common/aiProvider/langChainProviders/langChainOpenAIProvider.ts @@ -0,0 +1,25 @@ +import { ChatOpenAI } from "@langchain/openai"; +import { BaseChatModel } from "@langchain/core/language_models/chat_models"; +import { AbstractLLMProvider, ModelConfig } from "./langChainBaseProvider.js"; + +export class 
LangChainOpenAIProvider extends AbstractLLMProvider { + constructor(modelName: string, config: ModelConfig) { + if (!config.apiKey) { + throw new Error("API key is required for OpenAI provider. Define it in a secured env var LANGCHAIN_LLM_MODEL_API_KEY"); + } + super(modelName, config); + this.model = this.getModel(); + } + + getModel(): BaseChatModel { + const config = { + modelName: this.modelName, + openAIApiKey: this.config.apiKey!, + temperature: this.config.temperature, + maxTokens: this.config.maxTokens, + maxRetries: this.config.maxRetries + }; + + return new ChatOpenAI(config) as BaseChatModel; + } +} \ No newline at end of file diff --git a/src/common/aiProvider/langChainProviders/langChainProviderFactory.ts b/src/common/aiProvider/langChainProviders/langChainProviderFactory.ts new file mode 100644 index 000000000..6e60b07a1 --- /dev/null +++ b/src/common/aiProvider/langChainProviders/langChainProviderFactory.ts @@ -0,0 +1,24 @@ +import { BaseLLMProvider, ModelConfig, ProviderType } from "./langChainBaseProvider.js"; +import { LangChainOllamaProvider } from "./langChainOllamaProvider.js"; +import { LangChainOpenAIProvider } from "./langChainOpenAIProvider.js"; +import { LangChainAnthropicProvider } from "./langChainAnthropicProvider.js"; +import { LangChainGoogleGenAiProvider } from "./langChainGoogleGenAi.js"; + +const ALL_PROVIDERS = ["ollama", "openai", "anthropic", "google-genai"]; + +export class LangChainProviderFactory { + static createProvider(providerType: ProviderType, modelName: string, config: ModelConfig): BaseLLMProvider { + switch (providerType.toLowerCase()) { + case "ollama": + return new LangChainOllamaProvider(modelName, config); + case "openai": + return new LangChainOpenAIProvider(modelName, config); + case "anthropic": + return new LangChainAnthropicProvider(modelName, config); + case "google-genai": + return new LangChainGoogleGenAiProvider(modelName, config); + default: + throw new Error(`Unsupported LLM provider: ${providerType}. 
Supported providers are: ${ALL_PROVIDERS.join(", ")}`); + } + } +} \ No newline at end of file diff --git a/src/common/aiProvider/langchainProvider.ts b/src/common/aiProvider/langchainProvider.ts new file mode 100644 index 000000000..109a2eb71 --- /dev/null +++ b/src/common/aiProvider/langchainProvider.ts @@ -0,0 +1,101 @@ +import { BaseChatModel } from "@langchain/core/language_models/chat_models"; +import { AiResponse } from "./index.js"; +import { AiProviderRoot } from "./aiProviderRoot.js"; +import c from "chalk"; +import { uxLog } from "../utils/index.js"; +import { PromptTemplate } from "./promptTemplates.js"; +import { getEnvVar } from "../../config/index.js"; +import { LangChainProviderFactory } from "./langChainProviders/langChainProviderFactory.js"; +import { ModelConfig, ProviderType } from "./langChainProviders/langChainBaseProvider.js"; + +export class LangChainProvider extends AiProviderRoot { + private model: BaseChatModel; + private modelName: string; + + constructor() { + super(); + const provider = getEnvVar("LANGCHAIN_LLM_PROVIDER"); + if (!provider) { + throw new Error("LANGCHAIN_LLM_PROVIDER environment variable must be set to use LangChain integration"); + } + + const providerType = provider.toLowerCase() as ProviderType; + const modelName = getEnvVar("LANGCHAIN_LLM_MODEL"); + const apiKey = getEnvVar("LANGCHAIN_LLM_MODEL_API_KEY"); + + if (!modelName) { + throw new Error("LANGCHAIN_LLM_MODEL environment variable must be set to use LangChain integration"); + } + + this.modelName = modelName; + + // Common configuration for all providers + const config: ModelConfig = { + temperature: Number(getEnvVar("LANGCHAIN_LLM_TEMPERATURE")) || undefined, + timeout: Number(getEnvVar("LANGCHAIN_LLM_TIMEOUT")) || undefined, + maxTokens: Number(getEnvVar("LANGCHAIN_LLM_MAX_TOKENS")) || undefined, + maxRetries: Number(getEnvVar("LANGCHAIN_LLM_MAX_RETRIES")) || undefined, + baseUrl: getEnvVar("LANGCHAIN_LLM_BASE_URL") || undefined, + apiKey: apiKey || undefined 
+ }; + + // factory pattern so that adding support for new providers is easy in the future + const llmProvider = LangChainProviderFactory.createProvider(providerType, modelName, config); + this.model = llmProvider.getModel(); + } + + public getLabel(): string { + return "LangChain connector"; + } + + public async promptAi(promptText: string, template: PromptTemplate | null = null): Promise { + // re-use the same check for max ai calls number as in the original openai provider implementation + if (!this.checkMaxAiCallsNumber()) { + const maxCalls = this.getAiMaxCallsNumber(); + uxLog("warning", this, c.yellow(`[LangChain] Already performed maximum ${maxCalls} calls. Increase it by defining AI_MAXIMUM_CALL_NUMBER env variable`)); + return null; + } + + if (process.env?.DEBUG_PROMPTS === "true") { + uxLog("log", this, c.grey(`[LangChain] Requesting the following prompt to ${this.modelName}${template ? ' using template ' + template : ''}:\n${promptText}`)); + } else { + uxLog("log", this, c.grey(`[LangChain] Requesting prompt to ${this.modelName}${template ? ' using template ' + template : ''} (define DEBUG_PROMPTS=true to see details)`)); + } + + this.incrementAiCallsNumber(); + + try { + const response = await this.model.invoke([ + { + role: "user", + content: promptText + } + ]); + + if (process.env?.DEBUG_PROMPTS === "true") { + uxLog("log", this, c.grey("[LangChain] Received prompt response\n" + JSON.stringify(response, null, 2))); + } else { + uxLog("log", this, c.grey("[LangChain] Received prompt response")); + } + + const aiResponse: AiResponse = { + success: false, + model: this.modelName, + }; + + if (response.content) { + aiResponse.success = true; + aiResponse.promptResponse = typeof response.content === 'string' ? 
response.content : JSON.stringify(response.content); + } + + return aiResponse; + } catch (error: unknown) { + if (error instanceof Error) { + uxLog("error", this, c.red(`[LangChain] Error while calling LLM API: ${error.message}`)); + } else { + uxLog("error", this, c.red(`[LangChain] Unexpected error occurred`)); + } + return null; + } + } +} \ No newline at end of file diff --git a/src/common/aiProvider/openaiProvider.ts b/src/common/aiProvider/openaiProvider.ts new file mode 100644 index 000000000..3f03dbf02 --- /dev/null +++ b/src/common/aiProvider/openaiProvider.ts @@ -0,0 +1,54 @@ +import { OpenAI } from "openai"; +import { AiResponse } from "./index.js"; +import { AiProviderRoot } from "./aiProviderRoot.js"; +import c from "chalk"; +import { uxLog } from "../utils/index.js"; +import { PromptTemplate } from "./promptTemplates.js"; + +export class OpenAiProvider extends AiProviderRoot { + protected openai: OpenAI; + + constructor() { + super(); + this.openai = new OpenAI(); + } + + public getLabel(): string { + return "OpenAi connector"; + } + + public async promptAi(promptText: string, template: PromptTemplate | null = null): Promise { + if (!this.checkMaxAiCallsNumber()) { + const maxCalls = this.getAiMaxCallsNumber(); + uxLog("warning", this, c.yellow(`[OpenAi] Already performed maximum ${maxCalls} calls. Increase it by defining AI_MAXIMUM_CALL_NUMBER env variable`)); + return null; + } + const gptModel = process.env.OPENAI_MODEL || "gpt-4o-mini"; + if (process.env?.DEBUG_PROMPTS === "true") { + uxLog("log", this, c.grey(`[OpenAi] Requesting the following prompt to ${gptModel}${template ? ' using template ' + template : ''}:\n${promptText}`)); + } + else { + uxLog("log", this, c.grey(`[OpenAi] Requesting prompt to ${gptModel}${template ? 
' using template ' + template : ''} (define DEBUG_PROMPTS=true to see details)`)); + } + this.incrementAiCallsNumber(); + const completion = await this.openai.chat.completions.create({ + messages: [{ role: "system", content: promptText }], + model: gptModel, + }); + if (process.env?.DEBUG_PROMPTS === "true") { + uxLog("log", this, c.grey("[OpenAi] Received prompt response from " + gptModel + "\n" + JSON.stringify(completion, null, 2))); + } + else { + uxLog("log", this, c.grey("[OpenAi] Received prompt response from " + gptModel)); + } + const aiResponse: AiResponse = { + success: false, + model: completion.model, + }; + if (completion?.choices?.length > 0) { + aiResponse.success = true; + aiResponse.promptResponse = completion.choices[0].message.content ?? undefined; + } + return aiResponse; + } +} diff --git a/src/common/aiProvider/openapiProvider.ts b/src/common/aiProvider/openapiProvider.ts deleted file mode 100644 index f48a64524..000000000 --- a/src/common/aiProvider/openapiProvider.ts +++ /dev/null @@ -1,42 +0,0 @@ -import OpenAI from "openai"; -import { AiResponse } from "."; -import { AiProviderRoot } from "./aiProviderRoot"; -import * as c from "chalk"; -import { uxLog } from "../utils"; - -export class OpenApiProvider extends AiProviderRoot { - protected openai: OpenAI; - - constructor() { - super(); - this.openai = new OpenAI(); - } - - public getLabel(): string { - return "OpenApi connector"; - } - - public async promptAi(promptText: string): Promise { - if (!this.checkMaxAiCallsNumber()) { - const maxCalls = this.getAiMaxCallsNumber(); - uxLog(this, c.grey(`[OpenAi] Already performed maximum ${maxCalls} calls. 
Increase it by defining OPENAI_MAXIMUM_CALL_NUMBER`)); - return null; - } - const gptModel = process.env.OPENAI_MODEL || "gpt-4o"; - uxLog(this, c.grey("[OpenAi] Requesting the following prompt to " + gptModel + ": " + promptText + " ...")); - this.incrementAiCallsNumber(); - const completion = await this.openai.chat.completions.create({ - messages: [{ role: "system", content: promptText }], - model: gptModel, - }); - const aiResponse: AiResponse = { - success: false, - model: completion.model, - }; - if (completion?.choices?.length > 0) { - aiResponse.success = true; - aiResponse.promptResponse = completion.choices[0].message.content; - } - return aiResponse; - } -} diff --git a/src/common/aiProvider/promptTemplates.ts b/src/common/aiProvider/promptTemplates.ts new file mode 100644 index 000000000..f51f82c1e --- /dev/null +++ b/src/common/aiProvider/promptTemplates.ts @@ -0,0 +1,123 @@ +import { UtilsAi } from "./utils.js"; +import path from "path"; +import fs from "fs-extra"; +import { PromptTemplateDefinition } from "./promptTemplates/types.js"; +import { PROMPT_TEMPLATES as IMPORTED_PROMPT_TEMPLATES } from "./promptTemplates/index.js"; +import { PROMPT_VARIABLES, PromptVariable } from "./promptTemplates/variablesIndex.js"; +import { uxLog } from "../utils/index.js"; + +export type PromptTemplate = + "PROMPT_SOLVE_DEPLOYMENT_ERROR" | + "PROMPT_DESCRIBE_FLOW" | + "PROMPT_DESCRIBE_FLOW_DIFF" | + "PROMPT_DESCRIBE_OBJECT" | + "PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD" | + "PROMPT_DESCRIBE_APEX" | + "PROMPT_DESCRIBE_PAGE" | + "PROMPT_DESCRIBE_PACKAGE" | + "PROMPT_DESCRIBE_PROFILE" | + "PROMPT_DESCRIBE_PERMISSION_SET" | + "PROMPT_DESCRIBE_PERMISSION_SET_GROUP" | + "PROMPT_DESCRIBE_ROLES" | + "PROMPT_DESCRIBE_ASSIGNMENT_RULES" | + "PROMPT_DESCRIBE_APPROVAL_PROCESS" | + "PROMPT_DESCRIBE_LWC" | + "PROMPT_DESCRIBE_AUTORESPONSE_RULES" | + "PROMPT_DESCRIBE_ESCALATION_RULES"; + +// Loads a template, allowing override from local file, with caching +const promptTemplateCache: 
Record = {}; + +function getPromptTemplate(template: PromptTemplate): PromptTemplateDefinition { + if (promptTemplateCache[template]) { + return promptTemplateCache[template]; + } + + const templateData = { ...IMPORTED_PROMPT_TEMPLATES[template] }; + if (!templateData) { + throw new Error(`Unknown prompt template: ${template}`); + } + + // Check for local override (Text file) + const localPath = path.resolve(process.cwd(), "config", "prompt-templates", `${template}.txt`); + if (fs.existsSync(localPath)) { + try { + const localTemplate = fs.readFileSync(localPath, "utf-8"); + templateData.text = { + "en": localTemplate, + }; + uxLog("log", this, `Loaded local prompt template for ${template} from ${localPath}`); + } catch (e: any) { + // fallback to default if error + uxLog("warning", this, `Error loading local template for ${template}: ${e.message}`); + } + } + + promptTemplateCache[template] = templateData; + return templateData; +} + +// Loads a prompt variable, allowing override from local file, with caching +const promptVariableCache: Record = {}; + +function getPromptVariable(variable: PromptVariable): string { + if (promptVariableCache[variable]) { + return promptVariableCache[variable]; + } + + const variableData = PROMPT_VARIABLES[variable]; + if (!variableData) { + throw new Error(`Unknown prompt variable: ${variable}`); + } + + // Check for local override (Text file) + const localPath = path.resolve(process.cwd(), "config", "prompt-templates", `${variable}.txt`); + if (fs.existsSync(localPath)) { + try { + const localVariable = fs.readFileSync(localPath, "utf-8"); + uxLog("log", this, `Loaded local prompt variable for ${variable} from ${localPath}`); + promptVariableCache[variable] = localVariable; + return localVariable; + } catch (e: any) { + // fallback to default if error + uxLog("warning", this, `Error loading local variable for ${variable}: ${e.message}`); + } + } + + const promptsLanguage = UtilsAi.getPromptsLanguage(); + const value = 
variableData.text?.[promptsLanguage] || variableData.text?.["en"] || ""; + promptVariableCache[variable] = value; + return value; +} + +export function buildPromptFromTemplate(template: PromptTemplate, variables: object): string { + const templateData = getPromptTemplate(template); + const missingVariables = templateData.variables.filter((variable) => !variables[variable.name]); + if (missingVariables.length > 0) { + throw new Error(`Missing variables for prompt template ${template}: ${missingVariables.map(variable => variable.name).join(", ")}`); + } + // Truncate variable values if necessary + for (const variable of templateData.variables) { + if (variable.truncateAfter && variables[variable.name]?.length > variable.truncateAfter) { + variables[variable.name] = variables[variable.name].slice(0, variable.truncateAfter) + "(truncated first " + variable.truncateAfter + " characters on a total of " + variables[variable.name].length + " characters)"; + } + } + + const promptsLanguage = UtilsAi.getPromptsLanguage(); + let prompt: string = process.env?.[template] || templateData.text?.[promptsLanguage] || (templateData.text?.["en"] + `\n\nIMPORTANT: Please reply in the language corresponding to ISO code "${promptsLanguage}" (for example, in french for "fr", in english for "en", in german for "de", etc.)`); + + // Replace prompt variables first (format: {{VARIABLE_NAME}}) + for (const variableName of Object.keys(PROMPT_VARIABLES) as PromptVariable[]) { + const variableContent = getPromptVariable(variableName); + prompt = prompt.replaceAll(`{{${variableName}}}`, variableContent); + } + + // Then replace user variables + for (const variable in variables) { + prompt = prompt.replaceAll(`{{${variable}}}`, variables[variable]); + } + + return prompt; +} + + diff --git a/src/common/aiProvider/promptTemplates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.ts b/src/common/aiProvider/promptTemplates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.ts new file mode 100644 index 000000000..f82b4e083 --- 
/dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.ts @@ -0,0 +1,53 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "OBJECT_NAME", + description: "The API name of the Salesforce object whose fields and validation rules are being described.", + example: "Account" + }, + { + name: "MARKDOWN", + description: "The markdown table containing the fields and validation rules to be reviewed and refined.", + example: "| Field | Label | Description | ... |\n|-------|-------|-------------| ... |" + } + ], + text: { + "en": `You are a skilled Business Analyst working on a Salesforce project. Your task is to review and refine the fields and validation rules of the Salesforce object "{{OBJECT_NAME}}" and describe them in plain English. The goal is to create a detailed, user-friendly explanation of each field and validation rule that a non-technical business user can easily understand. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +## Instructions: +1. **Enhancing Fields Descriptions**: + - If a field's description is missing, generate a meaningful description using the context provided by the other column values (e.g., name, data type, or usage). + - If a field description already exists, improve its clarity and comprehensiveness by incorporating insights from the other column values. + - If an attribute's label is missing, generate a meaningful label using the context provided by the other column values. + +2. **Enhancing Validation Rules Descriptions**: + - If a field's description is missing, generate a meaningful description using the context provided by the other column values (especially formula column). + - If a validation rule description already exists, improve its clarity and comprehensiveness by incorporating insights from the other column values (especially formula column).
+ - If a validation rule label is missing, generate a meaningful label using the context provided by the other column values. + +3. **Output Format**: + - Return the updated descriptions in the **Markdown tables** format provided below. + - Ensure the tables align with Markdown syntax conventions for proper rendering. + +4. **Tone and Style**: + - Use plain English suitable for business users with minimal technical jargon. + - Focus on clarity, completeness, and practical usage examples if applicable. + +5. **Output Requirements**: + - Respond **only in Markdown** format. + - Do not include any additional text or commentary outside of the Markdown. + +## Reference Data: +- Use the following markdown as the basis for your updates: + {{MARKDOWN}} + +## Additional Guidance: +- **Consistency**: Maintain consistent formatting and ensure the descriptions are cohesive across all attributes. +- **Use Examples**: When applicable, include simple examples to illustrate the attribute's purpose or use case. + `, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_APEX.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_APEX.ts new file mode 100644 index 000000000..e8f0c1b61 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_APEX.ts @@ -0,0 +1,36 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "CLASS_NAME", + description: "The name of the Salesforce Apex class to describe.", + example: "MyCustomController" + }, + { + name: "APEX_CODE", + description: "The full source code of the Apex class.", + example: "public class MyCustomController { ... }" + } + ], text: { + "en": `You are a developer working on a Salesforce project. Your goal is to summarize the behavior of the Salesforce Apex class "{{CLASS_NAME}}" in plain English, providing a detailed explanation suitable for a business user. 
{{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the apex class. + - List the key functionalities and business logic implemented in the class. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The code for Apex class "{{CLASS_NAME}}" is: +{{APEX_CODE}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_APPROVAL_PROCESS.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_APPROVAL_PROCESS.ts new file mode 100644 index 000000000..829633fa6 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_APPROVAL_PROCESS.ts @@ -0,0 +1,36 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "APPROVALPROCESS_NAME", + description: "The name of the Salesforce Approval Process to describe.", + example: "Opportunity_Approval" + }, + { + name: "APPROVALPROCESS_XML", + description: "The XML metadata for the Salesforce Approval Process.", + example: "..." + } + ], text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to explain what the Salesforce Approval Process "{{APPROVALPROCESS_NAME}}" is about in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose of the approval process. + - List the key functionalities and business logic implemented in the approval process. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Approval Process "{{APPROVALPROCESS_NAME}}" is: +{{APPROVALPROCESS_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.ts new file mode 100644 index 000000000..abed455ae --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ASSIGNMENT_RULES.ts @@ -0,0 +1,38 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "ASSIGNMENTRULES_NAME", + description: "The name of the Salesforce Assignment Rules to describe.", + example: "Case_Assignment_Rules" + }, + { + name: "ASSIGNMENTRULES_XML", + description: "The XML metadata for the Salesforce Assignment Rules.", + example: "..." + } + ], text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce Assignment Rules "{{ASSIGNMENTRULES_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by explaining the role of the Salesforce Assignment Rules that you can guess according to the content of the XML and the name. + Try to guess the role of users assigned to this assignment rule. Do not mention the email of assigned users, but you can mention type of assigned users. + Based by Criteria items, explain what should so the record will be assigned. + - Analyze all the assignment rules for objects and in the description tell what are the aim of those rules. What is the role of the object in the system, based by the assignment rules. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Assignment Rule "{{ASSIGNMENTRULES_NAME}}" is: +{{ASSIGNMENTRULES_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.ts new file mode 100644 index 000000000..aa9032660 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_AUTORESPONSE_RULES.ts @@ -0,0 +1,38 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "AUTORESPONSERULES_NAME", + description: "The name of the Salesforce AutoResponse Rules to describe.", + example: "Case_AutoResponse_Rules" + }, + { + name: "AUTORESPONSERULES_XML", + description: "The XML metadata for the Salesforce AutoResponse Rules.", + example: "..." + } + ], text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce AutoResponse Rules "{{AUTORESPONSERULES_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by explaining the role of the Salesforce AutoResponse Rules that you can guess according to the content of the XML and the name. + Try to guess the role of users assigned to this AutoResponse rule. Do not mention the email of assigned users, but you can mention type of assigned users. + - Analyze all the AutoResponse rules for objects and in the description tell what are the aim of those rules. What is the role of the object in the system, based by the AutoResponse rules. + - Based by Criteria items, explain what would be the response to the user, if the criteria are met. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce AutoResponse Rule "{{AUTORESPONSERULES_NAME}}" is: +{{AUTORESPONSERULES_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ESCALATION_RULES.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ESCALATION_RULES.ts new file mode 100644 index 000000000..6b7d5f788 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ESCALATION_RULES.ts @@ -0,0 +1,36 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "ESCALATIONRULES_NAME", + description: "The name of the Salesforce Escalation Rule to describe.", + example: "Case_Escalation_Rule" + }, + { + name: "ESCALATIONRULES_XML", + description: "The XML metadata for the Salesforce Escalation Rule.", + example: "..." + } + ], text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to explain the what is the Salesforce Escalation Rule "{{ESCALATIONRULES_NAME}}" about in plain English, provide a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose of the escalation rule. + - List the key functionalities and business logic implemented in the escalation rule. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Escalation Rule "{{ESCALATIONRULES_NAME}}" is: +{{ESCALATIONRULES_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_FLOW.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_FLOW.ts new file mode 100644 index 000000000..aaae1288a --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_FLOW.ts @@ -0,0 +1,36 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "FLOW_XML", + description: "The XML definition of the Salesforce Flow to describe.", + example: "..." + } + ], text: { + "en": `You are a business analyst working on a Salesforce project. Your goal is to describe the Salesforce Flow in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose and business context of the flow. + - Explain what business process or automation this flow supports. + +2. **Step-by-Step Description**: + - Describe the main steps, decisions, and actions in the flow. + - Use plain English and avoid technical jargon when possible. + - If there are sub-flows or important conditions, mention them clearly. + +3. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The flow XML is: +{{FLOW_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_FLOW_DIFF.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_FLOW_DIFF.ts new file mode 100644 index 000000000..19e779986 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_FLOW_DIFF.ts @@ -0,0 +1,47 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "FLOW_XML_NEW", + description: "The XML definition of the new version of the Salesforce Flow.", + example: "..." + }, + { + name: "FLOW_XML_PREVIOUS", + description: "The XML definition of the previous version of the Salesforce Flow.", + example: "..." + } + ], + text: { + "en": `You are a business analyst working on a Salesforce project. Your goal is to describe the differences between the new and previous versions of a Salesforce Flow in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose of the flow and the context for the changes. + - Explain why a new version was created if possible. + +2. **Describe the Differences**: + - List and explain the key changes between the new and previous versions. + - Ignore tags related to location attributes (locationX and locationY) or positions: do not mention them in your response. + - Ignore nodes and elements that have not changed: do not mention them in your response. + - Ignore connector changes: do not mention them in your response. + - Use plain English and avoid technical jargon when possible. 
+ +{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The new version flow XML is: +{{FLOW_XML_NEW}} + +- The previous version flow XML is: +{{FLOW_XML_PREVIOUS}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_LWC.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_LWC.ts new file mode 100644 index 000000000..1d171ff8d --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_LWC.ts @@ -0,0 +1,66 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "LWC_NAME", + description: "The name of the Lightning Web Component to describe.", + example: "myCustomComponent" + }, + { + name: "LWC_JS_CODE", + description: "The JavaScript code of the Lightning Web Component.", + example: "import { LightningElement } from 'lwc'; ..." + }, + { + name: "LWC_HTML_CODE", + description: "The HTML template code of the Lightning Web Component.", + example: "" + }, + { + name: "LWC_JS_META", + description: "The meta configuration file for the Lightning Web Component.", + example: "..." + } + ], + text: { + "en": `You are a skilled Salesforce developer working on a Lightning Web Components (LWC) project. Your goal is to explain the Salesforce Lightning Web Component "{{LWC_NAME}}" in plain English, providing a detailed explanation suitable for other developers and business users. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the purpose and functionality of the Lightning Web Component. + - Describe the key features and capabilities it provides to users. + - Explain how it interacts with Salesforce data or other components. + +2. **Technical Analysis**: + - Describe the main JavaScript methods and their purposes. + - Explain how the component handles data binding and events. 
+ - Mention any wire services, apex methods, or external services the component uses. + - Identify any custom properties or special configurations. + +{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The HTML template for component "{{LWC_NAME}}": +\`\`\` +{{LWC_HTML_CODE}} +\`\`\` + +- The JavaScript controller for component "{{LWC_NAME}}": +\`\`\` +{{LWC_JS_CODE}} +\`\`\` + +- The metadata configuration for component "{{LWC_NAME}}": +\`\`\` +{{LWC_JS_META}} +\`\`\` + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_OBJECT.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_OBJECT.ts new file mode 100644 index 000000000..a4fa47ee5 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_OBJECT.ts @@ -0,0 +1,63 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "OBJECT_NAME", + description: "The API name of the Salesforce object to describe.", + example: "Account" + }, + { + name: "OBJECT_XML", + description: "The XML metadata definition of the Salesforce object.", + example: "..." + }, + { + name: "ALL_OBJECTS_LIST", + description: "A list of all objects in the Salesforce org.", + example: "Account, Contact, Opportunity, ..." + }, + { + name: "ALL_OBJECT_LINKS", + description: "The object model (MasterDetail and Lookup relationships) for all objects.", + example: "Account->Contact (Lookup), Opportunity->Account (MasterDetail)" + } + ], + text: { + "en": `You are a business analyst working on a Salesforce project. Your goal is to describe the Salesforce object "{{OBJECT_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role and purpose of the object "{{OBJECT_NAME}}" in the Salesforce org. 
+ - Explain its significance in the project, its purpose in the org's implementation, and any key business processes it supports. + +2. **Relationships**: + - Use the provided object model data to describe how "{{OBJECT_NAME}}" relates to other objects. + - Include: + - Direct relationships (MasterDetail and Lookup fields on the object). + - Inverse relationships (other objects referencing "{{OBJECT_NAME}}"). + - Highlight any key dependencies or implications of these relationships in plain English. + +3. **Additional Guidance**: + - **Do NOT include** fields table or validation rules table in the response + - Use the acronyms provided to interpret metadata names (e.g., TR: Trigger, VR: Validation Rule, WF: Workflow). + - If the XML metadata contains sensitive information (e.g., tokens, passwords), replace them with a placeholder (e.g., \`[REDACTED]\`). + +4. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The list of all objects in the Salesforce org is: {{ALL_OBJECTS_LIST}} + +- The object model (MasterDetail and Lookup relationships) is: {{ALL_OBJECT_LINKS}} + +- The metadata XML for "{{OBJECT_NAME}}" is: +{{OBJECT_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PACKAGE.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PACKAGE.ts new file mode 100644 index 000000000..0293f9e28 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PACKAGE.ts @@ -0,0 +1,64 @@ +import { PromptTemplateDefinition } from "./types.js"; + + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "PACKAGE_NAME", + description: "The name of the package to describe.", + example: "Pardot" + }, + { + name: "PACKAGE_XML", + description: "The JsonL metadata for the package", + example: 
"{\"SubscriberPackageName\":\"Pardot\",\"SubscriberPackageNamespace\":\"pi\",\"SubscriberPackageVersionNumber\":\"1.0.0\",\"SubscriberPackageVersionId\":\"04t1t0000000abcAAA\",\"SubscriberPackageVersionName\":\"Pardot Version 1.0\"}" + }, + { + name: "PACKAGE_METADATAS", + description: "A list of all metadata items (Apex classes, objects, flows, etc.) in the org that are provided by this package (namespaced).", + example: "ApexClass: pi__MyClass, CustomObject: pi__MyObject, Flow: pi__MyFlow", + truncateAfter: 100000 + } + ], + text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce Installed package "{{PACKAGE_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Browse the internet using Google to find the package's official documentation and provide an overview of its purpose and capabilities, with links to the documentation. + - If you found the package's official documentation, summarize its key features and functionalities. + - If you can not find the package's official documentation, provide a general overview based on the package attributes and its metadata components (but do not output the list of metadatas, it will be for paragraph 2). + - Include any relevant information about the package's intended use cases or target audience. + - If you can find other relevant information about the package, like articles or blog posts, in english or in the prompt reply language, provide them as a list of links + - If you find the AppExchange page, include it in your response. Otherwise, don't mention it. + - If you find the package's GitHub repository, include it in your response. Otherwise, don't mention it. + - If you find the vendor information, include it in your response. Otherwise, don't mention it. 
+ - Make sure that hyperlinks are not dead links leading to 404 pages. + +2. **Package Metadata**: + - Review the list of metadata items (Apex classes, objects, flows, etc.) provided by this package, as listed in reference data. + - Highlight the most important or business-relevant components. + +3. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The attributes for Installed package "{{PACKAGE_NAME}}" are: +{{PACKAGE_XML}} + +- The list of metadata items provided by this package is: +{{PACKAGE_METADATAS}} + +- Many Salesforce managed packages are published by third-party vendors. You can find the package's vendor information in the Salesforce AppExchange (https://appexchange.salesforce.com/). + +- There are also many open-source packages available on GitHub (github.com) + +- Other relevant sources for articles or blog posts about the package may include the vendor's website, community forums, or Salesforce-related blogs, like Salesforce Ben or medium.com. Do not mention these sources if you don't have a direct link to a page explicitly related to package "{{PACKAGE_NAME}}". + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PAGE.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PAGE.ts new file mode 100644 index 000000000..f9515d467 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PAGE.ts @@ -0,0 +1,37 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "PAGE_NAME", + description: "The name of the Salesforce Lightning Page to describe.", + example: "Account_Record_Page" + }, + { + name: "PAGE_XML", + description: "The XML metadata for the Lightning Page.", + example: "..." + } + ], + text: { + "en": `You are a skilled business analyst working on a Salesforce project. 
Your goal is to summarize the content and behavior of the Salesforce Lightning Page "{{PAGE_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the lightning page. + - List the key tabs, sections, views, related lists and actions described in the lightning page. + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Lightning page "{{PAGE_NAME}}" is: +{{PAGE_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PERMISSION_SET.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PERMISSION_SET.ts new file mode 100644 index 000000000..bf5009a70 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PERMISSION_SET.ts @@ -0,0 +1,39 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "PERMISSIONSET_NAME", + description: "The name of the Salesforce Permission Set to describe.", + example: "PS_CloudityAccount" + }, + { + name: "PERMISSIONSET_XML", + description: "The XML metadata for the Salesforce Permission Set.", + example: "..." + } + ], text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce PermissionSet "{{PERMISSIONSET_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the Salesforce PermissionSet that you can guess according to the content of the XML. Try to guess the role of users assigned to this permission set according to applicationVisibilities, objectVisibilities and userPermissions. 
+ - List the key features of the Permission Set. + - The most important features are License, Applications, User Permissions, features with default values, Custom Objects and Record Types + - Ignore Apex classes and Custom Fields + - Ignore blocks whose access or visibility is set to "false" + +2. {{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Permission Set "{{PERMISSIONSET_NAME}}" is: +{{PERMISSIONSET_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.ts new file mode 100644 index 000000000..d48f14b17 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PERMISSION_SET_GROUP.ts @@ -0,0 +1,36 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "PERMISSIONSETGROUP_NAME", + description: "The name of the Salesforce Permission Set Group to describe.", + example: "PS_CloudityAdmin" + }, + { + name: "PERMISSIONSETGROUP_XML", + description: "The XML metadata for the Salesforce Permission Set Group.", + example: "..." + } + ], text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce PermissionSetGroup "{{PERMISSIONSETGROUP_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the Salesforce PermissionSetGroup that you can guess according to the content of the XML. Try to guess the role of users assigned to this permission set group according to the name, description and related Permission Sets + - List the key features of the Permission Set Group. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Permission Set Group "{{PERMISSIONSETGROUP_NAME}}" is: +{{PERMISSIONSETGROUP_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PROFILE.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PROFILE.ts new file mode 100644 index 000000000..b8bb309fb --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_PROFILE.ts @@ -0,0 +1,40 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "PROFILE_NAME", + description: "The name of the Salesforce Profile to describe.", + example: "Cloudity Sales" + }, + { + name: "PROFILE_XML", + description: "The XML metadata for the Salesforce Profile.", + example: "..." + } + ], + text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the content and behavior of the Salesforce Profile "{{PROFILE_NAME}}" in plain English, providing a detailed explanation suitable for a business user. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Begin by summarizing the role of the Salesforce Profile that you can guess according to the content of the XML. Try to guess the role of users assigned to this profile according to applicationVisibilities, objectVisibilities and userPermissions. + - List the key features of the Profile. + - The most important features are License, Applications, User Permissions, features with default values, Custom Objects and Record Types + - Ignore Apex classes and Custom Fields + - Ignore blocks whose access or visibility is set to "false" + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The metadata XML for Salesforce Profile "{{PROFILE_NAME}}" is: +{{PROFILE_XML}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ROLES.ts b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ROLES.ts new file mode 100644 index 000000000..29a2f3869 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_DESCRIBE_ROLES.ts @@ -0,0 +1,35 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "ROLES_DESCRIPTION", + description: "Description of all roles of the org", + example: "- **Role Name (id:role_api_name)**: Role description (parentId: parent_role_id)\n- **Another Role (id:another_role_api_name)**: Another role description (parentId: another_parent_role_id)\n - **Root Role (id:root_role_api_name)**: Root role description (parentId: ROOT)", + truncateAfter: 100000 + }, + ], + text: { + "en": `You are a skilled business analyst working on a Salesforce project. Your goal is to summarize the business organization of the company. {{VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC}} + +### Instructions: + +1. **Contextual Overview**: + - Analyze the provided role hierarchy data to understand the organizational structure. + - Identify key roles and their relationships within the hierarchy. + - Summarize the roles in a way that is clear and understandable for business stakeholders. + - Ensure the summary is concise yet comprehensive, highlighting the most important aspects of the role hierarchy. + +2. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The description of all role hierarchies is: +{{ROLES_DESCRIPTION}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/PROMPT_SOLVE_DEPLOYMENT_ERROR.ts b/src/common/aiProvider/promptTemplates/PROMPT_SOLVE_DEPLOYMENT_ERROR.ts new file mode 100644 index 000000000..ba59ee2dd --- /dev/null +++ b/src/common/aiProvider/promptTemplates/PROMPT_SOLVE_DEPLOYMENT_ERROR.ts @@ -0,0 +1,36 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [ + { + name: "ERROR", + description: "The Salesforce deployment error message to analyze and solve.", + example: "Cannot deploy component: missing field 'X' on object 'Y'" + } + ], text: { + "en": `You are a Salesforce release manager using Salesforce CLI commands to perform deployments. Your goal is to help solve the following Salesforce deployment error in a clear, actionable way for a technical user. + +### Instructions: + +1. **Error Analysis**: + - Analyze the error message and identify the root cause. + - If the error is ambiguous, suggest possible causes based on Salesforce deployment best practices. + +2. **Solution Proposal**: + - Provide a step-by-step solution to resolve the error. + - If applicable, include the correct sfdx source format or XML example. + - Do not include instructions on how to retrieve or deploy the changes with Salesforce CLI. + +3. 
{{VARIABLE_FORMATTING_REQUIREMENTS}} + +### Reference Data: + +- The deployment error returned by Salesforce CLI is: +{{ERROR}} + +{{VARIABLE_ADDITIONAL_INSTRUCTIONS}} +`, + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/VARIABLE_ADDITIONAL_INSTRUCTIONS.ts b/src/common/aiProvider/promptTemplates/VARIABLE_ADDITIONAL_INSTRUCTIONS.ts new file mode 100644 index 000000000..6f1da9745 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/VARIABLE_ADDITIONAL_INSTRUCTIONS.ts @@ -0,0 +1,13 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [], + text: { + "en": `### Additional Instructions + +- Caution: Redact any sensitive information (tokens, passwords, API keys, etc.) and replace with \`[HIDDEN_SENSITIVE_INFOS]\`. +- Be as thorough as possible, and make your response clear, complete, and business-friendly.` + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/VARIABLE_FORMATTING_REQUIREMENTS.ts b/src/common/aiProvider/promptTemplates/VARIABLE_FORMATTING_REQUIREMENTS.ts new file mode 100644 index 000000000..3da815284 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/VARIABLE_FORMATTING_REQUIREMENTS.ts @@ -0,0 +1,15 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [], + text: { + "en": `**Formatting Requirements**: + - Use markdown formatting suitable for embedding in a level 2 header (\`##\`). + - Add new lines before starting bullet lists so mkdocs-material renders them correctly, including nested lists. + - Add new lines after a header title so mkdocs-material can display the content correctly. + - Never truncate any information in the response. 
+ - Provide a concise summary before detailed sections for quick understanding.` + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.ts b/src/common/aiProvider/promptTemplates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.ts new file mode 100644 index 000000000..d98d76c4d --- /dev/null +++ b/src/common/aiProvider/promptTemplates/VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.ts @@ -0,0 +1,10 @@ +import { PromptTemplateDefinition } from "./types.js"; + +const template: PromptTemplateDefinition = { + variables: [], + text: { + "en": `The output will be in markdown format, which will be used in a documentation site aiming to retrospectively document the Salesforce org.` + }, +}; + +export default template; diff --git a/src/common/aiProvider/promptTemplates/index.ts b/src/common/aiProvider/promptTemplates/index.ts new file mode 100644 index 000000000..601ce7dbd --- /dev/null +++ b/src/common/aiProvider/promptTemplates/index.ts @@ -0,0 +1,39 @@ +import { PromptTemplateDefinition } from "./types.js"; + +import PROMPT_SOLVE_DEPLOYMENT_ERROR from "./PROMPT_SOLVE_DEPLOYMENT_ERROR.js"; +import PROMPT_DESCRIBE_FLOW from "./PROMPT_DESCRIBE_FLOW.js"; +import PROMPT_DESCRIBE_FLOW_DIFF from "./PROMPT_DESCRIBE_FLOW_DIFF.js"; +import PROMPT_DESCRIBE_OBJECT from "./PROMPT_DESCRIBE_OBJECT.js"; +import PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD from "./PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD.js"; +import PROMPT_DESCRIBE_APEX from "./PROMPT_DESCRIBE_APEX.js"; +import PROMPT_DESCRIBE_PAGE from "./PROMPT_DESCRIBE_PAGE.js"; +import PROMPT_DESCRIBE_PROFILE from "./PROMPT_DESCRIBE_PROFILE.js"; +import PROMPT_DESCRIBE_PERMISSION_SET from "./PROMPT_DESCRIBE_PERMISSION_SET.js"; +import PROMPT_DESCRIBE_PERMISSION_SET_GROUP from "./PROMPT_DESCRIBE_PERMISSION_SET_GROUP.js"; +import PROMPT_DESCRIBE_ASSIGNMENT_RULES from "./PROMPT_DESCRIBE_ASSIGNMENT_RULES.js"; +import PROMPT_DESCRIBE_APPROVAL_PROCESS from "./PROMPT_DESCRIBE_APPROVAL_PROCESS.js"; +import 
PROMPT_DESCRIBE_LWC from "./PROMPT_DESCRIBE_LWC.js"; +import PROMPT_DESCRIBE_AUTORESPONSE_RULES from "./PROMPT_DESCRIBE_AUTORESPONSE_RULES.js"; +import PROMPT_DESCRIBE_ESCALATION_RULES from "./PROMPT_DESCRIBE_ESCALATION_RULES.js"; +import PROMPT_DESCRIBE_PACKAGE from "./PROMPT_DESCRIBE_PACKAGE.js"; +import PROMPT_DESCRIBE_ROLES from "./PROMPT_DESCRIBE_ROLES.js"; + +export const PROMPT_TEMPLATES: Record = { + PROMPT_SOLVE_DEPLOYMENT_ERROR, + PROMPT_DESCRIBE_FLOW, + PROMPT_DESCRIBE_FLOW_DIFF, + PROMPT_DESCRIBE_OBJECT, + PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD, + PROMPT_DESCRIBE_APEX, + PROMPT_DESCRIBE_PAGE, + PROMPT_DESCRIBE_PACKAGE, + PROMPT_DESCRIBE_PROFILE, + PROMPT_DESCRIBE_PERMISSION_SET, + PROMPT_DESCRIBE_PERMISSION_SET_GROUP, + PROMPT_DESCRIBE_ASSIGNMENT_RULES, + PROMPT_DESCRIBE_APPROVAL_PROCESS, + PROMPT_DESCRIBE_LWC, + PROMPT_DESCRIBE_AUTORESPONSE_RULES, + PROMPT_DESCRIBE_ESCALATION_RULES, + PROMPT_DESCRIBE_ROLES, +}; diff --git a/src/common/aiProvider/promptTemplates/types.ts b/src/common/aiProvider/promptTemplates/types.ts new file mode 100644 index 000000000..482eb48b7 --- /dev/null +++ b/src/common/aiProvider/promptTemplates/types.ts @@ -0,0 +1,13 @@ +export interface PromptTemplateVariable { + name: string; + description: string; + example: string; + truncateAfter?: number; +} + +export interface PromptTemplateDefinition { + variables: PromptTemplateVariable[]; + text: { + [language: string]: string; + }; +} diff --git a/src/common/aiProvider/promptTemplates/variablesIndex.ts b/src/common/aiProvider/promptTemplates/variablesIndex.ts new file mode 100644 index 000000000..14db1ee4e --- /dev/null +++ b/src/common/aiProvider/promptTemplates/variablesIndex.ts @@ -0,0 +1,13 @@ +import { PromptTemplateDefinition } from "./types.js"; + +import VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC from "./VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC.js"; +import VARIABLE_FORMATTING_REQUIREMENTS from "./VARIABLE_FORMATTING_REQUIREMENTS.js"; +import VARIABLE_ADDITIONAL_INSTRUCTIONS from 
"./VARIABLE_ADDITIONAL_INSTRUCTIONS.js"; + +export const PROMPT_VARIABLES: Record = { + VARIABLE_OUTPUT_FORMAT_MARKDOWN_DOC, + VARIABLE_FORMATTING_REQUIREMENTS, + VARIABLE_ADDITIONAL_INSTRUCTIONS, +}; + +export type PromptVariable = keyof typeof PROMPT_VARIABLES; diff --git a/src/common/aiProvider/utils.ts b/src/common/aiProvider/utils.ts index e3b93806a..f4b98f1ad 100644 --- a/src/common/aiProvider/utils.ts +++ b/src/common/aiProvider/utils.ts @@ -1,10 +1,93 @@ -import { getEnvVar } from "../../config"; +import { getEnvVar } from "../../config/index.js"; +import { PromptTemplate } from "./promptTemplates.js"; +import path from 'path'; +import fs from 'fs-extra'; +import { XMLParser } from "fast-xml-parser"; +import farmhash from 'farmhash'; export class UtilsAi { - public static isOpenApiAvailable() { + public static isOpenAiAvailable() { if (getEnvVar("OPENAI_API_KEY")) { return true; } return false; } + + public static isLangChainAvailable() { + if (getEnvVar("USE_LANGCHAIN_LLM") === "true" && getEnvVar("LANGCHAIN_LLM_MODEL")) { + return true; + } + return false; + } + + public static isAgentforceAvailable() { + if (getEnvVar("USE_AGENTFORCE") === "true" && (globalThis.jsForceConn || globalThis.jsForceConnTechnical)) { + return true; + } + return false; + } + + public static getPromptsLanguage(): string { + return process.env.PROMPTS_LANGUAGE || "en"; + } + + public static async findAiCache(template: PromptTemplate, promptParameters: any[], uniqueId: string): Promise<{ success: boolean, cacheText?: string, fingerPrint: string, aiCacheDirFile: string }> { + const fingerPrint = this.getFingerPrint(promptParameters); + const lang = this.getPromptsLanguage(); + + // Manual override by user + const aiManualOverride = path.join("docs", "cache-ai-results", `${lang}-${template}-${uniqueId}.md`); + if (fs.existsSync(aiManualOverride)) { + const cacheText = await fs.readFile(aiManualOverride, "utf8"); + return { success: true, cacheText, fingerPrint, aiCacheDirFile: 
aiManualOverride.replace(/\\/g, '/') }; + } + + // Cache of latest generated AI result + const aiCacheDirFile = path.join("docs", "cache-ai-results", `${lang}-${template}-${uniqueId}-${fingerPrint}.md`); + if (process.env?.IGNORE_AI_CACHE === "true") { + return { success: false, fingerPrint, aiCacheDirFile: aiCacheDirFile.replace(/\\/g, '/') }; + } + if (fs.existsSync(aiCacheDirFile)) { + const cacheText = await fs.readFile(aiCacheDirFile, "utf8"); + return { success: true, cacheText, fingerPrint, aiCacheDirFile: aiCacheDirFile.replace(/\\/g, '/') }; + } + return { success: false, fingerPrint, aiCacheDirFile: aiCacheDirFile.replace(/\\/g, '/') }; + } + + public static async writeAiCache(template: PromptTemplate, promptParameters: any[], uniqueId: string, aiCacheText: string): Promise { + const fingerPrint = this.getFingerPrint(promptParameters); + const aiCacheDir = path.join("docs", "cache-ai-results"); + await fs.ensureDir(aiCacheDir); + const lang = this.getPromptsLanguage(); + const aiCacheDirFile = path.join(aiCacheDir, `${lang}-${template}-${uniqueId}-${fingerPrint}.md`); + const otherCacheFiles = fs.readdirSync(aiCacheDir).filter((file) => file.includes(`${lang}-${template}-${uniqueId}`) && !file.includes(fingerPrint)); + for (const otherCacheFile of otherCacheFiles) { + await fs.remove(path.join(aiCacheDir, otherCacheFile)); + } + await fs.writeFile(aiCacheDirFile, aiCacheText); + } + + public static getFingerPrint(promptParameters: any[]): string { + const parametersFingerPrints = promptParameters.map((promptParameter) => { + if (typeof promptParameter === "string" && promptParameter.includes(" => { @@ -46,7 +46,7 @@ export const setCache = async (key: string, val: any): Promise => { }; // Clear cache property, or all cache if property is empty -export const clearCache = async (key: string = null): Promise => { +export const clearCache = async (key: string | null = null): Promise => { await readCache(); if (key) { delete MEMORY_CACHE[key]; diff --git 
a/src/common/cryptoUtils.ts b/src/common/cryptoUtils.ts index 508eba608..e3eacd630 100644 --- a/src/common/cryptoUtils.ts +++ b/src/common/cryptoUtils.ts @@ -1,43 +1,46 @@ -"use strict"; - -import * as crypto from "crypto"; -import * as fs from "fs-extra"; +import * as crypto from 'crypto'; +import c from "chalk"; +import fs from 'fs-extra'; //const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY; // Must be 256 bits (32 characters) const IV_LENGTH = 16; // For AES, this is always 16 export async function encryptFile(filePath) { - const fileContent = await fs.readFile(filePath, "utf8"); + const fileContent = await fs.readFile(filePath, 'utf8'); const encryptedFileContent = encrypt(fileContent); await fs.writeFile(filePath, encryptedFileContent.text); return encryptedFileContent.encryptionKey; } export async function decryptFile(filePath, targetFile, encryptionKey) { - const fileContent = await fs.readFile(filePath, "utf8"); - const decryptedFileContent = decrypt(fileContent, encryptionKey); - await fs.writeFile(targetFile, decryptedFileContent); + try { + const fileContent = await fs.readFile(filePath, 'utf8'); + const decryptedFileContent = decrypt(fileContent, encryptionKey); + await fs.writeFile(targetFile, decryptedFileContent); + } catch (error: any) { + console.error(c.red(`Error while decrypting file ${filePath}: ${error.message}`)); + } } export function encrypt(text) { const iv = crypto.randomBytes(IV_LENGTH); - const encryptionKey = crypto.randomBytes(16).toString("hex"); - const cipher = crypto.createCipheriv("aes-256-cbc", Buffer.from(encryptionKey), iv); + const encryptionKey = crypto.randomBytes(16).toString('hex'); + const cipher = crypto.createCipheriv('aes-256-cbc', Buffer.from(encryptionKey), iv); let encrypted = cipher.update(text); encrypted = Buffer.concat([encrypted, cipher.final()]); return { - text: iv.toString("hex") + ":" + encrypted.toString("hex"), + text: iv.toString('hex') + ':' + encrypted.toString('hex'), encryptionKey: encryptionKey, }; 
} export function decrypt(text, encryptionKey) { - const textParts = text.split(":"); - const iv = Buffer.from(textParts.shift(), "hex"); - const encryptedText = Buffer.from(textParts.join(":"), "hex"); - const decipher = crypto.createDecipheriv("aes-256-cbc", Buffer.from(encryptionKey), iv); + const textParts = text.split(':'); + const iv = Buffer.from(textParts.shift(), 'hex'); + const encryptedText = Buffer.from(textParts.join(':'), 'hex'); + const decipher = crypto.createDecipheriv('aes-256-cbc', Buffer.from(encryptionKey), iv); let decrypted = decipher.update(encryptedText); decrypted = Buffer.concat([decrypted, decipher.final()]); diff --git a/src/common/docBuilder/docBuilderApex.ts b/src/common/docBuilder/docBuilderApex.ts new file mode 100644 index 000000000..995784ba1 --- /dev/null +++ b/src/common/docBuilder/docBuilderApex.ts @@ -0,0 +1,142 @@ +import * as fs from "fs"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { sortCrossPlatform } from "../utils/index.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; + +export class DocBuilderApex extends DocBuilderRoot { + + public docType = "APEX"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_APEX"; + public placeholder = ""; + + public static buildIndexTable(prefix: string, apexDescriptions: any[], filterObject: string | null = null): string[] { + const filteredApex = filterObject ? apexDescriptions.filter(apex => apex.impactedObjects.includes(filterObject)) : apexDescriptions; + if (filteredApex.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? 
"## Related Apex Classes" : "## Apex Classes", + "", + "| Apex Class | Type |", + "| :---- | :--: | " + ]); + for (const apex of filteredApex) { + const flowNameCell = `[${apex.name}](${prefix}${apex.name}.md)`; + lines.push(...[ + `| ${flowNameCell} | ${apex.type} |` + ]); + } + lines.push(""); + return lines; + } + + // Build Mermaid class diagram with all direct and reverse relationships with className + public static buildMermaidClassDiagram(className: string, apexDescriptions: any[]): string { + const classNameDescription = apexDescriptions.find(apex => apex.name === className); + if (!classNameDescription) { + return ""; + } + const relatedClasses: string[] = sortCrossPlatform(classNameDescription.relatedClasses || []); + const reverseRelatedClasses: string[] = sortCrossPlatform( + apexDescriptions.map(apex => ({ + name: apex.name, + relatedClasses: apex.relatedClasses || [] + })).filter(apex => apex.relatedClasses.includes(className)).map(apex => apex.name)); + const allRelatedClasses = [...new Set([...relatedClasses, ...reverseRelatedClasses])]; + + const lines: string[] = ["## Class Diagram"]; + lines.push(""); + lines.push("```mermaid"); + lines.push("graph TD"); + lines.push(` ${className}["${className}"]:::mainApexClass`); + if (fs.existsSync(`docs/apex/${className}.md`)) { + lines.push(` click ${className} "/objects/${className}/"`); + } + + // Declare all classes related to the className + for (const relatedClassName of allRelatedClasses) { + const relatedClassDescription = apexDescriptions.find(apex => apex.name === relatedClassName); + if (relatedClassDescription?.type.includes("Test")) { + lines.push(` ${relatedClassName}["${relatedClassName}"]:::apexTestClass`); + } + else { + lines.push(` ${relatedClassName}["${relatedClassName}"]:::apexClass`); + } + if (fs.existsSync(`docs/apex/${relatedClassName}.md`)) { + lines.push(` click ${relatedClassName} "/apex/${relatedClassName}/"`); + } + } + lines.push(""); + let pos = 0; + const directLinksPos: 
number[] = []; + const reverseLinksPos: number[] = []; + const transverseLinksPos: number[] = []; + // Add relationships + for (const relatedClassName of relatedClasses) { + if (relatedClassName !== className) { + lines.push(` ${className} --> ${relatedClassName}`); + directLinksPos.push(pos); + pos++; + } + } + lines.push(""); + // Add reverse relationships + for (const relatedClassName of reverseRelatedClasses) { + if (relatedClassName !== className) { + lines.push(` ${relatedClassName} --> ${className}`); + reverseLinksPos.push(pos); + pos++; + } + } + lines.push(""); + + // If the number of lines is not too big, calculate relations between related classes using allRelatedClasses and apexDescriptions + // This is to avoid too many links in the diagram + if (allRelatedClasses.length > 10) { + lines.push(" %% Too many related classes, skipping transverse links"); + lines.push(""); + } + else { + for (const relatedClassName of allRelatedClasses) { + const relatedClassDescription = apexDescriptions.find(apex => apex.name === relatedClassName); + if (relatedClassDescription) { + const relatedRelatedClasses = relatedClassDescription.relatedClasses || []; + for (const otherRelatedClassName of relatedRelatedClasses) { + if (otherRelatedClassName !== className && allRelatedClasses.includes(otherRelatedClassName) && otherRelatedClassName !== relatedClassName) { + lines.push(` ${relatedClassName} --> ${otherRelatedClassName}`); + transverseLinksPos.push(pos); + pos++; + } + } + } + } + } + + // Add styles for classes + lines.push(""); + lines.push(`classDef apexClass fill:#FFF4C2,stroke:#CCAA00,stroke-width:3px,rx:12px,ry:12px,shadow:drop,color:#333;`); + lines.push(`classDef apexTestClass fill:#F5F5F5,stroke:#999999,stroke-width:3px,rx:12px,ry:12px,shadow:drop,color:#333;`); + lines.push(`classDef mainApexClass fill:#FFB3B3,stroke:#A94442,stroke-width:4px,rx:14px,ry:14px,shadow:drop,color:#333,font-weight:bold;`); + lines.push(""); + // Add classes to links + if 
(directLinksPos.length > 0) { + lines.push("linkStyle " + directLinksPos.join(",") + " stroke:#4C9F70,stroke-width:4px;"); + } + if (reverseLinksPos.length > 0) { + lines.push("linkStyle " + reverseLinksPos.join(",") + " stroke:#FF8C00,stroke-width:2px;"); + } + if (transverseLinksPos.length > 0) { + lines.push("linkStyle " + transverseLinksPos.join(",") + " stroke:#A6A6A6,stroke-width:2px;"); + } + lines.push("```"); + + // Use Graph LR if there are too many lines for a nice mermaid display + if (lines.length > 50) { + lines[3] = "graph LR"; + } + + return lines.join("\n"); + } + +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderApprovalProcess.ts b/src/common/docBuilder/docBuilderApprovalProcess.ts new file mode 100644 index 000000000..fbd026733 --- /dev/null +++ b/src/common/docBuilder/docBuilderApprovalProcess.ts @@ -0,0 +1,73 @@ +import { buildGenericMarkdownTable } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { XMLBuilder, XMLParser } from "fast-xml-parser"; + +export class DocBuilderApprovalProcess extends DocBuilderRoot { + + public docType = "ApprovalProcess"; + public placeholder = ""; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_APPROVAL_PROCESS"; + public xmlRootKey = "ApprovalProcess"; + + public static buildIndexTable(prefix: string, approvalProcessDescriptions: any, filterObject: string | null = null) { + const filteredApprovalProcesses = filterObject ? approvalProcessDescriptions.filter(appProcess => appProcess.impactedObjects.includes(filterObject)) : approvalProcessDescriptions; + if (filteredApprovalProcesses.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? 
"## Related Approval Processes" : "## Approval Processes", + "", + "| Approval Process | Is Active |", + "| :---- | :--: |" + ]); + + for (const approvalProcess of filteredApprovalProcesses) { + const approvalProcessNameCell = `[${approvalProcess.name}](${prefix}${approvalProcess.name}.md)`; + lines.push(...[ + `| ${approvalProcessNameCell} | ${approvalProcess.active} |` + ]); + } + lines.push(""); + + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + `## ${this.metadataName}`, + '', + buildGenericMarkdownTable(this.parsedXmlObject, [ + "label", + "active", + "description", + ], "## Approval Process attributes", []), + '', + '', + '', + ]; + } + + public stripXmlForAi(): Promise { + + const xmlObj = new XMLParser().parse(this.metadataXml); + + // Remove var that defines if Approval History is enabled: not relevant for prompt + if (xmlObj?.ApprovalProcess?.showApprovalHistory) { + delete xmlObj.ApprovalProcess.showApprovalHistory; + } + + // Remove var that defines if user has access to AP on mobile devices: not relevant for prompt + if (xmlObj?.ApprovalProcess?.enableMobileDeviceAccess) { + delete xmlObj.ApprovalProcess.enableMobileDeviceAccess; + } + + // Remove settings that define if the record is editable while locked: not relevant for prompt + if (xmlObj?.ApprovalProcess?.recordEditability) { + delete xmlObj.ApprovalProcess.recordEditability; + } + + return new XMLBuilder().build(xmlObj); + } +} diff --git a/src/common/docBuilder/docBuilderAssignmentRules.ts b/src/common/docBuilder/docBuilderAssignmentRules.ts new file mode 100644 index 000000000..1b6987beb --- /dev/null +++ b/src/common/docBuilder/docBuilderAssignmentRules.ts @@ -0,0 +1,51 @@ +import {DocBuilderRoot} from "./docBuilderRoot.js"; +import {PromptTemplate} from "../aiProvider/promptTemplates.js"; +import {RulesBuilderUtil} from "../utils/rulesBuilderUtil.js"; + +export class DocBuilderAssignmentRules extends DocBuilderRoot { + + public docType = 
"AssignmentRules"; + public placeholder = ""; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_ASSIGNMENT_RULES"; + public xmlRootKey = "assignmentRule"; + + public static buildIndexTable(prefix: string, assignmentRulesDescriptions: any, filterObject: string | null = null) { + const filteredAssignmentRules = filterObject ? assignmentRulesDescriptions.filter(assignmentRule => assignmentRule.impactedObjects.includes(filterObject)) : assignmentRulesDescriptions; + if (filteredAssignmentRules.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? "## Related Assignment Rules" : "## Assignment Rules", + "", + "| Assignment Rule | Is Active |", + "| :---- | :--: | " + ]); + + for (const assignmentRule of filteredAssignmentRules) { + const assignmentRuleNameCell = `[${assignmentRule.name}](${prefix}${assignmentRule.name}.md)`; + lines.push(...[ + `| ${assignmentRuleNameCell} | ${assignmentRule.active} |` + ]); + } + lines.push(""); + + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + + const ruleBuilderUtil = new RulesBuilderUtil(); + + await ruleBuilderUtil.buildInitialMarkDownLinesForRules(this.parsedXmlObject); + + const assignmentRuleTableLines: string [] = [...ruleBuilderUtil.globalRuleTableLines]; + + return [ + '', + '## Assignment Rules list', + ...assignmentRuleTableLines, + '', + ]; + } +} diff --git a/src/common/docBuilder/docBuilderAutoResponseRules.ts b/src/common/docBuilder/docBuilderAutoResponseRules.ts new file mode 100644 index 000000000..547100a8f --- /dev/null +++ b/src/common/docBuilder/docBuilderAutoResponseRules.ts @@ -0,0 +1,50 @@ +import {DocBuilderRoot} from "./docBuilderRoot.js"; +import {PromptTemplate} from "../aiProvider/promptTemplates.js"; +import {RulesBuilderUtil} from "../utils/rulesBuilderUtil.js"; + +export class DocBuilderAutoResponseRules extends DocBuilderRoot { + + public docType = "AutoResponseRules"; + public placeholder = ""; + public promptKey: 
PromptTemplate = "PROMPT_DESCRIBE_AUTORESPONSE_RULES"; + public xmlRootKey = "autoResponseRule"; + + public static buildIndexTable(prefix: string, autoResponseRulesDescriptions: any, filterObject: string | null = null) { + const filteredAutoResponseRules = filterObject ? autoResponseRulesDescriptions.filter(autoResponseRule => autoResponseRule.impactedObjects.includes(filterObject)) : autoResponseRulesDescriptions; + if (filteredAutoResponseRules.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? "## Related AutoResponse Rules" : "## AutoResponse Rules", + "", + "| AutoResponse Rule | Is Active |", + "| :---- | :--: | " + ]); + + for (const autoResponseRule of filteredAutoResponseRules) { + const autoResponseRuleNameCell = `[${autoResponseRule.name}](${prefix}${autoResponseRule.name}.md)`; + lines.push(...[ + `| ${autoResponseRuleNameCell} | ${autoResponseRule.active} |` + ]); + } + lines.push(""); + + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + + const ruleBuilderUtil = new RulesBuilderUtil(); + await ruleBuilderUtil.buildInitialMarkDownLinesFoAutoResponseRules(this.parsedXmlObject); + const autoResponseRuleTableLines: string [] = [...ruleBuilderUtil.globalRuleTableLines]; + + return [ + `## ${this.metadataName}`, + '', + '', + '## AutoResponse Rules list', + ...autoResponseRuleTableLines + ]; + } +} diff --git a/src/common/docBuilder/docBuilderEscalationRules.ts b/src/common/docBuilder/docBuilderEscalationRules.ts new file mode 100644 index 000000000..95a2334e9 --- /dev/null +++ b/src/common/docBuilder/docBuilderEscalationRules.ts @@ -0,0 +1,51 @@ +import {DocBuilderRoot} from "./docBuilderRoot.js"; +import {PromptTemplate} from "../aiProvider/promptTemplates.js"; +import {RulesBuilderUtil} from "../utils/rulesBuilderUtil.js"; + +export class DocBuilderEscalationRules extends DocBuilderRoot { + + public docType = "EscalationRules"; + public placeholder = ""; + public promptKey: 
PromptTemplate = "PROMPT_DESCRIBE_ESCALATION_RULES"; + public xmlRootKey = "escalationRule"; + + public static buildIndexTable(prefix: string, escalationRulesDescriptions: any, filterObject: string | null = null) { + const filteredEscalationRules = filterObject ? escalationRulesDescriptions.filter(escalationRule => escalationRule.impactedObjects.includes(filterObject)) : escalationRulesDescriptions; + if (filteredEscalationRules.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? "## Related Escalation Rules" : "## Escalation Rules", + "", + "| Escalation Rule | Is Active |", + "| :---- | :--: | " + ]); + + for (const escalationRule of filteredEscalationRules) { + const escalationRuleNameCell = `[${escalationRule.name}](${prefix}${escalationRule.name}.md)`; + lines.push(...[ + `| ${escalationRuleNameCell} | ${escalationRule.active} |` + ]); + } + lines.push(""); + + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + + const ruleBuilderUtil = new RulesBuilderUtil(); + + await ruleBuilderUtil.buildInitialMarkDownLinesForEscalationRules(this.parsedXmlObject); + + const escalationRuleTableLines: string [] = [...ruleBuilderUtil.globalRuleTableLines]; + + return [ + '', + '## Escalation Rules list', + ...escalationRuleTableLines, + '', + ]; + } +} diff --git a/src/common/docBuilder/docBuilderFlow.ts b/src/common/docBuilder/docBuilderFlow.ts new file mode 100644 index 000000000..ca86e6634 --- /dev/null +++ b/src/common/docBuilder/docBuilderFlow.ts @@ -0,0 +1,48 @@ +import { XMLBuilder, XMLParser } from "fast-xml-parser"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; +import * as path from "path"; +import { prettifyFieldName } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { mdTableCell } from "../gitProvider/utilsMarkdown.js"; +import fs from "fs"; + +export class DocBuilderFlow extends DocBuilderRoot { + + public 
docType = "Flow"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_FLOW"; + public placeholder = ""; + public xmlRootKey = "Flow"; + + public static buildIndexTable(prefix: string, flowDescriptions: any[], outputMarkdownRoot: string, filterObject: string | null = null): string[] { + const filteredFlows = filterObject ? flowDescriptions.filter(flow => flow.object === filterObject || flow.impactedObjects.includes(filterObject)) : flowDescriptions; + if (filteredFlows.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? "## Related Flows" : "## Flows", + "", + "| Object | Name | Type | Description |", + "| :---- | :-------- | :--: | :---------- | " + ]); + for (const flow of filteredFlows) { + const outputFlowHistoryMdFile = path.join(outputMarkdownRoot, "flows", flow.name + "-history.md"); + const flowNameCell = fs.existsSync(outputFlowHistoryMdFile) ? + `[${flow.name}](${prefix}${flow.name}.md) [🕒](${prefix}${flow.name}-history.md)` : + `[${flow.name}](${prefix}${flow.name}.md)`; + lines.push(...[ + `| ${flow.object || "💻"} | ${flowNameCell} | ${prettifyFieldName(flow.type)} | ${mdTableCell(flow.description)} |` + ]); + } + lines.push(""); + return lines; + } + + public async stripXmlForAi(): Promise { + const xmlStringStripped = this.metadataXml.replace(/.*?<\/locationX>\s*|.*?<\/locationY>\s*/g, ''); + const xmlObj = new XMLParser().parse(xmlStringStripped); + const xmlStripped = new XMLBuilder().build(xmlObj); + return xmlStripped; + } + +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderLwc.ts b/src/common/docBuilder/docBuilderLwc.ts new file mode 100644 index 000000000..f37af1a2f --- /dev/null +++ b/src/common/docBuilder/docBuilderLwc.ts @@ -0,0 +1,118 @@ +import { DocBuilderRoot } from "./docBuilderRoot.js"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import jsdoc2md from "jsdoc-to-markdown"; +import fs from "fs-extra"; +import path from "path"; + +export class 
DocBuilderLwc extends DocBuilderRoot { + + public docType = "Lwc"; + public placeholder = ""; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_LWC"; + public xmlRootKey = ""; + public docsSection = "lwc"; + + public static buildIndexTable(prefix: string, lwcDescriptions: any, filterObject: string | null = null) { + const filteredLwcs = filterObject + ? lwcDescriptions.filter(lwc => lwc.impactedObjects.includes(filterObject)) + : lwcDescriptions; + + if (filteredLwcs.length === 0) { + return []; + } + + const lines: string[] = []; + lines.push(...[ + filterObject ? "## Related Lightning Web Components" : "## Lightning Web Components", + "", + "| Component | Description | Exposed | Targets |", + "| :-------- | :---------- | :-----: | :------------- |" + ]); + + for (const lwc of filteredLwcs) { + const lwcNameCell = `[${lwc.name}](${prefix}${lwc.name}.md)`; + const exposedCell = lwc.isExposed ? "✅" : "❌"; + lines.push(...[ + `| ${lwcNameCell} | ${lwc.description || ""} | ${exposedCell} | ${lwc.targets || ""} |` + ]); + } + lines.push(""); + + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + `## ${this.metadataName}`, + '', + '', + '', + '## JS Documentation', + '', + await this.generateJsDocumentation(), + '', + '## Files', + '', + await this.listComponentFiles(), + '' + ]; + } + + private async generateJsDocumentation(): Promise { + try { + const lwcPath = this.additionalVariables.LWC_PATH; + const jsFile = path.join(lwcPath, `${this.metadataName}.js`); + + if (fs.existsSync(jsFile)) { + const jsdocOutput = await jsdoc2md.render({ files: jsFile }); + return jsdocOutput || "No JSDoc documentation available for this component."; + } else { + return "No JavaScript file found for this component."; + } + } catch (error) { + return `Error generating JS documentation: ${(error as any).message}`; + } + } + + private async listComponentFiles(): Promise { + try { + const lwcPath = this.additionalVariables.LWC_PATH; + const files = 
await fs.readdir(lwcPath); + + let fileList = ""; + for (const file of files) { + const stats = await fs.stat(path.join(lwcPath, file)); + if (stats.isFile()) { + fileList += `- \`${file}\`\n`; + } + } + + return fileList || "No files found for this component."; + } catch (error) { + return `Error listing component files: ${(error as any).message}`; + } + } + + public async stripXmlForAi(): Promise { + const lwcPath = this.additionalVariables.LWC_PATH; + const files = await fs.readdir(lwcPath); + + let componentCode = ""; + for (const file of files) { + const filePath = path.join(lwcPath, file); + const stats = await fs.stat(filePath); + + if (stats.isFile()) { + // Skip CSS files + if (file.endsWith('.css')) { + continue; + } + + const fileContent = await fs.readFile(filePath, 'utf-8'); + componentCode += `// File: ${file}\n${fileContent}\n\n`; + } + } + + return componentCode; + } +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderObject.ts b/src/common/docBuilder/docBuilderObject.ts new file mode 100644 index 000000000..43177f27b --- /dev/null +++ b/src/common/docBuilder/docBuilderObject.ts @@ -0,0 +1,128 @@ +import { XMLBuilder, XMLParser } from "fast-xml-parser"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; +import { mdTableCell } from "../gitProvider/utilsMarkdown.js"; + +export class DocBuilderObject extends DocBuilderRoot { + + public docType = "Object"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_OBJECT"; + public placeholder = ""; + public xmlRootKey = "CustomObject"; + + public static buildIndexTable(prefix: string, objectDescriptions: any[]) { + const lines: string[] = []; + lines.push(...[ + "## Objects", + "", + "| Name | Label | Description |", + "| :-------- | :---- | :---------- | " + ]); + for (const objectDescription of objectDescriptions) { + const objectNameCell = `[${objectDescription.name}](${prefix}${objectDescription.name}.md)`; 
+ lines.push(...[ + `| ${objectNameCell} | ${objectDescription.label || ""} | ${mdTableCell(objectDescription.description)} |` + ]); + } + lines.push(""); + return lines; + } + + public static buildCustomFieldsTable(fields: any[]) { + if (!Array.isArray(fields)) { + fields = [fields]; + } + if (fields.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + "## Fields", + "", + "| Name | Label | Type | Description |", + "| :-------- | :---- | :--: | :---------- | " + ]); + for (const field of fields) { + lines.push(...[ + `| ${field.fullName} | ${field.label || ""} | ${field.type || ""} | ${mdTableCell(field.description)} |` + ]); + } + lines.push(""); + return lines; + } + + public static buildValidationRulesTable(validationRules: any[]) { + if (!Array.isArray(validationRules)) { + validationRules = [validationRules]; + } + if (validationRules.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + "## Validation Rules", + "", + "| Rule | Active | Description | Formula |", + "| :-------- | :---- | :---------- | :------ |" + ]); + for (const rule of validationRules) { + lines.push(...[ + `| ${rule.fullName} | ${rule.active ? 
"Yes" : "No ⚠️"} | ${rule.description || ""} | \`${rule.errorConditionFormula}\` |` + ]); + } + lines.push(""); + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + ]; + } + + public stripXmlForAi(): Promise { + const xmlObj = new XMLParser().parse(this.metadataXml); + // Remove record types picklist values + if (xmlObj?.CustomObject?.recordTypes) { + if (!Array.isArray(xmlObj.CustomObject.recordTypes)) { + xmlObj.CustomObject.recordTypes = [xmlObj.CustomObject.recordTypes]; + } + for (const recordType of xmlObj?.CustomObject?.recordTypes || []) { + delete recordType.picklistValues; + } + } + // Remove actionOverrides with formFactors as they already exist in default + if (xmlObj?.CustomObject?.actionOverrides) { + if (!Array.isArray(xmlObj.CustomObject.actionOverrides)) { + xmlObj.CustomObject.actionOverrides = [xmlObj.CustomObject.actionOverrides]; + } + xmlObj.CustomObject.actionOverrides = xmlObj.CustomObject.actionOverrides.filter(actionOverride => !actionOverride.formFactor); + } + // Remove compact layouts + if (xmlObj?.CustomObject?.compactLayouts) { + delete xmlObj.CustomObject.compactLayouts; + } + // Remove compact layouts + if (xmlObj?.CustomObject?.listViews) { + delete xmlObj.CustomObject.listViews; + } + const xmlStripped = new XMLBuilder().build(xmlObj); + return xmlStripped + } + +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderPackage.ts b/src/common/docBuilder/docBuilderPackage.ts new file mode 100644 index 000000000..194e554c0 --- /dev/null +++ b/src/common/docBuilder/docBuilderPackage.ts @@ -0,0 +1,63 @@ +import sortArray from "sort-array"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { buildGenericMarkdownTable } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; +import { DocBuilderPackageXML } from 
"./docBuilderPackageXml.js"; +import fs from "fs"; +import { parsePackageXmlFile } from "../utils/xmlUtils.js"; +import { makeFileNameGitCompliant } from "../utils/gitUtils.js"; + +export class DocBuilderPackage extends DocBuilderRoot { + + public docType = "Package"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_PACKAGE"; + public placeholder = ""; + public xmlRootKey = "json"; + public docsSection = "packages"; + + public static buildIndexTable(prefix: string, packageDescriptions: any, filterObject: string | null = null) { + const filteredPackages = filterObject ? packageDescriptions.filter(page => page.impactedObjects.includes(filterObject)) : packageDescriptions; + if (filteredPackages.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + "## Installed packages", + "", + "| Name | Namespace | Version | Version Name |", + "| :---- | :-------- | :------ | :----------: | " + ]); + for (const pckg of sortArray(filteredPackages, { by: ['namespace', 'name'], order: ['asc', 'asc'] }) as any[]) { + const packageNameCell = `[${pckg.name}](${prefix}${makeFileNameGitCompliant(pckg.name)}.md)`; + lines.push(...[ + `| ${packageNameCell} | ${pckg.namespace || ""} | [${pckg.versionNumber}](https://test.salesforce.com/packaging/installPackage.apexp?p0=${pckg.versionId}) | ${pckg.versionName} |` + ]); + } + lines.push(""); + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + `## ${this.metadataName}`, + '', + '', + '', + buildGenericMarkdownTable(this.parsedXmlObject, ["SubscriberPackageName", "SubscriberPackageNamespace", "SubscriberPackageVersionNumber", "SubscriberPackageVersionId", "SubscriberPackageVersionName", "SubscriberPackageId"], "## Package attributes", []), + '', + '## Package Metadatas', + '', + '
', + '' + ]; + } + + // Generate json for display with jsTree npm library + public async generateJsonTree(): Promise { + if (this.additionalVariables.PACKAGE_FILE && fs.existsSync(this.additionalVariables.PACKAGE_FILE)) { + const packageData = await parsePackageXmlFile(this.additionalVariables.PACKAGE_FILE); + return DocBuilderPackageXML.generateJsonTree("all", packageData); + } + return null; + } +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderPackageXml.ts b/src/common/docBuilder/docBuilderPackageXml.ts new file mode 100644 index 000000000..0ae910d84 --- /dev/null +++ b/src/common/docBuilder/docBuilderPackageXml.ts @@ -0,0 +1,233 @@ +import c from 'chalk'; +import * as path from 'path'; +import fs from 'fs-extra'; +import { SfError } from '@salesforce/core'; +import { sortCrossPlatform, uxLog } from '../utils/index.js'; +import { countPackageXmlItems, parsePackageXmlFile } from '../utils/xmlUtils.js'; +import { SalesforceSetupUrlBuilder } from './docUtils.js'; +import { CONSTANTS } from '../../config/index.js'; +import { prettifyFieldName } from '../utils/flowVisualiser/nodeFormatUtils.js'; + +export class DocBuilderPackageXML { + + public static async buildIndexTable(outputPackageXmlMarkdownFiles: any[]) { + const packageLines: string[] = []; + const packagesForMenu: any = { "All manifests": "manifests.md" } + packageLines.push(...[ + "## Package XML files", + "", + "| Package name | Description |", + "| :----------- | :---------- |" + ]); + + for (const outputPackageXmlDef of outputPackageXmlMarkdownFiles) { + const metadataNb = await countPackageXmlItems(outputPackageXmlDef.path); + const packageMdFile = path.basename(outputPackageXmlDef.path) + ".md"; + const label = outputPackageXmlDef.name ? 
`Package folder: ${outputPackageXmlDef.name}` : path.basename(outputPackageXmlDef.path); + const packageTableLine = `| [${label}](${packageMdFile}) (${metadataNb}) | ${outputPackageXmlDef.description} |`; + packageLines.push(packageTableLine); + packagesForMenu[label] = packageMdFile; + } + packageLines.push(""); + packageLines.push("___"); + packageLines.push(""); + return { packageLines, packagesForMenu }; + } + + public static async generatePackageXmlMarkdown(inputFile: string | null, outputFile: string | null = null, packageXmlDefinition: any = null, rootSalesforceUrl: string = "") { + // Find packageXml to parse if not defined + if (inputFile == null) { + inputFile = path.join(process.cwd(), "manifest", "package.xml"); + if (!fs.existsSync(inputFile)) { + throw new SfError("No package.xml found. You need to send the path to a package.xml file in --inputfile option"); + } + } + // Build output file if not defined + if (outputFile == null) { + const packageXmlFileName = path.basename(inputFile); + outputFile = path.join(process.cwd(), "docs", `${packageXmlFileName}.md`); + } + await fs.ensureDir(path.dirname(outputFile)); + + uxLog("other", this, `Generating markdown doc from ${inputFile} to ${outputFile}...`); + + // Read content + const packageXmlContent = await parsePackageXmlFile(inputFile); + const metadataTypes = Object.keys(packageXmlContent); + metadataTypes.sort(); + const nbItems = await countPackageXmlItems(inputFile); + + const mdLines: string[] = [] + + if (packageXmlDefinition && packageXmlDefinition.description) { + // Header + mdLines.push(...[ + `## Content of ${path.basename(inputFile)}`, + '', + packageXmlDefinition.description, + '', + '
', + '', + `Metadatas: ${nbItems}`, + '' + ]); + } + else { + // Header + mdLines.push(...[ + `## Content of ${path.basename(inputFile)}`, + '', + '
', + '', + `Metadatas: ${nbItems}`, + '' + ]); + } + + // Generate package.xml markdown + for (const metadataType of metadataTypes) { + const members = packageXmlContent[metadataType]; + sortCrossPlatform(members); + const memberLengthLabel = members.length === 1 && members[0] === "*" ? "*" : members.length; + mdLines.push(`
${metadataType} (${memberLengthLabel})\n\n`); + for (const member of members) { + const memberLabel = member === "*" ? "ALL (wildcard *)" : member; + const setupUrl = SalesforceSetupUrlBuilder.getSetupUrl(metadataType, member); + if (setupUrl && rootSalesforceUrl) { + mdLines.push(` • ${memberLabel}
`); + } + else { + mdLines.push(` • ${memberLabel}
`); + } + } + mdLines.push(""); + mdLines.push("
"); + mdLines.push(""); + } + mdLines.push(""); + + // Footer + mdLines.push(`_Documentation generated with [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT})_`); + + // Write output file + await fs.writeFile(outputFile, mdLines.join("\n") + "\n"); + uxLog("success", this, c.green(`Successfully generated ${path.basename(inputFile)} documentation into ${outputFile}`)); + + const jsonTree = await this.generateJsonTree(metadataTypes, packageXmlContent); + if (jsonTree) { + const packageXmlFileName = path.basename(outputFile, ".md"); + const jsonFile = `./docs/json/root-${packageXmlFileName}.json`; + await fs.ensureDir(path.dirname(jsonFile)); + await fs.writeFile(jsonFile, JSON.stringify(jsonTree, null, 2)); + uxLog("success", this, c.green(`Successfully generated ${packageXmlFileName} JSON into ${jsonFile}`)); + } + + return outputFile; + } + + public static listPackageXmlCandidates(): any[] { + return [ + // CI/CD package files + { + path: "manifest/package.xml", + description: "Contains all deployable metadatas of the SFDX project" + }, + { + path: "manifest/packageDeployOnce.xml", + description: "Contains all metadatas that will never be overwritten during deployment if they are already existing in the target org" + }, + { + path: "manifest/package-no-overwrite.xml", + description: "Contains all metadatas that will never be overwritten during deployment if they are already existing in the target org" + }, + { + path: "manifest/destructiveChanges.xml", + description: "Contains all metadatas that will be deleted during deployment, in case they are existing in the target org" + }, + // Monitoring package files + { + path: "manifest/package-all-org-items.xml", + description: "Contains the entire list of metadatas that are present in the monitored org (not all of them are in the git backup)" + }, + { + path: "manifest/package-backup-items.xml", + description: "Contains the list of metadatas that are in the git backup" + }, + { + path: "manifest/package-skip-items.xml", + 
description: "Contains the list of metadatas that are excluded from the backup.
Other metadata types might be skipped using environment variable MONITORING_BACKUP_SKIP_METADATA_TYPES" + }, + ]; + } + + // Generate json for display with jsTree npm library + public static async generateJsonTree(metadataTypes: any, packageXmlContent: any): Promise { + if (metadataTypes === "all") { + metadataTypes = Object.keys(packageXmlContent); + metadataTypes.sort(); + } + const treeElements: any[] = []; + for (const metadataType of metadataTypes) { + const members = packageXmlContent[metadataType] || []; + sortCrossPlatform(members); + const memberLengthLabel = members.length === 1 && members[0] === "*" ? "all" : members.length; + const typeRoot: any = { + text: prettifyFieldName(metadataType) + " (" + memberLengthLabel + ")", + icon: memberLengthLabel !== "all" ? "fa-solid fa-folder icon-blue" : "fa-solid fa-folder icon-warning", + a_attr: { href: null }, + children: [], + } + if (memberLengthLabel !== "all") { + if (metadataType === "CustomField") { + // Sort custom fields by object name + DocBuilderPackageXML.createCustomFieldsTree(members, typeRoot); + } + else { + DocBuilderPackageXML.createMembersTree(members, typeRoot); + } + } + treeElements.push(typeRoot); + } + return treeElements; + } + + private static createCustomFieldsTree(members: any, typeRoot: any) { + const elementsByObject: any = []; + for (const element of members) { + const objectName = element.split('.')[0]; + if (!elementsByObject[objectName]) { + elementsByObject[objectName] = []; + } + elementsByObject[objectName].push(element); + } + // Create object nodes and fields as children + for (const objectName of Object.keys(elementsByObject)) { + const objectNode: any = { + text: objectName + " (" + elementsByObject[objectName].length + ")", + icon: "fa-solid fa-folder icon-blue", + a_attr: { href: null }, + children: [], + }; + for (const element of elementsByObject[objectName]) { + const subElement: any = { + text: element, + icon: "fa-solid fa-circle-check icon-success", + a_attr: { 
href: null }, + }; + objectNode.children.push(subElement); + } + typeRoot.children.push(objectNode); + } + } + + private static createMembersTree(members: any, typeRoot: any) { + for (const member of members) { + const subElement: any = { + text: member, + icon: "fa-solid fa-circle-check icon-success", + a_attr: { href: null }, + }; + typeRoot.children.push(subElement); + } + } +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderPage.ts b/src/common/docBuilder/docBuilderPage.ts new file mode 100644 index 000000000..1ddabd9ec --- /dev/null +++ b/src/common/docBuilder/docBuilderPage.ts @@ -0,0 +1,46 @@ +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { buildGenericMarkdownTable } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; + +export class DocBuilderPage extends DocBuilderRoot { + + public docType = "Page"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_PAGE"; + public placeholder = ""; + public xmlRootKey = "FlexiPage"; + + + public static buildIndexTable(prefix: string, pageDescriptions: any, filterObject: string | null = null) { + const filteredPages = filterObject ? pageDescriptions.filter(page => page.impactedObjects.includes(filterObject)) : pageDescriptions; + if (filteredPages.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? 
"## Related Lightning Pages" : "## Lightning Pages", + "", + "| Lightning Page | Type |", + "| :---- | :--: | " + ]); + for (const page of filteredPages) { + const pageNameCell = `[${page.name}](${prefix}${page.name}.md)`; + lines.push(...[ + `| ${pageNameCell} | ${page.type} |` + ]); + } + lines.push(""); + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + `## ${this.metadataName}`, + '', + buildGenericMarkdownTable(this.parsedXmlObject, ["sobjectType", "type", "masterLabel", "template"], "## Lightning Page attributes", []), + '', + '', + '', + ]; + } + +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderPermissionSet.ts b/src/common/docBuilder/docBuilderPermissionSet.ts new file mode 100644 index 000000000..ea9b000a7 --- /dev/null +++ b/src/common/docBuilder/docBuilderPermissionSet.ts @@ -0,0 +1,104 @@ +import { XMLBuilder, XMLParser } from "fast-xml-parser"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { buildGenericMarkdownTable, prettifyFieldName } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { DocBuilderProfile } from "./docBuilderProfile.js"; + +export class DocBuilderPermissionSet extends DocBuilderProfile { + + public docType = "PermissionSet"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_PERMISSION_SET"; + public placeholder = ""; + public xmlRootKey = "PermissionSet"; + public docsSection = "permissionsets"; + + public static buildIndexTable(prefix: string, permissionSetDescriptions: any[], filterObject: string | null = null) { + const filteredPsets = filterObject ? permissionSetDescriptions.filter(pSet => pSet.impactedObjects.includes(filterObject)) : permissionSetDescriptions; + if (filteredPsets.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? 
"## Related Permission Sets" : "## Permission Sets", + "", + "| Permission Set | User License |", + "| :---- | :--: | " + ]); + for (const pSet of filteredPsets) { + const pSetNameCell = `[${pSet.name}](${prefix}${encodeURIComponent(pSet.name)}.md)`; + lines.push(...[ + `| ${pSetNameCell} | ${pSet.license || "None"} |` + ]); + } + lines.push(""); + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + `## ${this.metadataName}`, + '', + '
', + '', + buildGenericMarkdownTable(this.parsedXmlObject, ["label", "description", "license", "hasActivationRequired"], "## Permission Set attributes", []), + '', + '', + '', + '', + '', + ]; + } + + public stripXmlForAi(): Promise { + const xmlObj = new XMLParser().parse(this.metadataXml); + // Remove class access: not relevant for prompt + if (xmlObj?.PermissionSet?.classAccesses) { + delete xmlObj.PermissionSet.classAccesses; + } + // Remove flowAccesses: not relevant for prompt + if (xmlObj?.PermissionSet?.flowAccesses) { + delete xmlObj.PermissionSet.flowAccesses; + } + const xmlStripped = new XMLBuilder().build(xmlObj); + return xmlStripped + } + + // Generate json for display with jsTree npm library + public async generateJsonTree(): Promise { + const xmlObj = new XMLParser().parse(this.metadataXml); + const treeElements: any[] = []; + for (const psRootAttribute of Object.keys(xmlObj?.PermissionSet || {})) { + if (["label", "license", "hasActivationRequired", "description"].includes(psRootAttribute)) { + continue; + } + let attributeValue = xmlObj.PermissionSet[psRootAttribute]; + if (!Array.isArray(attributeValue)) { + attributeValue = [attributeValue] + } + const attributeTreeRoot: any = { + text: prettifyFieldName(psRootAttribute), + icon: "fa-solid fa-folder icon-blue", + a_attr: { href: null }, + children: [], + } + if (psRootAttribute === "fieldPermissions") { + // Sort custom fields by object name + this.buildObjectFieldsTree(attributeValue, attributeTreeRoot); + } + else { + for (const element of attributeValue) { + if (!this.isAccessibleElement(element)) { + continue; + } + const subElement: any = this.getSubElement(element); + attributeTreeRoot.children.push(subElement); + } + attributeTreeRoot.text = attributeTreeRoot.text + " (" + attributeTreeRoot.children.length + ")"; + } + if (attributeTreeRoot.children.length > 0) { + treeElements.push(attributeTreeRoot); + } + } + return treeElements; + } + +} \ No newline at end of file diff --git 
a/src/common/docBuilder/docBuilderPermissionSetGroup.ts b/src/common/docBuilder/docBuilderPermissionSetGroup.ts new file mode 100644 index 000000000..3500aa14e --- /dev/null +++ b/src/common/docBuilder/docBuilderPermissionSetGroup.ts @@ -0,0 +1,64 @@ +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { buildGenericMarkdownTable } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; + +export class DocBuilderPermissionSetGroup extends DocBuilderRoot { + + public docType = "PermissionSetGroup"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_PERMISSION_SET_GROUP"; + public placeholder = ""; + public xmlRootKey = "PermissionSetGroup"; + public docsSection = "permissionsetgroups"; + + public static buildIndexTable(prefix: string, permissionSetGroupDescriptions: any[], filterObject: string | null = null) { + const filteredPsetGroups = filterObject ? permissionSetGroupDescriptions.filter(pSetGroup => pSetGroup.relatedPermissionSets.includes(filterObject)) : permissionSetGroupDescriptions; + if (filteredPsetGroups.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? 
"## Related Permission Set Groups" : "## Permission Set Groups", + "", + "| Permission Set Group | Description |", + "| :---- | :---------- |" + ]); + for (const pSetGroup of filteredPsetGroups) { + const pSetGroupNameCell = `[${pSetGroup.name}](${prefix}${encodeURIComponent(pSetGroup.name)}.md)`; + lines.push(...[ + `| ${pSetGroupNameCell} | ${pSetGroup.description || "None"} |` + ]); + } + lines.push(""); + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + const permissionSetTableLines = [ + "| Permission Set |", + "| :------------- |" + ]; + if (this.parsedXmlObject.permissionSets) { + if (!Array.isArray(this.parsedXmlObject.permissionSets)) { + this.parsedXmlObject.permissionSets = [this.parsedXmlObject.permissionSets]; + } + for (const permissionSet of this.parsedXmlObject.permissionSets) { + const permissionSetNameCell = `[${permissionSet}](../permissionsets/${encodeURIComponent(permissionSet)}.md)`; + permissionSetTableLines.push(`| ${permissionSetNameCell} |`); + } + } else { + permissionSetTableLines.push("| None |"); + } + + return [ + `## ${this.metadataName}`, + '', + buildGenericMarkdownTable(this.parsedXmlObject, ["label", "description", "status"], "## Permission Set Group attributes", []), + '', + '## Permission Sets', + ...permissionSetTableLines, + '', + '', + '', + ]; + } +} \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderProfile.ts b/src/common/docBuilder/docBuilderProfile.ts new file mode 100644 index 000000000..829e3815e --- /dev/null +++ b/src/common/docBuilder/docBuilderProfile.ts @@ -0,0 +1,234 @@ +import { XMLBuilder, XMLParser } from "fast-xml-parser"; +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import { buildGenericMarkdownTable, prettifyFieldName } from "../utils/flowVisualiser/nodeFormatUtils.js"; +import { DocBuilderRoot } from "./docBuilderRoot.js"; +/* jscpd:ignore-start */ +export class DocBuilderProfile extends DocBuilderRoot { + + public docType = 
"Profile"; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_PROFILE"; + public placeholder = ""; + public xmlRootKey = "Profile"; + public docsSection = "profiles"; + + public static buildIndexTable(prefix: string, profileDescriptions: any[], filterObject: string | null = null) { + const filteredProfiles = filterObject ? profileDescriptions.filter(profile => profile.impactedObjects.includes(filterObject)) : profileDescriptions; + if (filteredProfiles.length === 0) { + return []; + } + const lines: string[] = []; + lines.push(...[ + filterObject ? "## Related Profiles" : "## Profiles", + "", + "| Profile | User License |", + "| :---- | :--: | " + ]); + for (const profile of filteredProfiles) { + const profileNameCell = `[${profile.name}](${prefix}${encodeURIComponent(profile.name)}.md)`; + lines.push(...[ + `| ${profileNameCell} | ${profile.userLicense} |` + ]); + } + lines.push(""); + return lines; + } + + public async buildInitialMarkdownLines(): Promise { + return [ + `## ${this.metadataName}`, + '', + '
', + '', + buildGenericMarkdownTable(this.parsedXmlObject, ["userLicense", "custom"], "## Profile attributes", []), + '', + '', + '', + ]; + } + + public stripXmlForAi(): Promise { + const xmlObj = new XMLParser().parse(this.metadataXml); + // Remove class access: not relevant for prompt + if (xmlObj?.Profile?.classAccesses) { + delete xmlObj.Profile.classAccesses; + } + // Remove fieldPermissions: not relevant for prompt + if (xmlObj?.Profile?.fieldPermissions) { + delete xmlObj.Profile.fieldPermissions; + } + // Remove flowAccesses: not relevant for prompt + if (xmlObj?.Profile?.flowAccesses) { + delete xmlObj.Profile.flowAccesses; + } + // Remove layoutAssignments: not relevant for prompt + if (xmlObj?.Profile?.layoutAssignments) { + delete xmlObj.Profile.layoutAssignments; + } + // Remove pageAccesses: not relevant for prompt + if (xmlObj?.Profile?.pageAccesses) { + delete xmlObj.Profile.pageAccesses; + } + // Keep only visible applications + if (xmlObj?.Profile?.applicationVisibilities) { + if (!Array.isArray(xmlObj.Profile.applicationVisibilities)) { + xmlObj.Profile.applicationVisibilities = [xmlObj.Profile.applicationVisibilities]; + } + xmlObj.Profile.applicationVisibilities = xmlObj.Profile.applicationVisibilities.filter(applicationVisibility => applicationVisibility.visible === true); + } + // Keep only visible recordTypes + if (xmlObj?.Profile?.recordTypeVisibilities) { + if (!Array.isArray(xmlObj.Profile.recordTypeVisibilities)) { + xmlObj.Profile.recordTypeVisibilities = [xmlObj.Profile.recordTypeVisibilities]; + } + xmlObj.Profile.recordTypeVisibilities = xmlObj.Profile.recordTypeVisibilities.filter(rt => rt.visible === true); + } + // Keep only visible tabs + if (xmlObj?.Profile?.tabVisibilities) { + if (!Array.isArray(xmlObj.Profile.tabVisibilities)) { + xmlObj.Profile.tabVisibilities = [xmlObj.Profile.tabVisibilities]; + } + xmlObj.Profile.tabVisibilities = xmlObj.Profile.tabVisibilities.filter(tab => tab.visibility === 'Hidden'); + } + const 
xmlStripped = new XMLBuilder().build(xmlObj); + return xmlStripped + } + + // Generate json for display with jsTree npm library + public async generateJsonTree(): Promise { + const xmlObj = new XMLParser().parse(this.metadataXml); + const treeElements: any[] = []; + for (const profileRootAttribute of Object.keys(xmlObj?.Profile || {})) { + if (["custom", "userLicense"].includes(profileRootAttribute)) { + continue; + } + let attributeValue = xmlObj.Profile[profileRootAttribute]; + if (!Array.isArray(attributeValue)) { + attributeValue = [attributeValue] + } + const attributeTreeRoot: any = { + text: prettifyFieldName(profileRootAttribute), + icon: "fa-solid fa-folder icon-blue", + a_attr: { href: null }, + children: [], + } + if (profileRootAttribute === "fieldPermissions") { + // Sort custom fields by object name + this.buildObjectFieldsTree(attributeValue, attributeTreeRoot); + } + else { + for (const element of attributeValue) { + if (!this.isAccessibleElement(element)) { + continue; + } + const subElement: any = this.getSubElement(element); + attributeTreeRoot.children.push(subElement); + } + attributeTreeRoot.text = attributeTreeRoot.text + " (" + attributeTreeRoot.children.length + ")"; + } + if (attributeTreeRoot.children.length > 0) { + treeElements.push(attributeTreeRoot); + } + } + return treeElements; + } + + public buildObjectFieldsTree(attributeValue: any, attributeTreeRoot: any) { + const elementsByObject: any = []; + for (const element of attributeValue) { + const objectName = element.field.split('.')[0]; + if (!elementsByObject[objectName]) { + elementsByObject[objectName] = []; + } + elementsByObject[objectName].push(element); + } + // Create object nodes and fields as children + let totalFields = 0; + for (const objectName of Object.keys(elementsByObject)) { + const objectNode: any = { + text: objectName + " (" + elementsByObject[objectName].length + ")", + icon: "fa-solid fa-folder icon-blue", + a_attr: { href: null }, + children: [], + }; + for 
(const element of elementsByObject[objectName]) { + if (!this.isAccessibleElement(element)) { + continue; + } + const subElement: any = this.getSubElement(element); + objectNode.children.push(subElement); + } + if (objectNode.children.length > 0) { + attributeTreeRoot.children.push(objectNode); + totalFields += objectNode.children.length; + } + } + attributeTreeRoot.text = attributeTreeRoot.text + " (" + totalFields + ")"; + } + + public isAccessibleElement(element: any) { + if (element.visible === false) { + return false; + } + if (element.readable === false) { + return false; + } + if (element.allowRead === false) { + return false; + } + if (element.enabled === false) { + return false; + } + return true; + } + + public getSubElement(element: any) { + const subElement: any = { + text: element.name || element.apexClass || element.flow || element.apexPage || element.object || element.tab || element.application || element.field || element.layout || element.recordType || element.externalDataSource || element.startAddress || element.dataspaceScope || "ERROR: " + JSON.stringify(element), + icon: + // Common properties + element.default === true ? "fa-solid fa-star icon-success" : + element.visible === true ? "fa-solid fa-eye icon-success" : + element.visible === false ? "fa-solid fa-eye-slash icon-error" : + element.enabled === true ? "fa-solid fa-circle-check icon-success" : + element.enabled === false ? "fa-solid fa-circle-xmark icon-error" : + // Custom fields + element.editable === true ? "fa-solid fa-square-pen icon-success" : + element.readable === true ? "fa-solid fa-eye icon-success" : + element.readable === false ? "fa-solid fa-eye-slash icon-error" : + // Custom objects + element.modifyAllRecords === true ? "fa-solid fa-web-awesome icon-success" : + element.viewAllRecords === true && element.allowEdit === false ? "fa-solid fa-magnifying-glass icon-success" : + element.allowEdit === true ? "fa-solid fa-square-pen icon-success" : + element.allowRead === true ? 
"fa-solid fa-eye icon-success" : + element.allowRead === false ? "fa-solid fa-eye-slash icon-error" : + // Tabs + ["DefaultOn", "Visible"].includes(element.visibility) ? "fa-solid fa-eye icon-success" : + element.visibility === "DefaultOff" ? "fa-solid fa-circle-notch icon-warning" : + element.visibility === "Hidden" ? "fa-solid fa-eye-slash icon-error" : + "fa-solid fa-file", + a_attr: { href: null }, + children: [], + }; + subElement.children = Object.keys(element).map((key) => { + const icon = (element[key] === true) ? "fa-solid fa-circle-check icon-success" : + (element[key] === false) ? "fa-solid fa-circle-xmark icon-error" : + (["DefaultOn", "Visible"].includes(element[key])) ? "fa-solid fa-eye icon-success" : + (element[key] === "Hidden") ? "fa-solid fa-eye-slash icon-error" : + (element[key] === "DefaultOff") ? "fa-solid fa-circle-notch icon-warning" : + ""; + return { + text: prettifyFieldName(key) + ": " + element[key], + icon: icon, + a_attr: { href: null }, + }; + }); + // Sort subElement.children to put text as first element + subElement.children.sort((a: any, b: any) => { + if (a.text.endsWith(subElement.text)) return -1; + if (b.text.endsWith(subElement.text)) return 1; + return 0; + }); + return subElement; + } +} +/* jscpd:ignore-end */ \ No newline at end of file diff --git a/src/common/docBuilder/docBuilderRoles.ts b/src/common/docBuilder/docBuilderRoles.ts new file mode 100644 index 000000000..34bc9cf42 --- /dev/null +++ b/src/common/docBuilder/docBuilderRoles.ts @@ -0,0 +1,118 @@ +import { PromptTemplate } from "../aiProvider/promptTemplates.js"; +import fs from "fs-extra"; +import path from "path"; +import { uxLog } from "../utils/index.js"; +import c from "chalk"; +import { UtilsAi } from "../aiProvider/utils.js"; +import { AiProvider } from "../aiProvider/index.js"; + +export class DocBuilderRoles { + + public placeholder = ""; + public promptKey: PromptTemplate = "PROMPT_DESCRIBE_ROLES"; + + public static async 
generateMarkdownFileFromRoles(roleDescriptions: any[], outputFile: string) { + const mdLines: string[] = [ + '', + '', + '', + '# Organization roles', + '', + '
', + '', + ]; + const aiDescription = await DocBuilderRoles.getDescriptionWithAI(roleDescriptions); + if (aiDescription) { + mdLines.push(""); + mdLines.push("## AI-Generated Description", ""); + mdLines.push(...aiDescription.split("\n")); + mdLines.push(""); + } + await fs.ensureDir(path.dirname(outputFile)); + await fs.writeFile(outputFile, mdLines.join("\n") + "\n"); + + await this.generateJsonTreeFileFromRoles(roleDescriptions); + + } + + public static async getDescriptionWithAI(roleDescriptions: any[]): Promise { + const promptKey = "PROMPT_DESCRIBE_ROLES"; + const metadataName = "Roles" + const rolesStrings = roleDescriptions.map(role => { + return `- **${role.name} (id:${role.apiName} )**: ${role.description || "No description available"} (parentId: ${role.parentRole || "ROOT"}`; + }).join("\n"); + const aiCache = await UtilsAi.findAiCache(promptKey, [rolesStrings], metadataName); + if (aiCache.success) { + uxLog("success", this, c.green(`Using cached AI response for Roles`)); + return aiCache.cacheText || ''; + } + if (AiProvider.isAiAvailable()) { + const variables = { + ROLES_DESCRIPTION: rolesStrings + }; + const prompt = AiProvider.buildPrompt(promptKey, variables); + const aiResponse = await AiProvider.promptAi(prompt, promptKey); + if (aiResponse?.success) { + let responseText = aiResponse.promptResponse || "No AI description available"; + if (responseText.startsWith("##")) { + responseText = responseText.split("\n").slice(1).join("\n"); + } + await UtilsAi.writeAiCache(promptKey, [rolesStrings], metadataName, responseText); + return responseText; + } + } + return ''; + } + + public static async generateJsonTreeFileFromRoles(roleDescriptions: any[]) { + const jsonTree = this.buildHierarchyTree(roleDescriptions); + const jsonFile = `./docs/json/root-roles.json`; + await fs.ensureDir(path.dirname(jsonFile)); + await fs.writeFile(jsonFile, JSON.stringify(jsonTree, null, 2)); + uxLog("success", this, c.green(`Successfully generated Roles JSON into 
${jsonFile}`)); + } + + public static buildHierarchyTree(roleDescriptions: any[]): any[] { + // Build a tree structure for roles based on parent-child relationships + const roleMap = new Map(); + const rootRoles: any[] = []; + + // First pass: create all role nodes + for (const role of roleDescriptions) { + roleMap.set(role.apiName, { + text: role.name, + icon: "fa-solid fa-users icon-blue", + children: [], + roleData: role + }); + } + + // Second pass: build parent-child relationships + for (const role of roleDescriptions) { + const roleNode = roleMap.get(role.apiName); + if (role.parentRole && roleMap.has(role.parentRole)) { + const parentNode = roleMap.get(role.parentRole); + parentNode.children.push(roleNode); + } else { + // This is a root role (no parent) + rootRoles.push(roleNode); + } + } + + // Sort children by name for each node + const sortChildren = (node: any) => { + if (node.children && node.children.length > 0) { + node.children.sort((a: any, b: any) => a.text.localeCompare(b.text)); + node.children.forEach(sortChildren); + // Update text to show count + node.text = `${node.roleData.name} (${node.children.length})`; + } + }; + rootRoles.forEach(sortChildren); + rootRoles.sort((a: any, b: any) => a.roleData.name.localeCompare(b.roleData.name)); + + return rootRoles; + } + + +} diff --git a/src/common/docBuilder/docBuilderRoot.ts b/src/common/docBuilder/docBuilderRoot.ts new file mode 100644 index 000000000..e12592640 --- /dev/null +++ b/src/common/docBuilder/docBuilderRoot.ts @@ -0,0 +1,143 @@ +import c from 'chalk'; +import { UtilsAi } from "../aiProvider/utils.js"; +import { uxLog } from "../utils/index.js"; +import { PromptTemplate } from '../aiProvider/promptTemplates.js'; +import { AiProvider } from '../aiProvider/index.js'; +import { XMLParser } from 'fast-xml-parser'; +import fs from 'fs-extra'; +import path from 'path'; +import { getMetaHideLines, includeFromFile } from './docUtils.js'; +import { CONSTANTS } from '../../config/index.js'; + 
+export abstract class DocBuilderRoot { + public docType: string; + public promptKey: PromptTemplate; + public placeholder: string; + public xmlRootKey: string; + public docsSection: string; + + public metadataName: string; + public metadataXml: string = ""; + public outputFile: string; + public additionalVariables: any; + + public markdownDoc: string; + public parsedXmlObject: any; + + constructor(metadataName: string, metadataXml: string, outputFile: string, additionalVariables: any = {}) { + this.metadataName = metadataName; + this.metadataXml = metadataXml; + this.outputFile = outputFile; + this.additionalVariables = additionalVariables; + } + + // This method must be overridden + public async buildInitialMarkdownLines(): Promise { + return []; + } + + public async generateMarkdownFileFromXml() { + if (this.xmlRootKey === 'json') { + this.parsedXmlObject = this.metadataXml; + } + else if (this.xmlRootKey) { + this.parsedXmlObject = new XMLParser().parse(this.metadataXml)?.[this.xmlRootKey] || {}; + } + const mdLines: string[] = [ + '', + '', + '' + ]; + // Main lines generated by overridden method + const initialMdLines = await this.buildInitialMarkdownLines(); + mdLines.push(...initialMdLines); + // Footer + mdLines.push(""); + mdLines.push(`_Documentation generated with [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}), by [Cloudity](https://www.cloudity.com/) & [friends](https://github.com/hardisgroupcom/sfdx-hardis/graphs/contributors)_`); + + this.markdownDoc = mdLines.join("\n") + "\n"; + this.markdownDoc = await this.completeDocWithAiDescription(); + + await fs.ensureDir(path.dirname(this.outputFile)); + let overwriteDoc = true; + if (fs.existsSync(this.outputFile)) { + const fileContent = await fs.readFile(this.outputFile, "utf8"); + if (fileContent.includes("DO_NOT_OVERWRITE_DOC=TRUE")) { + uxLog("warning", this, c.yellow(`The file ${this.outputFile} is marked as DO_NOT_OVERWRITE_DOC=TRUE. 
Skipping generation.`)); + overwriteDoc = false; + } + } + if (overwriteDoc) { + await fs.writeFile(this.outputFile, getMetaHideLines() + this.markdownDoc); + uxLog("success", this, c.green(`Successfully generated ${this.metadataName} documentation into ${this.outputFile}`)); + } + + const jsonTree = await this.generateJsonTree(); + if (jsonTree) { + const jsonFile = `./docs/json/${this.docsSection}-${this.metadataName}.json`; + await fs.ensureDir(path.dirname(jsonFile)); + await fs.writeFile(jsonFile, JSON.stringify(jsonTree, null, 2)); + uxLog("success", this, c.green(`Successfully generated ${this.metadataName} JSON into ${jsonFile}`)); + // Recovery to save git repos: Kill existing file if it has been created with forbidden characters + if (this.docsSection === "packages") { + const jsonFileBad = `./docs/json/${this.docsSection}-${this.metadataName}.json`; + if (jsonFileBad !== jsonFile && fs.existsSync(jsonFileBad)) { + await fs.remove(jsonFileBad); + } + } + } + return this.outputFile; + } + + public async completeDocWithAiDescription(): Promise { + const xmlStripped = await this.stripXmlForAi(); + const aiCache = await UtilsAi.findAiCache(this.promptKey, [xmlStripped], this.metadataName); + + if (aiCache.success === true) { + uxLog("log", this, c.grey(`Used AI cache for ${this.docType.toLowerCase()} description (set IGNORE_AI_CACHE=true to force call to AI)`)); + const replaceText = `## AI-Generated Description\n\n${includeFromFile(aiCache.aiCacheDirFile, aiCache.cacheText || "")}`; + this.markdownDoc = this.markdownDoc.replace(this.placeholder, replaceText); + return this.markdownDoc; + } + + if (AiProvider.isAiAvailable()) { + const defaultVariables = { [`${this.docType.toUpperCase()}_NAME`]: this.metadataName, [`${this.docType.toUpperCase()}_XML`]: xmlStripped }; + const variables = Object.assign(defaultVariables, this.additionalVariables); + const prompt = AiProvider.buildPrompt(this.promptKey, variables); + /* jscpd:ignore-start */ + const aiResponse = 
await AiProvider.promptAi(prompt, this.promptKey); + if (aiResponse?.success) { + let responseText = aiResponse.promptResponse || "No AI description available"; + if (responseText.startsWith("##")) { + responseText = responseText.split("\n").slice(1).join("\n"); + } + await UtilsAi.writeAiCache(this.promptKey, [xmlStripped], this.metadataName, responseText); + const replaceText = `## AI-Generated Description\n\n${includeFromFile(aiCache.aiCacheDirFile, responseText)}`; + this.markdownDoc = this.markdownDoc.replace(this.placeholder, replaceText); + return this.markdownDoc; + } + /* jscpd:ignore-end */ + else if (aiResponse?.forcedTimeout) { + const forcedTimeoutText = `CI job reached maximum time allowed for allowed calls to AI. You can either: + + - Run command locally then commit + push + - Increase using variable \`AI_MAX_TIMEOUT_MINUTES\` in your CI config (ex: AI_MAX_TIMEOUT_MINUTES=120) after making sure than your CI job timeout can handle it :)`; + const replaceText = `## AI-Generated Description\n\n${includeFromFile(aiCache.aiCacheDirFile, forcedTimeoutText)}`; + this.markdownDoc = this.markdownDoc.replace(this.placeholder, replaceText); + } + } + + return this.markdownDoc; + } + + // Override this method if you need to make a smaller XML to fit in the number of prompt tokens + public async stripXmlForAi(): Promise { + return this.metadataXml; + } + + // Override this method if you need to generate a JSON tree for the doc + public async generateJsonTree(): Promise { + return null; + } + +} \ No newline at end of file diff --git a/src/common/docBuilder/docUtils.ts b/src/common/docBuilder/docUtils.ts new file mode 100644 index 000000000..c885a3661 --- /dev/null +++ b/src/common/docBuilder/docUtils.ts @@ -0,0 +1,248 @@ +import c from 'chalk'; +import fs from 'fs-extra'; + +import * as yaml from 'js-yaml'; +import { SfError } from "@salesforce/core"; +import { UtilsAi } from "../aiProvider/utils.js"; +import { AiProvider } from "../aiProvider/index.js"; +import 
{ uxLog, execCommand } from "../utils/index.js"; + + +export function readMkDocsFile(mkdocsYmlFile: string): any { + const mkdocsYml: any = yaml.load( + fs + .readFileSync(mkdocsYmlFile, 'utf-8') + .replace('!!python/name:materialx.emoji.twemoji', "!!python/name:material.extensions.emoji.twemoji") + .replace('!!python/name:materialx.emoji.to_svg', "!!python/name:material.extensions.emoji.to_svg") + .replace('!!python/name:material.extensions.emoji.twemoji', "'!!python/name:material.extensions.emoji.twemoji'") + .replace('!!python/name:material.extensions.emoji.to_svg', "'!!python/name:material.extensions.emoji.to_svg'") + .replace('!!python/name:pymdownx.superfences.fence_code_format', "'!!python/name:pymdownx.superfences.fence_code_format'") + ); + if (!mkdocsYml.nav) { + mkdocsYml.nav = {} + } + return mkdocsYml; +} + +export async function writeMkDocsFile(mkdocsYmlFile: string, mkdocsYml: any) { + const mkdocsYmlStr = yaml + .dump(mkdocsYml, { lineWidth: -1 }) + .replace("!!python/name:materialx.emoji.twemoji", '!!python/name:material.extensions.emoji.twemoji') + .replace("!!python/name:materialx.emoji.to_svg", '!!python/name:material.extensions.emoji.to_svg') + .replace("'!!python/name:material.extensions.emoji.twemoji'", '!!python/name:material.extensions.emoji.twemoji') + .replace("'!!python/name:material.extensions.emoji.to_svg'", '!!python/name:material.extensions.emoji.to_svg') + .replace("'!!python/name:pymdownx.superfences.fence_code_format'", '!!python/name:pymdownx.superfences.fence_code_format'); + await fs.writeFile(mkdocsYmlFile, mkdocsYmlStr); + uxLog("action", this, c.cyan(`Updated mkdocs-material config file at ${c.green(mkdocsYmlFile)}`)); +} + +const alreadySaid: string[] = []; + +export class SalesforceSetupUrlBuilder { + /** + * Map of metadata types to their Lightning Experience setup paths. 
+ */ + private static readonly setupAreaMap: Record = { + 'ActionLinkGroupTemplate': '/lightning/setup/ActionLinkTemplates/home', + 'AppMenu': '/lightning/setup/NavigationMenus/home', + 'ApprovalProcess': '/lightning/setup/ApprovalProcesses/home', + 'AssignmentRules': '/lightning/setup/AssignmentRules/home', + 'AuthProvider': '/lightning/setup/AuthProviders/home', + 'AutoResponseRules': '/lightning/setup/AutoResponseRules/home', + 'ApexClass': '/lightning/setup/ApexClasses/home', + 'ApexPage': '/lightning/setup/VisualforcePages/home', + 'ApexTrigger': '/lightning/setup/ApexTriggers/home', + 'BusinessProcess': '/lightning/setup/ObjectManager/{objectName}/BusinessProcesses/view', + 'CompactLayout': '/lightning/setup/ObjectManager/{objectName}/CompactLayouts/view', + 'ConnectedApp': '/lightning/setup/ConnectedApps/home', + 'ContentAsset': '/lightning/setup/ContentAssets/home', + 'CustomApplication': '/lightning/setup/NavigationMenus/home', + 'CustomField': '/lightning/setup/ObjectManager/{objectName}/FieldsAndRelationships/{apiName}/view', + 'CustomHelpMenu': '/lightning/setup/CustomHelpMenu/home', + 'CustomLabel': '/lightning/setup/CustomLabels/home', + 'CustomMetadata': '/lightning/setup/CustomMetadataTypes/home', + 'CustomNotificationType': '/lightning/setup/CustomNotifications/home', + 'CustomObject': '/lightning/setup/ObjectManager/{objectName}/Details/view', + 'CustomPermission': '/lightning/setup/CustomPermissions/home', + 'CustomSetting': '/lightning/setup/ObjectManager/{objectName}/Details/view', + 'CustomSite': '/lightning/setup/Sites/home', + 'CustomTab': '/lightning/setup/Tabs/home', + 'Dashboard': '/lightning/setup/Dashboards/home', + 'DashboardFolder': '/lightning/setup/DashboardFolders/home', + 'DataCategoryGroup': '/lightning/setup/DataCategories/home', + 'EmailServicesFunction': '/lightning/setup/EmailServices/home', + 'EmailTemplate': '/lightning/setup/EmailTemplates/home', + 'EntitlementTemplate': '/lightning/setup/EntitlementTemplates/home', + 
'EscalationRules': '/lightning/setup/EscalationRules/home', + 'EventSubscription': '/lightning/setup/PlatformEvents/home', + 'ExternalDataSource': '/lightning/setup/ExternalDataSources/home', + 'ExternalService': '/lightning/setup/ExternalServices/home', + 'FieldSet': '/lightning/setup/ObjectManager/{objectName}/FieldSets/view', + 'Flexipage': '/lightning/setup/FlexiPageList/home', + 'Flow': '/lightning/setup/Flows/home', + 'GlobalPicklist': '/lightning/setup/Picklists/home', + 'Group': '/lightning/setup/PublicGroups/home', + 'HomePageLayout': '/lightning/setup/HomePageLayouts/home', + 'Layout': '/lightning/setup/ObjectManager/{objectName}/PageLayouts/view', + 'LightningComponentBundle': '/lightning/setup/LightningComponents/home', + 'MilestoneType': '/lightning/setup/Milestones/home', + 'NamedCredential': '/lightning/setup/NamedCredentials/home', + 'OmniChannelSettings': '/lightning/setup/OmniChannelSettings/home', + 'PermissionSet': '/lightning/setup/PermissionSets/home', + 'PermissionSetGroup': '/lightning/setup/PermissionSetGroups/home', + 'PlatformEvent': '/lightning/setup/PlatformEvents/home', + 'Profile': '/lightning/setup/Profiles/home', + 'Queue': '/lightning/setup/Queues/home', + 'RecordType': '/lightning/setup/ObjectManager/{objectName}/RecordTypes/view', + 'RemoteSiteSetting': '/lightning/setup/RemoteSites/home', + 'Report': '/lightning/setup/Reports/home', + 'ReportFolder': '/lightning/setup/ReportFolders/home', + 'Role': '/lightning/setup/Roles/home', + 'ServiceChannel': '/lightning/setup/ServiceChannels/home', + 'SharingRules': '/lightning/setup/SharingRules/home', + 'StaticResource': '/lightning/setup/StaticResources/home', + 'Territory': '/lightning/setup/Territories/home', + 'TerritoryModel': '/lightning/setup/TerritoryManagement/home', + 'Translation': '/lightning/setup/Translations/home', + 'ValidationRule': '/lightning/setup/ObjectManager/{objectName}/ValidationRules/view', + 'VisualforcePage': '/lightning/setup/VisualforcePages/home', + 
'Workflow': '/lightning/setup/Workflow/home', + // Add more metadata types if needed + }; + + /** + * Generates the setup URL for a given metadata type and API name (if required). + * @param metadataType The metadata type (e.g., "CustomObject", "ApexClass"). + * @param apiName The API name of the metadata (optional, e.g., "Account"). + * @returns The constructed setup URL. + * @throws Error if the metadata type is unsupported or the API name is missing for required types. + */ + public static getSetupUrl(metadataType: string, apiName: string): string | null { + const pathTemplate = this.setupAreaMap[metadataType]; + + if (!pathTemplate) { + if (!alreadySaid.includes(metadataType)) { + uxLog("log", this, c.grey(`Unsupported metadata type for doc quick link: ${metadataType}`)); + alreadySaid.push(metadataType); + } + return null; + } + + let apiNameFinal = apiName + ""; + let objectName = "" + if (apiName.includes(".") && apiName.split(".").length === 2) { + [objectName, apiNameFinal] = apiName.split("."); + } + + // Replace placeholders in the path template with the provided API name + const urlPath = pathTemplate + .replace(/\{objectName\}/g, objectName || '') + .replace(/\{apiName\}/g, apiNameFinal || ''); + + if (urlPath.includes('{apiName}') || urlPath.includes('{objectName}')) { + uxLog("log", this, c.grey(`Wrong replacement in ${urlPath} with values apiName:${apiNameFinal} and objectName:${objectName}`)); + } + + return urlPath; + } +} + +export async function completeAttributesDescriptionWithAi(attributesMarkdown: string, objectName: string): Promise { + if (!attributesMarkdown) { + return attributesMarkdown; + } + const aiCache = await UtilsAi.findAiCache("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", [attributesMarkdown], objectName); + if (aiCache.success === true) { + uxLog("log", this, c.grey("Used AI cache for attributes completion (set IGNORE_AI_CACHE=true to force call to AI)")); + return aiCache.cacheText ?
includeFromFile(aiCache.aiCacheDirFile, aiCache.cacheText) : attributesMarkdown; + } + if (AiProvider.isAiAvailable()) { + // Invoke AI Service + const prompt = AiProvider.buildPrompt("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", { "MARKDOWN": attributesMarkdown, "OBJECT_NAME": objectName }); + const aiResponse = await AiProvider.promptAi(prompt, "PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD"); + // Replace description in markdown + if (aiResponse?.success) { + const responseText = aiResponse.promptResponse || "No AI description available"; + await UtilsAi.writeAiCache("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", [attributesMarkdown], objectName, responseText); + attributesMarkdown = includeFromFile(aiCache.aiCacheDirFile, responseText); + } + } + return attributesMarkdown; +} + +export async function replaceInFile(filePath: string, stringToReplace: string, replaceWith: string) { + const fileContent = await fs.readFile(filePath, 'utf8'); + const newContent = fileContent.replaceAll(stringToReplace, replaceWith); + await fs.writeFile(filePath, newContent); +} + +export async function generateMkDocsHTML() { + const mkdocsLocalOk = await installMkDocs(); + if (mkdocsLocalOk) { + // Generate MkDocs HTML pages with local MkDocs + uxLog("action", this, c.cyan("Generating HTML pages with mkdocs...")); + const mkdocsBuildRes = await execCommand("mkdocs build -v || python -m mkdocs build -v || py -m mkdocs build -v", this, { fail: false, output: true, debug: false }); + if (mkdocsBuildRes.status !== 0) { + throw new SfError('MkDocs build failed:\n' + mkdocsBuildRes.stderr + "\n" + mkdocsBuildRes.stdout); + } + } + else { + // Generate MkDocs HTML pages with Docker + uxLog("action", this, c.cyan("Generating HTML pages with Docker...")); + const mkdocsBuildRes = await execCommand("docker run --rm -v $(pwd):/docs squidfunk/mkdocs-material build -v", this, { fail: false, output: true, debug: false }); + if (mkdocsBuildRes.status !== 0) { + throw new SfError('MkDocs build with docker failed:\n' + 
mkdocsBuildRes.stderr + "\n" + mkdocsBuildRes.stdout); + } + } +} + +export async function installMkDocs() { + uxLog("action", this, c.cyan("Managing mkdocs-material local installation...")); + let mkdocsLocalOk = false; + const installMkDocsRes = await execCommand("pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || python -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || py -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || python3 -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists || py3 -m pip install mkdocs-material mkdocs-exclude-search mdx_truly_sane_lists", this, { fail: false, output: true, debug: false }); + if (installMkDocsRes.status === 0) { + mkdocsLocalOk = true; + } + return mkdocsLocalOk; +} + +export function getMetaHideLines(): string { + return `--- +hide: + - path +--- + +`; +} + +export function includeFromFile(cacheFilePath: string, content: string): string { + // Detect if cacheFilePath contains a fingerprint at the end after the last "-" + const fileNameWithoutExtension = cacheFilePath.substring(0, cacheFilePath.lastIndexOf(".")); + const fileExtensionWithDot = cacheFilePath.substring(cacheFilePath.lastIndexOf(".")); + const lastDashIndex = fileNameWithoutExtension.lastIndexOf("-"); + const cacheFileFingerprint = lastDashIndex !== -1 ? 
fileNameWithoutExtension.substring(lastDashIndex + 1) : ""; + // Check if the fingerprint is a valid number + const isValidFingerprint = /^\d+$/.test(cacheFileFingerprint); + if (isValidFingerprint) { + // Remove the fingerprint from the cacheFilePath + const cacheFilePathOverridden = fileNameWithoutExtension.substring(0, lastDashIndex) + fileExtensionWithDot; + return ` + + + +${content} + + +` + } + else { + return ` + + + +${content} + +` + } + +} diff --git a/src/common/docBuilder/objectModelBuilder.ts b/src/common/docBuilder/objectModelBuilder.ts new file mode 100644 index 000000000..1f0296307 --- /dev/null +++ b/src/common/docBuilder/objectModelBuilder.ts @@ -0,0 +1,197 @@ +import c from "chalk"; +import { glob } from "glob"; +import fs from "fs-extra"; +import * as path from "path"; +import { XMLParser } from "fast-xml-parser"; +import { GLOB_IGNORE_PATTERNS } from "../utils/projectUtils.js"; +import { uxLog } from "../utils/index.js"; + +let ALL_LINKS_CACHE: any[] = []; +let ALL_OBJECTS_CACHE: any[] = []; + +export class ObjectModelBuilder { + protected mainCustomObject: string; + protected relatedCustomObjects: string[]; + protected allLinks: any[] = []; + protected allObjects: any[] = []; + protected selectedObjectsNames: Set = new Set(); + protected selectedObjects: any[] = [] + protected selectedLinks: any[] = [] + + constructor(mainCustomObject: string = "all", relatedCustomObjects: string[] = []) { + this.mainCustomObject = mainCustomObject; + if (this.mainCustomObject !== "all") { + this.selectedObjectsNames.add(this.mainCustomObject); + } + this.relatedCustomObjects = relatedCustomObjects; + } + + async buildObjectsMermaidSchema() { + await this.buildAllLinks(); + await this.buildAllObjects(); + await this.selectObjects(); + await this.selectLinks(); + const mermaidSchema = await this.generateMermaidSchema(); + return mermaidSchema; + } + + async generateMermaidSchema() { + let mermaidSchema = `graph TD\n`; + for (const object of 
this.selectedObjects) { + const objectClass = + object.name === this.mainCustomObject ? "mainObject" : + !object.name.endsWith("__c") ? "object" : + object.name.split("__").length > 2 ? "customObjectManaged" : + "customObject"; + mermaidSchema += `${object.name}["${object.label}"]:::${objectClass}\n`; + if (fs.existsSync(`docs/objects/${object.name}.md`)) { + mermaidSchema += `click ${object.name} "/objects/${object.name}/"\n`; + } + } + mermaidSchema += `\n`; + let pos = 0; + const masterDetailPos: number[] = []; + const lookupPos: number[] = []; + for (const link of this.selectedLinks) { + if (link.type === "MasterDetail") { + mermaidSchema += `${link.from} ==>|${link.field}| ${link.to}\n`; + masterDetailPos.push(pos); + } + else { + mermaidSchema += `${link.from} -->|${link.field}| ${link.to}\n`; + lookupPos.push(pos); + } + pos++; + } + mermaidSchema += `\n`; + mermaidSchema += `classDef object fill:#D6E9FF,stroke:#0070D2,stroke-width:3px,rx:12px,ry:12px,shadow:drop,color:#333; +classDef customObject fill:#FFF4C2,stroke:#CCAA00,stroke-width:3px,rx:12px,ry:12px,shadow:drop,color:#333; +classDef customObjectManaged fill:#FFD8B2,stroke:#CC5500,stroke-width:3px,rx:12px,ry:12px,shadow:drop,color:#333; +classDef mainObject fill:#FFB3B3,stroke:#A94442,stroke-width:4px,rx:14px,ry:14px,shadow:drop,color:#333,font-weight:bold; +`; + + if (masterDetailPos.length > 0) { + mermaidSchema += "linkStyle " + masterDetailPos.join(",") + " stroke:#4C9F70,stroke-width:4px;\n"; + } + if (lookupPos.length > 0) { + mermaidSchema += "linkStyle " + lookupPos.join(",") + " stroke:#A6A6A6,stroke-width:2px;\n"; + } + + // Use Graph LR if there are too many lines for a nice mermaid display + if (mermaidSchema.split("\n").length > 50) { + mermaidSchema = mermaidSchema.replace("graph TD", "graph LR"); + } + + return mermaidSchema; + } + + async buildAllLinks() { + if (ALL_LINKS_CACHE.length > 0) { + this.allLinks = ALL_LINKS_CACHE; + return; + } + // List all object links in the project + 
const findFieldsPattern = `**/objects/**/fields/**.field-meta.xml`; + const matchingFieldFiles = (await glob(findFieldsPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })).map(file => file.replace(/\\/g, '/')); + for (const fieldFile of matchingFieldFiles) { + const objectName = fieldFile.substring(fieldFile.indexOf('objects/')).split("/")[1]; + if (objectName.endsWith("__dlm") || objectName.endsWith("__dll")) { + continue; + } + const fieldXml = fs.readFileSync(fieldFile, "utf8").toString(); + const fieldDetail = new XMLParser().parse(fieldXml); + if (fieldDetail?.CustomField?.type === "MasterDetail" || fieldDetail?.CustomField?.type === "Lookup") { + const fieldName = path.basename(fieldFile, ".field-meta.xml"); + if (fieldDetail?.CustomField?.referenceTo) { + const link = { + from: objectName, + to: fieldDetail.CustomField.referenceTo, + field: fieldName, + relationshipName: fieldDetail?.CustomField?.relationshipName || fieldDetail?.CustomField?.referenceTo, + type: fieldDetail.CustomField.type + }; + this.allLinks.push(link); + } + else { + uxLog("warning", this, c.yellow(`Warning: ${objectName}.${fieldName} has no referenceTo value so has been ignored.`)); + } + } + } + ALL_LINKS_CACHE = [...this.allLinks]; + } + + async buildAllObjects() { + if (ALL_OBJECTS_CACHE.length > 0) { + this.allObjects = ALL_OBJECTS_CACHE; + return; + } + // Get custom objects info + const findObjectsPattern = `**/objects/**/*.object-meta.xml`; + const matchingObjectFiles = (await glob(findObjectsPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS })).map(file => file.replace(/\\/g, '/')); + for (const objectFile of matchingObjectFiles) { + const objectName = path.basename(objectFile, ".object-meta.xml"); + const objectXml = fs.readFileSync(objectFile, "utf8").toString(); + const objectDetail = new XMLParser().parse(objectXml); + const object = { + name: objectName, + label: objectDetail?.CustomObject?.label || objectName, + description: 
objectDetail?.CustomObject?.description || '', + }; + this.allObjects.push(object); + } + ALL_OBJECTS_CACHE = [...this.allObjects]; + } + + async selectObjects() { + for (const link of this.allLinks) { + if (this.relatedCustomObjects.includes(link.from) || this.relatedCustomObjects.includes(link.to)) { + this.selectedObjectsNames.add(link.from); + this.selectedObjectsNames.add(link.to); + } + else if (this.mainCustomObject === "all") { + this.selectedObjectsNames.add(link.from); + this.selectedObjectsNames.add(link.to); + } + else { + if (link.from === this.mainCustomObject || link.to === this.mainCustomObject) { + this.selectedObjectsNames.add(link.from); + this.selectedObjectsNames.add(link.to); + } + } + } + for (const object of this.allObjects) { + if (this.selectedObjectsNames.has(object.name)) { + this.selectedObjects.push(object); + } + } + // Complete with objects with missing .object-meta.xml file + for (const object of this.selectedObjectsNames) { + if (!this.selectedObjects.some(obj => obj.name === object)) { + this.selectedObjects.push({ name: object, label: object, description: '' }); + } + } + } + + async selectLinks() { + if (this.selectedObjectsNames.size > 10) { + for (const link of this.allLinks) { + if ((link.from === this.mainCustomObject || link.to === this.mainCustomObject) && + (this.selectedObjectsNames.has(link.from) && this.selectedObjectsNames.has(link.to)) + ) { + this.selectedLinks.push(link); + } + } + } + else { + for (const link of this.allLinks) { + if (this.selectedObjectsNames.has(link.from) && this.selectedObjectsNames.has(link.to)) { + this.selectedLinks.push(link); + } + } + } + } + +} + + + diff --git a/src/common/gitProvider/azureDevops.ts b/src/common/gitProvider/azureDevops.ts index 49e0b6544..ae976ab24 100644 --- a/src/common/gitProvider/azureDevops.ts +++ b/src/common/gitProvider/azureDevops.ts @@ -1,41 +1,81 @@ -import { GitProviderRoot } from "./gitProviderRoot"; +import { GitProviderRoot } from "./gitProviderRoot.js"; 
import * as azdev from "azure-devops-node-api"; -import * as c from "chalk"; -import { getCurrentGitBranch, git, uxLog } from "../utils"; -import { PullRequestMessageRequest, PullRequestMessageResult } from "."; -import { - CommentThreadStatus, - GitPullRequestCommentThread, - PullRequestAsyncStatus, - PullRequestStatus, -} from "azure-devops-node-api/interfaces/GitInterfaces"; +import c from "chalk"; +import fs from 'fs-extra'; +import { getCurrentGitBranch, getGitRepoUrl, git, isGitRepo, uxLog } from "../utils/index.js"; +import * as path from "path"; +import { CommonPullRequestInfo, PullRequestMessageRequest, PullRequestMessageResult } from "./index.js"; +import { CommentThreadStatus, GitPullRequest, GitPullRequestCommentThread, GitPullRequestSearchCriteria, PullRequestAsyncStatus, PullRequestStatus } from "azure-devops-node-api/interfaces/GitInterfaces.js"; +import { CONSTANTS } from "../../config/index.js"; +import { SfError } from "@salesforce/core"; +import { prompts } from "../utils/prompts.js"; export class AzureDevopsProvider extends GitProviderRoot { private azureApi: InstanceType; + public serverUrl: string; + public token: string; + public attachmentsWorkItemId: number; + public attachmentsWorkItemTitle: string = process.env.AZURE_ATTACHMENTS_WORK_ITEM_TITLE || 'sfdx-hardis tech attachments' constructor() { super(); // Azure server url must be provided in SYSTEM_COLLECTIONURI. 
ex: https:/dev.azure.com/mycompany - this.serverUrl = process.env.SYSTEM_COLLECTIONURI; + this.serverUrl = process.env.SYSTEM_COLLECTIONURI || ""; // a Personal Access Token must be defined - this.token = process.env.CI_SFDX_HARDIS_AZURE_TOKEN || process.env.SYSTEM_ACCESSTOKEN; + this.token = process.env.CI_SFDX_HARDIS_AZURE_TOKEN || process.env.SYSTEM_ACCESSTOKEN || ""; const authHandler = azdev.getHandlerFromToken(this.token); this.azureApi = new azdev.WebApi(this.serverUrl, authHandler); } + public static async handleLocalIdentification() { + if (!isGitRepo()) { + uxLog("warning", this, c.yellow("[Azure Integration] You must be in a git repository context")); + return; + } + if (!process.env.SYSTEM_COLLECTIONURI) { + const repoUrl = await getGitRepoUrl() || ""; + if (!repoUrl) { + uxLog("warning", this, c.yellow("[Azure Integration] An git origin must be set")); + return; + } + const parseUrlRes = this.parseAzureRepoUrl(repoUrl); + if (!parseUrlRes) { + uxLog("warning", this, c.yellow(`[Azure Integration] Unable to parse ${repoUrl} to get SYSTEM_COLLECTIONURI and BUILD_REPOSITORY_ID`)); + return; + } + process.env.SYSTEM_COLLECTIONURI = parseUrlRes.collectionUri; + process.env.SYSTEM_TEAMPROJECT = parseUrlRes.teamProject; + process.env.BUILD_REPOSITORY_ID = parseUrlRes.repositoryId; + } + if (!process.env.SYSTEM_ACCESSTOKEN) { + uxLog("warning", this, c.yellow("If you need an Azure Personal Access Token, create one following this documentation: https://learn.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops&tabs=Windows")); + uxLog("warning", this, c.yellow("Then please save it in a secured password tracker !")); + const accessTokenResp = await prompts({ + name: "token", + message: "Please input an Azure Personal Access Token", + description: "Enter your Azure DevOps Personal Access Token for API authentication (will not be stored permanently)", + type: "text" + }); + 
process.env.SYSTEM_ACCESSTOKEN = accessTokenResp.token; + } + } + public getLabel(): string { return "sfdx-hardis Azure Devops connector"; } // Returns current job URL - public async getCurrentJobUrl(): Promise { + public async getCurrentJobUrl(): Promise { + if (process.env.PIPELINE_JOB_URL) { + return process.env.PIPELINE_JOB_URL; + } if (process.env.SYSTEM_COLLECTIONURI && process.env.SYSTEM_TEAMPROJECT && process.env.BUILD_BUILDID) { - const jobUrl = `${process.env.SYSTEM_COLLECTIONURI}${encodeURIComponent(process.env.SYSTEM_TEAMPROJECT)}/_build/results?buildId=${ - process.env.BUILD_BUILDID - }`; + const jobUrl = `${process.env.SYSTEM_COLLECTIONURI}${encodeURIComponent(process.env.SYSTEM_TEAMPROJECT)}/_build/results?buildId=${process.env.BUILD_BUILDID + }`; return jobUrl; } uxLog( + "warning", this, c.yellow(`[Azure DevOps] You need the following variables to be accessible to sfdx-hardis to build current job url: - SYSTEM_COLLECTIONURI @@ -46,7 +86,7 @@ export class AzureDevopsProvider extends GitProviderRoot { } // Returns current job URL - public async getCurrentBranchUrl(): Promise { + public async getCurrentBranchUrl(): Promise { if ( process.env.SYSTEM_COLLECTIONURI && process.env.SYSTEM_TEAMPROJECT && @@ -59,6 +99,7 @@ export class AzureDevopsProvider extends GitProviderRoot { return currentBranchUrl; } uxLog( + "warning", this, c.yellow(`[Azure DevOps] You need the following variables to be defined in azure devops pipeline step: ${this.getPipelineVariablesConfig()} @@ -67,8 +108,13 @@ ${this.getPipelineVariablesConfig()} return null; } + // Azure does not supports mermaid in PR markdown + public async supportsMermaidInPrMarkdown(): Promise { + return false; + } + // Find pull request info - public async getPullRequestInfo(): Promise { + public async getPullRequestInfo(): Promise { // Case when PR is found in the context // Get CI variables const repositoryId = process.env.BUILD_REPOSITORY_ID || null; @@ -84,19 +130,19 @@ 
${this.getPipelineVariablesConfig()} const pullRequest = await azureGitApi.getPullRequestById(pullRequestId); if (pullRequest && pullRequest.targetRefName) { // Add references to work items in PR result - const pullRequestWorkItemRefs = await azureGitApi.getPullRequestWorkItemRefs(repositoryId, pullRequestId); + const pullRequestWorkItemRefs = await azureGitApi.getPullRequestWorkItemRefs(repositoryId || "", pullRequestId); if (!pullRequest.workItemRefs) { pullRequest.workItemRefs = pullRequestWorkItemRefs; } return this.completePullRequestInfo(pullRequest); } else { - uxLog(this, c.yellow(`[Azure Integration] Warning: incomplete PR found (id: ${pullRequestIdStr})`)); - uxLog(this, c.grey(JSON.stringify(pullRequest || {}))); + uxLog("warning", this, c.yellow(`[Azure Integration] Warning: incomplete PR found (id: ${pullRequestIdStr})`)); + uxLog("log", this, c.grey(JSON.stringify(pullRequest || {}))); } } // Case when we find PR from a commit const sha = await git().revparse(["HEAD"]); - const latestPullRequestsOnBranch = await azureGitApi.getPullRequests(repositoryId, { + const latestPullRequestsOnBranch = await azureGitApi.getPullRequests(repositoryId || "", { targetRefName: `refs/heads/${currentGitBranch}`, status: PullRequestStatus.Completed, }); @@ -106,23 +152,122 @@ ${this.getPipelineVariablesConfig()} if (latestMergedPullRequestOnBranch.length > 0) { const pullRequest = latestMergedPullRequestOnBranch[0]; // Add references to work items in PR result - const pullRequestWorkItemRefs = await azureGitApi.getPullRequestWorkItemRefs(repositoryId, pullRequest.pullRequestId); + const pullRequestWorkItemRefs = await azureGitApi.getPullRequestWorkItemRefs(repositoryId || "", pullRequest.pullRequestId || 0); if (!pullRequest.workItemRefs) { pullRequest.workItemRefs = pullRequestWorkItemRefs; } return this.completePullRequestInfo(latestMergedPullRequestOnBranch[0]); } - uxLog(this, c.grey(`[Azure Integration] Unable to find related Pull Request Info`)); + uxLog("log", 
this, c.grey(`[Azure Integration] Unable to find related Pull Request Info`)); return null; } - public async getBranchDeploymentCheckId(gitBranch: string): Promise { - let deploymentCheckId = null; + public async listPullRequests(filters: { + pullRequestStatus?: "open" | "merged" | "abandoned", + targetBranch?: string, + minDate?: Date + } = {}, + options: { + formatted?: boolean + } = { formatted: false } + ): Promise { + // Get Azure Git API + const azureGitApi = await this.azureApi.getGitApi(); + const repositoryId = process.env.BUILD_REPOSITORY_ID || null; + if (repositoryId == null) { + uxLog("warning", this, c.yellow("[Azure Integration] Unable to find BUILD_REPOSITORY_ID")); + return []; + } + const teamProject = process.env.SYSTEM_TEAMPROJECT || null; + if (teamProject == null) { + uxLog("warning", this, c.yellow("[Azure Integration] Unable to find SYSTEM_TEAMPROJECT")); + return []; + } + // Build search criteria + const queryConstraint: GitPullRequestSearchCriteria = {}; + if (filters.pullRequestStatus) { + const azurePrStatusValue = + filters.pullRequestStatus === "open" ? PullRequestStatus.Active : + filters.pullRequestStatus === "abandoned" ? PullRequestStatus.Abandoned : + filters.pullRequestStatus === "merged" ? 
PullRequestStatus.Completed : + null; + if (azurePrStatusValue == null) { + throw new SfError(`[Azure Integration] No matching status for ${filters.pullRequestStatus} in ${JSON.stringify(PullRequestStatus)}`); + } + queryConstraint.status = azurePrStatusValue + } + else { + queryConstraint.status = PullRequestStatus.All + } + if (filters.targetBranch) { + queryConstraint.targetRefName = `refs/heads/${filters.targetBranch}` + } + if (filters.minDate) { + queryConstraint.minTime = filters.minDate + } + // Process request + uxLog("action", this, c.cyan("Calling Azure API to list Pull Requests...")); + uxLog("log", this, c.grey(`Constraint:\n${JSON.stringify(queryConstraint, null, 2)}`)); + + // List pull requests + const pullRequests = await azureGitApi.getPullRequests(repositoryId, queryConstraint, teamProject); + // Complete results with PR comments + const pullRequestsWithComments: Array = []; + for (const pullRequest of pullRequests) { + const pr: GitPullRequest & { threads?: any[] } = Object.assign({}, pullRequest); + uxLog("log", this, c.grey(`Getting threads for PR ${pullRequest.pullRequestId}...`)); + const existingThreads = await azureGitApi.getThreads(pullRequest.repository?.id || "", pullRequest.pullRequestId || 0, teamProject); + pr.threads = existingThreads.filter(thread => !thread.isDeleted); + pullRequestsWithComments.push(pr); + } + + // Format if requested + if (options.formatted) { + uxLog("action", this, c.cyan(`Formatting ${pullRequestsWithComments.length} results...`)); + const pullRequestsFormatted = pullRequestsWithComments.map(pr => { + const prFormatted: any = {}; + // Find sfdx-hardis deployment simulation status comment and extract tickets part + let tickets = ""; + for (const thread of pr.threads || []) { + for (const comment of thread?.comments || []) { + if ((comment?.content || "").includes(`/gm.exec(comment.content); if (matches) { deploymentCheckId = matches[1]; - uxLog(this, c.gray(`Found deployment id ${deploymentCheckId} on PR 
#${latestPullRequestId} ${latestPullRequest.title}`)); + uxLog("log", this, c.grey(`Found deployment id ${deploymentCheckId} on PR #${latestPullRequestId} ${latestPullRequest.title}`)); break; } } @@ -195,8 +339,9 @@ ${this.getPipelineVariablesConfig()} const jobId = process.env.SYSTEM_JOB_ID || null; const pullRequestIdStr = process.env.SYSTEM_PULLREQUEST_PULLREQUESTID || null; if (repositoryId == null || pullRequestIdStr == null) { - uxLog(this, c.grey("[Azure integration] No project and pull request, so no note thread...")); + uxLog("log", this, c.grey("[Azure integration] No project and pull request, so no note thread...")); uxLog( + "warning", this, c.yellow(`Following variables should be defined when available: ${this.getPipelineVariablesConfig()} @@ -206,8 +351,8 @@ ${this.getPipelineVariablesConfig()} } const pullRequestId = Number(pullRequestIdStr); const azureJobName = process.env.SYSTEM_JOB_DISPLAY_NAME; - const SYSTEM_COLLECTIONURI = process.env.SYSTEM_COLLECTIONURI.replace(/ /g, "%20"); - const SYSTEM_TEAMPROJECT = process.env.SYSTEM_TEAMPROJECT.replace(/ /g, "%20"); + const SYSTEM_COLLECTIONURI = (process.env.SYSTEM_COLLECTIONURI || "").replace(/ /g, "%20"); + const SYSTEM_TEAMPROJECT = (process.env.SYSTEM_TEAMPROJECT || "").replace(/ /g, "%20"); const azureBuildUri = `${SYSTEM_COLLECTIONURI}${encodeURIComponent(SYSTEM_TEAMPROJECT)}/_build/results?buildId=${buildId}&view=logs&j=${jobId}`; // Build thread message const messageKey = prMessage.messageKey + "-" + azureJobName + "-" + pullRequestId; @@ -217,21 +362,23 @@ ${prMessage.message}
-_Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${azureJobName}](${azureBuildUri})_ +_Powered by [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}) from job [${azureJobName}](${azureBuildUri})_ `; // Add deployment id if present if (globalThis.pullRequestDeploymentId) { messageBody += `\n`; } + // Upload attached images if necessary + messageBody = await this.uploadAndReplaceImageReferences(messageBody, prMessage.sourceFile || ""); // Get Azure Git API const azureGitApi = await this.azureApi.getGitApi(); // Check for existing threads from a previous run - uxLog(this, c.grey("[Azure integration] Listing Threads of Pull Request...")); + uxLog("log", this, c.grey("[Azure integration] Listing Threads of Pull Request...")); const existingThreads = await azureGitApi.getThreads(repositoryId, pullRequestId); - let existingThreadId: number = null; - let existingThreadComment: GitPullRequestCommentThread = null; - let existingThreadCommentId: number = null; + let existingThreadId: number | null = null; + let existingThreadComment: GitPullRequestCommentThread | null = null; + let existingThreadCommentId: number | null | undefined = null; for (const existingThread of existingThreads) { if (existingThread.isDeleted) { continue; @@ -239,8 +386,8 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${azureJob for (const comment of existingThread?.comments || []) { if ((comment?.content || "").includes(``)) { existingThreadComment = existingThread; - existingThreadCommentId = existingThread.comments[0].id; - existingThreadId = existingThread.id; + existingThreadCommentId = (existingThread.comments || [])[0].id; + existingThreadId = existingThread.id || null; break; } } @@ -252,8 +399,8 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${azureJob // Create or update MR note if (existingThreadId) { // Delete previous comment - uxLog(this, c.grey("[Azure integration] Deleting previous comment and closing previous thread...")); - 
await azureGitApi.deleteComment(repositoryId, pullRequestId, existingThreadId, existingThreadCommentId); + uxLog("log", this, c.grey("[Azure integration] Deleting previous comment and closing previous thread...")); + await azureGitApi.deleteComment(repositoryId, pullRequestId, existingThreadId, existingThreadCommentId || 0); existingThreadComment = await azureGitApi.getPullRequestThread(repositoryId, pullRequestId, existingThreadId); // Update existing thread existingThreadComment = { @@ -264,17 +411,17 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${azureJob } // Create new thread - uxLog(this, c.grey("[Azure integration] Adding Pull Request Thread on Azure...")); + uxLog("log", this, c.grey("[Azure integration] Adding Pull Request Thread on Azure...")); const newThreadComment: GitPullRequestCommentThread = { comments: [{ content: messageBody }], status: this.pullRequestStatusToAzureThreadStatus(prMessage), }; const azureEditThreadResult = await azureGitApi.createThread(newThreadComment, repositoryId, pullRequestId); const prResult: PullRequestMessageResult = { - posted: azureEditThreadResult.id > 0, + posted: (azureEditThreadResult.id || -1) > 0, providerResult: azureEditThreadResult, }; - uxLog(this, c.grey(`[Azure integration] Posted Pull Request Thread ${azureEditThreadResult.id}`)); + uxLog("log", this, c.grey(`[Azure integration] Posted Pull Request Thread ${azureEditThreadResult.id}`)); return prResult; } @@ -287,15 +434,22 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${azureJob : CommentThreadStatus.Unknown; } - private completePullRequestInfo(prData: any) { - const prInfo: any = Object.assign({}, prData); - prInfo.sourceBranch = (prData.sourceRefName || "").replace("refs/heads/", ""); - prInfo.targetBranch = (prData.targetRefName || "").replace("refs/heads/", ""); - prInfo.web_url = `${process.env.SYSTEM_COLLECTIONURI}${encodeURIComponent(process.env.SYSTEM_TEAMPROJECT)}/_git/${encodeURIComponent( - 
process.env.BUILD_REPOSITORYNAME, - )}/pullrequest/${prData.pullRequestId}`; - prInfo.authorName = prData?.createdBy?.displayName || ""; - return prInfo; + private completePullRequestInfo(prData: GitPullRequest): CommonPullRequestInfo { + const prInfo: CommonPullRequestInfo = { + idNumber: prData.pullRequestId || 0, + idStr: String(prData.pullRequestId || 0), + sourceBranch: (prData.sourceRefName || "").replace("refs/heads/", ""), + targetBranch: (prData.targetRefName || "").replace("refs/heads/", ""), + title: prData.title || "", + description: prData.description || "", + webUrl: `${process.env.SYSTEM_COLLECTIONURI}${encodeURIComponent(process.env.SYSTEM_TEAMPROJECT || "")}/_git/${encodeURIComponent( + process.env.BUILD_REPOSITORYNAME || "", + )}/pullrequest/${prData.pullRequestId}`, + authorName: prData?.createdBy?.displayName || "", + providerInfo: prData, + customBehaviors: {} + }; + return this.completeWithCustomBehaviors(prInfo); } private getPipelineVariablesConfig() { @@ -320,4 +474,133 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${azureJob BUILD_SOURCEBRANCHNAME: $(Build.SourceBranchName) BUILD_BUILD_ID: $(Build.BuildId)`; } + + // Do not make crash the whole process in case there is an issue with integration + public async tryPostPullRequestMessage(prMessage: PullRequestMessageRequest): Promise { + let prResult: PullRequestMessageResult | null = null; + try { + prResult = await this.postPullRequestMessage(prMessage); + } catch (e) { + uxLog("warning", this, c.yellow(`[GitProvider] Error while trying to post pull request message.\n${(e as Error).message}\n${(e as Error).stack}`)); + prResult = { posted: false, providerResult: { error: e } }; + } + return prResult; + } + + public static parseAzureRepoUrl(remoteUrl: string): { + collectionUri: string; + teamProject: string; + repositoryId: string; + } | null { + let collectionUri: string; + let repositoryId: string; + let teamProject: string; + + if 
(remoteUrl.startsWith("https://")) { + // Handle HTTPS URLs with or without username + const httpsRegex = /https:\/\/(?:[^@]+@)?dev\.azure\.com\/([^/]+)\/([^/]+)\/_git\/([^/]+)/; + const match = remoteUrl.match(httpsRegex); + if (match) { + const organization = match[1]; + teamProject = decodeURIComponent(match[2]); // Decode URL-encoded project name + repositoryId = decodeURIComponent(match[3]); // Decode URL-encoded repository name + collectionUri = `https://dev.azure.com/${organization}/`; + return { collectionUri, teamProject, repositoryId }; + } + } else if (remoteUrl.startsWith("git@")) { + /* jscpd:ignore-start */ + // Handle SSH URLs + const sshRegex = /git@ssh\.dev\.azure\.com:v3\/([^/]+)\/([^/]+)\/([^/]+)/; + const match = remoteUrl.match(sshRegex); + if (match) { + const organization = match[1]; + teamProject = decodeURIComponent(match[2]); // Decode URL-encoded project name + repositoryId = decodeURIComponent(match[3]); // Decode URL-encoded repository name + collectionUri = `https://dev.azure.com/${organization}/`; + return { collectionUri, teamProject, repositoryId }; + } + /* jscpd:ignore-end */ + } + + // Return null if the URL doesn't match expected patterns + return null; + } + + public async uploadImage(localImagePath: string): Promise { + try { + // Upload the image to Azure DevOps + const imageName = path.basename(localImagePath); + const imageContent = fs.createReadStream(localImagePath); + const witApi = await this.azureApi.getWorkItemTrackingApi(); + const attachment = await witApi.createAttachment( + null, // Custom headers (usually null) + imageContent, // File content + imageName, // File name + "simple", + process.env.SYSTEM_TEAMPROJECT, // Project name + ); + if (attachment && attachment.url) { + uxLog("log", this, c.grey(`[Azure Integration] Image uploaded for comment: ${attachment.url}`)); + // Link attachment to work item + const techWorkItemId = await this.findCreateAttachmentsWorkItemId(); + if (techWorkItemId) { + await 
witApi.updateWorkItem( + [], + [ + { + op: "add", + path: "/relations/-", + value: { + rel: "AttachedFile", + url: attachment.url, + attributes: { + comment: "Uploaded Flow Diff image, generated by sfdx-hardis" + } + } + } + ], + techWorkItemId, + process.env.SYSTEM_TEAMPROJECT + ); + uxLog("log", this, c.grey(`[Azure Integration] Attachment linked to work item ${techWorkItemId}`)); + } + return attachment.url; + } + else { + uxLog("warning", this, c.yellow(`[Azure Integration] Image uploaded but unable to get URL from response\n${JSON.stringify(attachment, null, 2)}`)); + } + } catch (e) { + uxLog("warning", this, c.yellow(`[Azure Integration] Error while uploading image ${localImagePath}\n${(e as Error).message}`)); + } + return null; + } + + public async findCreateAttachmentsWorkItemId() { + if (this.attachmentsWorkItemId) { + return this.attachmentsWorkItemId; + } + const workItemId = process.env.AZURE_ATTACHMENTS_WORK_ITEM_ID; + if (workItemId) { + this.attachmentsWorkItemId = Number(workItemId); + return this.attachmentsWorkItemId; + } + // Try to find the work item + const witApi = await this.azureApi.getWorkItemTrackingApi(); + const wiql = { + query: ` + SELECT [System.Id], [System.Title] + FROM WorkItems + WHERE [System.Title] = '${this.attachmentsWorkItemTitle}' + AND [System.TeamProject] = '${process.env.SYSTEM_TEAMPROJECT}' + ` + }; + const queryResult = await witApi.queryByWiql(wiql); + const workItemIds = (queryResult.workItems || []).map(item => item.id); + if (workItemIds.length > 0) { + this.attachmentsWorkItemId = Number(workItemIds[0]); + return this.attachmentsWorkItemId; + } + uxLog("error", this, c.red(`[Azure Integration] You need to create a technical work item exactly named '${this.attachmentsWorkItemTitle}', then set its identifier in variable AZURE_ATTACHMENTS_WORK_ITEM_ID`)); + return null; + } } diff --git a/src/common/gitProvider/bitbucket.ts b/src/common/gitProvider/bitbucket.ts index 5a019c8e5..a144dd7cd 100644 --- 
a/src/common/gitProvider/bitbucket.ts +++ b/src/common/gitProvider/bitbucket.ts @@ -1,57 +1,74 @@ -import { GitProviderRoot } from "./gitProviderRoot"; -import * as c from "chalk"; -import { PullRequestMessageRequest, PullRequestMessageResult } from "."; -import { git, uxLog } from "../utils"; -import { Bitbucket, Schema } from "bitbucket"; +import { GitProviderRoot } from './gitProviderRoot.js'; +import c from 'chalk'; +import fs from "fs-extra"; +import FormData from 'form-data' +import * as path from "path"; +import { CommonPullRequestInfo, PullRequestMessageRequest, PullRequestMessageResult } from './index.js'; +import { git, uxLog } from '../utils/index.js'; +import bbPkg, { Schema } from 'bitbucket'; +import { CONSTANTS } from '../../config/index.js'; +const { Bitbucket } = bbPkg; export class BitbucketProvider extends GitProviderRoot { private bitbucket: InstanceType; + public serverUrl: string = 'https://bitbucket.org'; + public token: string; constructor() { super(); - const token = process.env.CI_SFDX_HARDIS_BITBUCKET_TOKEN; - const clientOptions = { auth: { token: token } }; + this.token = process.env.CI_SFDX_HARDIS_BITBUCKET_TOKEN || ''; + const clientOptions = { auth: { token: this.token } }; this.bitbucket = new Bitbucket(clientOptions); } public getLabel(): string { - return "sfdx-hardis Bitbucket connector"; + return 'sfdx-hardis Bitbucket connector'; } - public async getCurrentJobUrl(): Promise { + public async getCurrentJobUrl(): Promise { + if (process.env.PIPELINE_JOB_URL) { + return process.env.PIPELINE_JOB_URL; + } if (process.env.BITBUCKET_WORKSPACE && process.env.BITBUCKET_REPO_SLUG && process.env.BITBUCKET_BUILD_NUMBER) { - const jobUrl = `https://bitbucket.org/${process.env.BITBUCKET_WORKSPACE}/${process.env.BITBUCKET_REPO_SLUG}/pipelines/results/${process.env.BITBUCKET_BUILD_NUMBER}`; + const jobUrl = 
`${this.serverUrl}/${process.env.BITBUCKET_WORKSPACE}/${process.env.BITBUCKET_REPO_SLUG}/pipelines/results/${process.env.BITBUCKET_BUILD_NUMBER}`; return jobUrl; } uxLog( + "warning", this, c.yellow(`[Bitbucket Integration] You need the following variables to be accessible to sfdx-hardis to build current job url: - BITBUCKET_WORKSPACE - BITBUCKET_REPO_SLUG - - BITBUCKET_BUILD_NUMBER`), + - BITBUCKET_BUILD_NUMBER`) ); return null; } - public async getCurrentBranchUrl(): Promise { + public async getCurrentBranchUrl(): Promise { if (process.env.BITBUCKET_WORKSPACE && process.env.BITBUCKET_REPO_SLUG && process.env.BITBUCKET_BRANCH) { - const currentBranchUrl = `https://bitbucket.org/${process.env.BITBUCKET_WORKSPACE}/${process.env.BITBUCKET_REPO_SLUG}/branch/${process.env.BITBUCKET_BRANCH}`; + const currentBranchUrl = `${this.serverUrl}/${process.env.BITBUCKET_WORKSPACE}/${process.env.BITBUCKET_REPO_SLUG}/branch/${process.env.BITBUCKET_BRANCH}`; return currentBranchUrl; } uxLog( + "warning", this, c.yellow(`[Bitbucket Integration] You need the following variables to be accessible to sfdx-hardis to build current job url: - BITBUCKET_WORKSPACE - BITBUCKET_REPO_SLUG - - BITBUCKET_BRANCH`), + - BITBUCKET_BRANCH`) ); return null; } + // Bitbucket does not supports mermaid in PR markdown + public async supportsMermaidInPrMarkdown(): Promise { + return false; + } + // Find pull request info - public async getPullRequestInfo(): Promise { + public async getPullRequestInfo(): Promise { const pullRequestIdStr = process.env.BITBUCKET_PR_ID || null; const repoSlug = process.env.BITBUCKET_REPO_SLUG || null; const workspace = process.env.BITBUCKET_WORKSPACE || null; @@ -61,66 +78,81 @@ export class BitbucketProvider extends GitProviderRoot { const pullRequestId = Number(pullRequestIdStr); const pullRequest = await this.bitbucket.repositories.getPullRequest({ pull_request_id: pullRequestId, - repo_slug: repoSlug, - workspace: workspace, + repo_slug: repoSlug || '', + workspace: 
workspace || '', }); if (pullRequest?.data.destination) { // Add cross git provider properties used by sfdx-hardis return this.completePullRequestInfo(pullRequest.data); } else { - uxLog(this, c.yellow(`[Bitbucket Integration] Warning: incomplete PR found (id: ${pullRequestIdStr})`)); - uxLog(this, c.grey(JSON.stringify(pullRequest || {}))); + uxLog("warning", this, c.yellow(`[Bitbucket Integration] Warning: incomplete PR found (id: ${pullRequestIdStr})`)); + uxLog("log", this, c.grey(JSON.stringify(pullRequest || {}))); } } // Case when we find PR from a commit - const sha = await git().revparse(["HEAD"]); + const sha = await git().revparse(['HEAD']); const latestPullRequestsOnBranch = await this.bitbucket.repositories.listPullrequestsForCommit({ // cspell:disable-line commit: sha, - repo_slug: repoSlug, - workspace: workspace, + repo_slug: repoSlug || '', + workspace: workspace || '', }); const latestMergedPullRequestOnBranch = latestPullRequestsOnBranch?.data?.values?.filter( - (pr) => pr.state === "MERGED" && pr.merge_commit?.hash === sha, + (pr) => pr.state === 'MERGED' && pr.merge_commit?.hash === sha ); - if (latestMergedPullRequestOnBranch?.length > 0) { + if (latestMergedPullRequestOnBranch?.length && latestMergedPullRequestOnBranch?.length > 0) { const pullRequest = latestMergedPullRequestOnBranch[0]; // Add cross git provider properties used by sfdx-hardis return this.completePullRequestInfo(pullRequest); } - uxLog(this, c.grey(`[Bitbucket Integration] Unable to find related Pull Request Info`)); + uxLog("log", this, c.grey(`[Bitbucket Integration] Unable to find related Pull Request Info`)); return null; } - public async getBranchDeploymentCheckId(gitBranch: string): Promise { - let deploymentCheckId = null; + public async getBranchDeploymentCheckId(gitBranch: string): Promise { + let deploymentCheckId: string | null = null; const repoSlug = process.env.BITBUCKET_REPO_SLUG || null; const workspace = process.env.BITBUCKET_WORKSPACE || null; const 
latestMergedPullRequestsOnBranch = await this.bitbucket.repositories.listPullRequests({ - repo_slug: repoSlug, - workspace: workspace, - state: "MERGED", + repo_slug: repoSlug || '', + workspace: workspace || '', + state: 'MERGED', q: `destination.branch.name = "${gitBranch}"`, - sort: "-updated_on", + sort: '-updated_on', }); - if (latestMergedPullRequestsOnBranch?.data?.values?.length > 0) { + if ( + latestMergedPullRequestsOnBranch?.data?.values?.length && + latestMergedPullRequestsOnBranch?.data?.values?.length > 0 + ) { const latestPullRequest = latestMergedPullRequestsOnBranch?.data?.values[0]; const latestPullRequestId = latestPullRequest.id; - deploymentCheckId = await this.getDeploymentIdFromPullRequest(latestPullRequestId, repoSlug, workspace, deploymentCheckId, latestPullRequest); + deploymentCheckId = await this.getDeploymentIdFromPullRequest( + latestPullRequestId || 0, + repoSlug || '', + workspace || '', + deploymentCheckId, + this.completePullRequestInfo(latestPullRequest) + ); } return deploymentCheckId; } - public async getPullRequestDeploymentCheckId(): Promise { + public async getPullRequestDeploymentCheckId(): Promise { const pullRequestInfo = await this.getPullRequestInfo(); if (pullRequestInfo) { const repoSlug = process.env.BITBUCKET_REPO_SLUG || null; const workspace = process.env.BITBUCKET_WORKSPACE || null; - return await this.getDeploymentIdFromPullRequest(pullRequestInfo.id, repoSlug, workspace, null, pullRequestInfo); + return await this.getDeploymentIdFromPullRequest( + pullRequestInfo.idNumber || 0, + repoSlug || '', + workspace || '', + null, + pullRequestInfo + ); } return null; } @@ -129,9 +161,9 @@ export class BitbucketProvider extends GitProviderRoot { latestPullRequestId: number, repoSlug: string, workspace: string, - deploymentCheckId: any, - latestPullRequest: Schema.Pullrequest, - ) { + deploymentCheckId: string | null, + latestPullRequest: CommonPullRequestInfo + ): Promise { const comments = await 
this.bitbucket.repositories.listPullRequestComments({ pull_request_id: latestPullRequestId, repo_slug: repoSlug, @@ -139,13 +171,16 @@ export class BitbucketProvider extends GitProviderRoot { }); for (const comment of comments?.data?.values || []) { - if ((comment?.content?.raw || "").includes(`/gm.exec(comment?.content?.raw); + if ((comment?.content?.raw || '').includes(`/gm.exec(comment?.content?.raw || ''); if (matches) { deploymentCheckId = matches[1]; uxLog( + "log", this, - c.gray(`[Bitbucket Integration] Found deployment id ${deploymentCheckId} on PR #${latestPullRequestId} ${latestPullRequest.title}`), + c.grey( + `[Bitbucket Integration] Found deployment id ${deploymentCheckId} on PR #${latestPullRequestId} ${latestPullRequest.title}` + ) ); break; } @@ -160,19 +195,19 @@ export class BitbucketProvider extends GitProviderRoot { const workspace = process.env.BITBUCKET_WORKSPACE || null; if (repoSlug == null || pullRequestIdStr == null) { - uxLog(this, c.grey("[Bitbucket integration] No repo and pull request, so no note posted...")); - return { posted: false, providerResult: { info: "No related pull request" } }; + uxLog("log", this, c.grey('[Bitbucket integration] No repo and pull request, so no note posted...')); + return { posted: false, providerResult: { info: 'No related pull request' } }; } const pullRequestId = Number(pullRequestIdStr); const bitbucketBuildNumber = process.env.BITBUCKET_BUILD_NUMBER || null; const bitbucketJobUrl = await this.getCurrentJobUrl(); - const messageKey = `${prMessage.messageKey}-${bitbucketBuildNumber}-${pullRequestId}`; - let messageBody = `**${prMessage.title || ""}** + const messageKey = `${prMessage.messageKey}-${pullRequestId}`; + let messageBody = `**${prMessage.title || ''}** ${prMessage.message} - \n_Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${bitbucketBuildNumber}](${bitbucketJobUrl})_ + \n_Powered by [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}) from job 
[${bitbucketBuildNumber}](${bitbucketJobUrl})_ \n `; @@ -181,6 +216,8 @@ export class BitbucketProvider extends GitProviderRoot { messageBody += `\n`; } + messageBody = await this.uploadAndReplaceImageReferences(messageBody, prMessage.sourceFile || ""); + const commentBody: any = { content: { raw: messageBody, @@ -188,25 +225,28 @@ export class BitbucketProvider extends GitProviderRoot { }; // Check for existing comment from a previous run - uxLog(this, c.grey("[Bitbucket integration] Listing comments of Pull Request...")); + uxLog("log", this, c.grey('[Bitbucket integration] Listing comments of Pull Request...')); const existingComments = await this.bitbucket.repositories.listPullRequestComments({ pull_request_id: pullRequestId, repo_slug: repoSlug, - workspace: workspace, + workspace: workspace || '', }); - let existingCommentId = null; + let existingCommentId: number | null = null; for (const existingComment of existingComments?.data?.values || []) { - if (existingComment?.content.raw?.includes(``)) { - existingCommentId = existingComment.id; + if ( + existingComment?.content?.raw && + existingComment?.content.raw?.includes(``) + ) { + existingCommentId = existingComment.id || null; } } // Create or update MR comment if (existingCommentId) { // Update existing comment - uxLog(this, c.grey("[Bitbucket integration] Updating Pull Request Comment on Bitbucket...")); + uxLog("log", this, c.grey('[Bitbucket integration] Updating Pull Request Comment on Bitbucket...')); const pullRequestComment = await this.bitbucket.repositories.updatePullRequestComment({ - workspace: workspace, + workspace: workspace || '', repo_slug: repoSlug, pull_request_id: pullRequestId, comment_id: existingCommentId, @@ -214,33 +254,75 @@ export class BitbucketProvider extends GitProviderRoot { }); const prResult: PullRequestMessageResult = { - posted: pullRequestComment?.data?.id > 0, + posted: (pullRequestComment?.data?.id || -1) > 0, providerResult: pullRequestComment, }; + uxLog("log", 
this, c.grey(`[Bitbucket integration] Updated Pull Request comment ${existingCommentId}`)); return prResult; } else { // Create new comment if no existing comment was found - uxLog(this, c.grey("[Bitbucket integration] Adding Pull Request Comment on Bitbucket...")); + uxLog("log", this, c.grey('[Bitbucket integration] Adding Pull Request Comment on Bitbucket...')); const pullRequestComment = await this.bitbucket.repositories.createPullRequestComment({ - workspace: workspace, + workspace: workspace || '', repo_slug: repoSlug, pull_request_id: pullRequestId, _body: commentBody, }); const prResult: PullRequestMessageResult = { - posted: pullRequestComment?.data?.id > 0, + posted: (pullRequestComment?.data?.id || -1) > 0, providerResult: pullRequestComment, }; + if (prResult.posted) { + uxLog("log", this, c.grey(`[Bitbucket integration] Posted Pull Request comment on ${pullRequestId}`)); + } else { + uxLog("warning", this, c.yellow(`[Bitbucket integration] Unable to post Pull Request comment on ${pullRequestId}:\n${JSON.stringify(pullRequestComment, null, 2)}`)); + } return prResult; } } - private completePullRequestInfo(prData: Schema.Pullrequest) { - const prInfo: any = Object.assign({}, prData); - prInfo.sourceBranch = prData?.source?.branch?.name || ""; - prInfo.targetBranch = prData?.destination?.branch?.name || ""; - return prInfo; + private completePullRequestInfo(prData: Schema.Pullrequest): CommonPullRequestInfo { + const prInfo: CommonPullRequestInfo = { + idNumber: prData?.id || 0, + idStr: prData.id ? 
prData?.id?.toString() : '', + sourceBranch: prData?.source?.branch?.name || '', + targetBranch: prData?.destination?.branch?.name || '', + title: prData?.rendered?.title?.raw || prData?.rendered?.title?.markup || prData?.rendered?.title?.html || '', + description: prData?.rendered?.description?.raw || prData?.rendered?.description?.markup || prData?.rendered?.description?.html || '', + webUrl: prData?.links?.html?.href || '', + authorName: prData?.author?.display_name || '', + providerInfo: prData, + customBehaviors: {} + }; + return this.completeWithCustomBehaviors(prInfo); } + + // Upload the image to Bitbucket + public async uploadImage(localImagePath: string): Promise { + try { + const imageName = path.basename(localImagePath); + const filesForm = new FormData(); + filesForm.append("files", fs.createReadStream(localImagePath)); + const attachmentResponse = await this.bitbucket.repositories.createDownload({ + workspace: process.env.BITBUCKET_WORKSPACE || "", + repo_slug: process.env.BITBUCKET_REPO_SLUG || "", + _body: filesForm as any, + }); + if (attachmentResponse) { + const imageRef = `${this.serverUrl}/${process.env.BITBUCKET_WORKSPACE}/${process.env.BITBUCKET_REPO_SLUG}/downloads/${imageName}`; + uxLog("log", this, c.grey(`[Bitbucket Integration] Image uploaded for comment: ${imageRef}`)); + return imageRef; + } + else { + uxLog("warning", this, c.yellow(`[Bitbucket Integration] Image uploaded but unable to get URL from response\n${JSON.stringify(attachmentResponse, null, 2)}`)); + } + } catch (e) { + uxLog("warning", this, c.yellow(`[Bitbucket Integration] Error while uploading image in downloads section ${localImagePath}\n${(e as Error).message}`)); + } + return null; + } + + } diff --git a/src/common/gitProvider/gitProviderRoot.ts b/src/common/gitProvider/gitProviderRoot.ts index 663a355ce..fd30884a0 100644 --- a/src/common/gitProvider/gitProviderRoot.ts +++ b/src/common/gitProvider/gitProviderRoot.ts @@ -1,55 +1,124 @@ -import { SfdxError } from 
"@salesforce/core"; -import * as c from "chalk"; -import { PullRequestMessageRequest, PullRequestMessageResult } from "."; -import { uxLog } from "../utils"; +import { SfError } from "@salesforce/core"; +import c from "chalk"; +import { CommonPullRequestInfo, PullRequestMessageRequest, PullRequestMessageResult } from "./index.js"; +import { uxLog } from "../utils/index.js"; +import { extractImagesFromMarkdown, replaceImagesInMarkdown } from "./utilsMarkdown.js"; +import { getEnvVar } from "../../config/index.js"; export abstract class GitProviderRoot { - protected serverUrl: string; - protected token: string; + public serverUrl: string | null; + public token: string; + public getLabel(): string { - throw new SfdxError("getLabel should be implemented on this call"); + throw new SfError("getLabel should be implemented on this call"); + } + + public async getBranchDeploymentCheckId(gitBranch: string): Promise { + uxLog("other", this, `Method getBranchDeploymentCheckId(${gitBranch}) is not implemented yet on ${this.getLabel()}`); + return null; + } + + public async getPullRequestDeploymentCheckId(): Promise { + uxLog("other", this, `Method getPullRequestDeploymentCheckId() is not implemented yet on ${this.getLabel()}`); + return null; } - public async getBranchDeploymentCheckId(gitBranch: string): Promise { - uxLog(this, `Method getBranchDeploymentCheckId(${gitBranch}) is not implemented yet on ${this.getLabel()}`); + public async getCurrentJobUrl(): Promise { + uxLog("other", this, `Method getCurrentJobUrl is not implemented yet on ${this.getLabel()}`); return null; } - public async getPullRequestDeploymentCheckId(): Promise { - uxLog(this, `Method getPullRequestDeploymentCheckId() is not implemented yet on ${this.getLabel()}`); + public async getCurrentBranchUrl(): Promise { + uxLog("other", this, `Method getCurrentBranchUrl is not implemented yet on ${this.getLabel()}`); return null; } - public async getCurrentJobUrl(): Promise { - uxLog(this, `Method 
getCurrentJobUrl is not implemented yet on ${this.getLabel()}`); + public async supportsMermaidInPrMarkdown(): Promise { + uxLog("other", this, `Method supportsMermaidInPrMarkdown is not implemented yet on ${this.getLabel()}`); + return false; + } + + public async supportsSvgAttachments(): Promise { + // False by default, might be used later + return false; + } + + public async getPullRequestInfo(): Promise { + uxLog("other", this, `Method getPullRequestInfo is not implemented yet on ${this.getLabel()}`); return null; } - public async getCurrentBranchUrl(): Promise { - uxLog(this, `Method getCurrentBranchUrl is not implemented yet on ${this.getLabel()}`); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public async uploadImage(image: string): Promise { + uxLog("other", this, `Method uploadImage is not implemented yet on ${this.getLabel()}`); return null; } - public async getPullRequestInfo(): Promise { - uxLog(this, `Method getPullRequestInfo is not implemented yet on ${this.getLabel()}`); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public async listPullRequests(filters: { + status?: string, + targetBranch?: string, + minDate?: Date + // eslint-disable-next-line @typescript-eslint/no-unused-vars + } = {}, options: { + formatted?: boolean + } = { formatted: false }): Promise { + uxLog("other", this, `Method listPullRequests is not implemented yet on ${this.getLabel()}`); return null; } public async postPullRequestMessage(prMessage: PullRequestMessageRequest): Promise { - uxLog(this, c.yellow("Method postPullRequestMessage is not yet implemented on " + this.getLabel() + " to post " + JSON.stringify(prMessage))); + uxLog("warning", this, c.yellow("Method postPullRequestMessage is not yet implemented on " + this.getLabel() + " to post " + JSON.stringify(prMessage))); return { posted: false, providerResult: { error: "Not implemented in sfdx-hardis" } }; } - + /* jscpd:ignore-start */ // Do not make crash the whole process in case 
there is an issue with integration public async tryPostPullRequestMessage(prMessage: PullRequestMessageRequest): Promise { - let prResult: PullRequestMessageResult = null; + let prResult: PullRequestMessageResult | null = null; try { prResult = await this.postPullRequestMessage(prMessage); } catch (e) { - uxLog(this, c.yellow(`[GitProvider] Error while trying to post pull request message.\n${e.message}\n${e.stack}`)); + uxLog("warning", this, c.yellow(`[GitProvider] Error while trying to post pull request message.\n${(e as Error).message}\n${(e as Error).stack}`)); prResult = { posted: false, providerResult: { error: e } }; } return prResult; } + /* jscpd:ignore-end */ + + public async uploadAndReplaceImageReferences(markdownBody: string, sourceFile: string | null = null): Promise { + const replacements: any = {}; + const markdownImages = extractImagesFromMarkdown(markdownBody, sourceFile); + for (const image of markdownImages) { + const imageUrl = await this.uploadImage(image.path); + if (imageUrl) { + replacements[image.name] = imageUrl; + } + } + markdownBody = replaceImagesInMarkdown(markdownBody, replacements); + return markdownBody; + } + + protected completeWithCustomBehaviors(pullRequestInfo: CommonPullRequestInfo): CommonPullRequestInfo { + const desc = pullRequestInfo.description || ""; + if (desc.includes("NO_DELTA") + || getEnvVar("NO_DELTA") === "true" + || getEnvVar("NO_DELTA_" + pullRequestInfo.targetBranch) === "true" + ) { + pullRequestInfo.customBehaviors.noDeltaDeployment = true; + } + if (desc.includes("PURGE_FLOW_VERSIONS") + || getEnvVar("PURGE_FLOW_VERSIONS") === "true" + || getEnvVar("PURGE_FLOW_VERSIONS_" + pullRequestInfo.targetBranch) === "true" + ) { + pullRequestInfo.customBehaviors.purgeFlowVersions = true; + } + if (desc.includes("DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT") + || getEnvVar("DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT") === "true" + || getEnvVar("DESTRUCTIVE_CHANGES_AFTER_DEPLOYMENT_" + pullRequestInfo.targetBranch) === "true" + ) 
{ + pullRequestInfo.customBehaviors.destructiveChangesAfterDeployment = true; + } + return pullRequestInfo; + } } diff --git a/src/common/gitProvider/github.ts b/src/common/gitProvider/github.ts index f0a8beae1..7e93900aa 100644 --- a/src/common/gitProvider/github.ts +++ b/src/common/gitProvider/github.ts @@ -1,37 +1,44 @@ import * as github from "@actions/github"; -import * as c from "chalk"; -import { GitProviderRoot } from "./gitProviderRoot"; -import { getCurrentGitBranch, git, uxLog } from "../utils"; -import { PullRequestMessageRequest, PullRequestMessageResult } from "."; -import { GitHub } from "@actions/github/lib/utils"; +import c from "chalk"; +import { GitProviderRoot } from "./gitProviderRoot.js"; +import { getCurrentGitBranch, git, uxLog } from "../utils/index.js"; +import { CommonPullRequestInfo, PullRequestMessageRequest, PullRequestMessageResult } from "./index.js"; +import { GitHub } from "@actions/github/lib/utils.js"; +import { CONSTANTS } from "../../config/index.js"; export class GithubProvider extends GitProviderRoot { private octokit: InstanceType; - private repoOwner: string; - private repoName: string; + private repoOwner: string | null; + private repoName: string | null; + public serverUrl: string | null; + public workflow: string | null; + public branch: string | null; + public prNumber: number | null; + public runId: string | number | null; constructor() { super(); const tokenName = process.env.CI_SFDX_HARDIS_GITHUB_TOKEN ? "CI_SFDX_HARDIS_GITHUB_TOKEN" : process.env.PAT ? 
"PAT" : "GITHUB_TOKEN"; const token = process.env[tokenName]; - this.octokit = github.getOctokit(token); - this.repoOwner = github?.context?.repo?.owner || null; - this.repoName = github?.context?.repo?.repo || null; - this.serverUrl = github?.context?.serverUrl || null; + this.octokit = github.getOctokit(token || ""); + this.repoOwner = github?.context?.repo?.owner || process.env.GITHUB_REPOSITORY_OWNER || null; + this.repoName = github?.context?.repo?.repo || process.env?.GITHUB_REPOSITORY?.split("/")[1] || null + this.serverUrl = github?.context?.serverUrl || process.env.GITHUB_SERVER_URL || null; + this.workflow = github?.context?.workflow || process.env.GITHUB_WORKFLOW || null; + this.branch = github?.context?.ref || process.env.GITHUB_REF || null; + this.prNumber = github?.context?.payload?.pull_request?.number || (process.env.GITHUB_REF_NAME ? parseInt(process.env.GITHUB_REF_NAME.split("/")?.[0] || "0") : null); + this.runId = github?.context?.runId || process.env.GITHUB_RUN_ID || null; } public getLabel(): string { return "sfdx-hardis GitHub connector"; } - - public async getBranchDeploymentCheckId(gitBranch: string): Promise { - let deploymentCheckId = null; - const repoOwner = github?.context?.repo?.owner || null; - const repoName = github?.context?.repo?.repo || null; - uxLog(this, c.grey("[GitHub integration] Listing previously closed Pull Requests")); + public async getBranchDeploymentCheckId(gitBranch: string): Promise { + let deploymentCheckId: string | null = null; + uxLog("log", this, c.grey("[GitHub Integration] Listing previously closed Pull Requests")); const latestPullRequestsOnBranch = await this.octokit.rest.pulls.list({ - owner: repoOwner, - repo: repoName, + owner: this.repoOwner || "", + repo: this.repoName || "", state: "closed", direction: "desc", per_page: 10, @@ -40,40 +47,37 @@ export class GithubProvider extends GitProviderRoot { if (latestPullRequestsOnBranch.data.length > 0) { const latestPullRequest = 
latestPullRequestsOnBranch.data[0]; const latestPullRequestId = latestPullRequest.number; - deploymentCheckId = await this.getDeploymentIdFromPullRequest(latestPullRequestId, repoOwner, repoName, deploymentCheckId, latestPullRequest); + deploymentCheckId = await this.getDeploymentIdFromPullRequest(latestPullRequestId, this.repoOwner || "", this.repoName || "", deploymentCheckId, latestPullRequest); } return deploymentCheckId; } - public async getPullRequestDeploymentCheckId(): Promise { + public async getPullRequestDeploymentCheckId(): Promise { const pullRequestInfo = await this.getPullRequestInfo(); if (pullRequestInfo) { - const repoOwner = github?.context?.repo?.owner || null; - const repoName = github?.context?.repo?.repo || null; - return await this.getDeploymentIdFromPullRequest(pullRequestInfo.number, repoOwner, repoName, null, pullRequestInfo); + return await this.getDeploymentIdFromPullRequest(pullRequestInfo.idNumber, this.repoOwner || "", this.repoName || "", null, pullRequestInfo); } return null; } - private async getDeploymentIdFromPullRequest( latestPullRequestId: number, repoOwner: string, repoName: string, - deploymentCheckId: any, + deploymentCheckId: string | null, latestPullRequest: any, - ) { - uxLog(this, c.grey(`[GitHub integration] Listing comments for PR ${latestPullRequestId}`)); + ): Promise { + uxLog("log", this, c.grey(`[GitHub Integration] Listing comments for PR ${latestPullRequestId}`)); const existingComments = await this.octokit.rest.issues.listComments({ owner: repoOwner, repo: repoName, issue_number: latestPullRequestId, }); for (const existingComment of existingComments.data) { - if (existingComment.body.includes("/gm.exec(existingComment.body); + if ((existingComment.body || "").includes("/gm.exec(existingComment.body || ""); if (matches) { deploymentCheckId = matches[1]; - uxLog(this, c.gray(`Found deployment id ${deploymentCheckId} on PR #${latestPullRequestId} ${latestPullRequest.title}`)); + uxLog("error", this, 
c.grey(`Found deployment id ${deploymentCheckId} on PR #${latestPullRequestId} ${latestPullRequest.title}`)); break; } } @@ -82,14 +86,16 @@ export class GithubProvider extends GitProviderRoot { } // Returns current job URL - public async getCurrentJobUrl(): Promise { + public async getCurrentJobUrl(): Promise { + if (process.env.PIPELINE_JOB_URL) { + return process.env.PIPELINE_JOB_URL; + } try { - const runId = github?.context?.runId; - if (this.repoOwner && this.repoName && this.serverUrl && runId) { - return `${this.serverUrl}/${this.repoOwner}/${this.repoName}/actions/runs/${runId}`; + if (this.repoOwner && this.repoName && this.serverUrl && this.runId) { + return `${this.serverUrl}/${this.repoOwner}/${this.repoName}/actions/runs/${this.runId}`; } - } catch (err) { - uxLog(this, c.yellow("[GitHub integration]" + err.message)); + } catch (err: any) { + uxLog("warning", this, c.yellow("[GitHub Integration]" + err.message)); } if (process.env.GITHUB_JOB_URL) { return process.env.GITHUB_JOB_URL; @@ -98,27 +104,30 @@ export class GithubProvider extends GitProviderRoot { } // Returns current job URL - public async getCurrentBranchUrl(): Promise { + public async getCurrentBranchUrl(): Promise { try { - const branch = github?.context?.ref || null; - if (this.repoOwner && this.repoName && this.serverUrl && branch) { - return `${this.serverUrl}/${this.repoOwner}/${this.repoName}/tree/${branch}`; + if (this.repoOwner && this.repoName && this.serverUrl && this.branch) { + return `${this.serverUrl}/${this.repoOwner}/${this.repoName}/tree/${this.branch}`; } - } catch (err) { - uxLog(this, c.yellow("[GitHub integration]" + err.message)); + } catch (err: any) { + uxLog("warning", this, c.yellow("[GitHub Integration]" + err.message)); } return null; } + // GitHub supports mermaid in PR markdown + public async supportsMermaidInPrMarkdown(): Promise { + return true; + } + // Find pull request info - public async getPullRequestInfo(): Promise { + public async 
getPullRequestInfo(): Promise { // Case when PR is found in the context - const prNumber = github?.context?.payload?.pull_request?.number || null; - if (prNumber !== null && this.repoOwner !== null && prNumber !== null) { + if (this.prNumber !== null && this.repoOwner !== null) { const pullRequest = await this.octokit.rest.pulls.get({ owner: this.repoOwner, - repo: this.repoName, - pull_number: prNumber, + repo: this.repoName || "", + pull_number: this.prNumber, }); // Add cross git provider properties used by sfdx-hardis if (pullRequest) { @@ -165,7 +174,7 @@ export class GithubProvider extends GitProviderRoot { }, ); } catch (error) { - uxLog(this, c.yellow(`[GitHub Integration] Error while calling GraphQL Api to list PR on commit ${sha}\n${error.message}`)); + uxLog("warning", this, c.yellow(`[GitHub Integration] Error while calling GraphQL Api to list PR on commit ${sha}\n${(error as any).message}`)); } if (graphQlRes?.repository?.commit?.associatedPullRequests?.edges?.length > 0) { const currentGitBranch = await getCurrentGitBranch(); @@ -176,43 +185,39 @@ export class GithubProvider extends GitProviderRoot { return this.completePullRequestInfo(candidatePullRequests[0].node); } } - uxLog(this, c.grey(`[GitHub Integration] Unable to find related Pull Request Info`)); + uxLog("log", this, c.grey(`[GitHub Integration] Unable to find related Pull Request Info`)); return null; } // Posts a note on the merge request public async postPullRequestMessage(prMessage: PullRequestMessageRequest): Promise { - const { pull_request } = github.context.payload; - - // Get CI variables - const pullRequestId = pull_request?.number || null; - if (this.repoName == null || pullRequestId == null) { - uxLog(this, c.grey("[GitHub integration] No project and merge request, so no note posted...")); + if (this.repoName == null || this.prNumber == null) { + uxLog("log", this, c.grey("[GitHub Integration] No project and merge request, so no note posted...")); return { posted: false, 
providerResult: { info: "No related pull request" } }; } - const githubWorkflowName = github.context.workflow; - const githubJobUrl = `${github.context.serverUrl}/${this.repoOwner}/${this.repoName}/actions/runs/${github.context.runId}`; + const githubJobUrl = await this.getCurrentJobUrl(); // Build note message - const messageKey = prMessage.messageKey + "-" + githubWorkflowName + "-" + pullRequestId; + const messageKey = prMessage.messageKey + "-" + this.workflow + "-" + this.prNumber; let messageBody = `**${prMessage.title || ""}** ${prMessage.message} -_Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubWorkflowName}](${githubJobUrl})_ +_Powered by [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}) from job [${this.workflow}](${githubJobUrl})_ `; // Add deployment id if present if (globalThis.pullRequestDeploymentId) { messageBody += `\n`; } + // Check for existing note from a previous run - uxLog(this, c.grey("[GitHub integration] Listing comments of Pull Request...")); + uxLog("log", this, c.grey("[GitHub Integration] Listing comments of Pull Request...")); const existingComments = await this.octokit.rest.issues.listComments({ - owner: this.repoOwner, + owner: this.repoOwner || "", repo: this.repoName, - issue_number: pullRequestId, + issue_number: this.prNumber, }); - let existingCommentId = null; + let existingCommentId: number | null = null; for (const existingComment of existingComments.data) { if (existingComment?.body?.includes(``)) { existingCommentId = existingComment.id; @@ -222,11 +227,11 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubWo // Create or update MR note if (existingCommentId) { // Update existing note - uxLog(this, c.grey("[GitHub integration] Updating Pull Request Comment on GitHub...")); + uxLog("log", this, c.grey("[GitHub Integration] Updating Pull Request Comment on GitHub...")); const githubCommentEditResult = await this.octokit.rest.issues.updateComment({ - owner: this.repoOwner, + 
owner: this.repoOwner || "", repo: this.repoName, - issue_number: pullRequestId, + issue_number: this.prNumber, comment_id: existingCommentId, body: messageBody, }); @@ -237,11 +242,11 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubWo return prResult; } else { // Create new note if no existing not was found - uxLog(this, c.grey("[GitHub integration] Adding Pull Request Comment on GitHub...")); + uxLog("log", this, c.grey("[GitHub Integration] Adding Pull Request Comment on GitHub...")); const githubCommentCreateResult = await this.octokit.rest.issues.createComment({ - owner: this.repoOwner, + owner: this.repoOwner || "", repo: this.repoName, - issue_number: pullRequestId, + issue_number: this.prNumber, body: messageBody, }); const prResult: PullRequestMessageResult = { @@ -252,11 +257,19 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubWo } } - private completePullRequestInfo(prData: any) { - const prInfo: any = Object.assign({}, prData); - prInfo.sourceBranch = (prData?.head?.ref || "").replace("refs/heads/", ""); - prInfo.targetBranch = (prData?.base?.ref || "").replace("refs/heads/", ""); - prInfo.description = prData?.body || ""; - return prInfo; + private completePullRequestInfo(prData: any): CommonPullRequestInfo { + const prInfo: CommonPullRequestInfo = { + idNumber: prData?.number || 0, + idStr: String(prData.number || ""), + sourceBranch: (prData?.head?.ref || "").replace("refs/heads/", ""), + targetBranch: (prData?.base?.ref || "").replace("refs/heads/", ""), + title: prData?.title || "", + description: prData?.body || "", + authorName: prData?.user?.login || "", + webUrl: prData?.html_url || "", + providerInfo: prData, + customBehaviors: {} + } + return this.completeWithCustomBehaviors(prInfo); } } diff --git a/src/common/gitProvider/gitlab.ts b/src/common/gitProvider/gitlab.ts index 414888f55..de0176f2e 100644 --- a/src/common/gitProvider/gitlab.ts +++ b/src/common/gitProvider/gitlab.ts 
@@ -1,18 +1,21 @@ import { Gitlab } from "@gitbeaker/node"; -import * as c from "chalk"; -import { PullRequestMessageRequest, PullRequestMessageResult } from "."; -import { getCurrentGitBranch, git, uxLog } from "../utils"; -import { GitProviderRoot } from "./gitProviderRoot"; +import c from "chalk"; +import { CommonPullRequestInfo, PullRequestMessageRequest, PullRequestMessageResult } from "./index.js"; +import { getCurrentGitBranch, git, uxLog } from "../utils/index.js"; +import { GitProviderRoot } from "./gitProviderRoot.js"; +import { CONSTANTS } from "../../config/index.js"; export class GitlabProvider extends GitProviderRoot { private gitlabApi: InstanceType; + public serverUrl: string; + public token: string; constructor() { super(); // Gitlab URL is always provided by default CI variables - this.serverUrl = process.env.CI_SERVER_URL; + this.serverUrl = process.env.CI_SERVER_URL || ""; // It's better to have a project token defined in a CI_SFDX_HARDIS_GITLAB_TOKEN variable, to have the rights to act on Pull Requests - this.token = process.env.CI_SFDX_HARDIS_GITLAB_TOKEN || process.env.ACCESS_TOKEN; + this.token = process.env.CI_SFDX_HARDIS_GITLAB_TOKEN || process.env.ACCESS_TOKEN || ""; this.gitlabApi = new Gitlab({ host: this.serverUrl, token: this.token, @@ -25,7 +28,10 @@ export class GitlabProvider extends GitProviderRoot { } // Returns current job URL - public async getCurrentJobUrl(): Promise { + public async getCurrentJobUrl(): Promise { + if (process.env.PIPELINE_JOB_URL) { + return process.env.PIPELINE_JOB_URL; + } if (process.env.CI_JOB_URL) { return process.env.CI_JOB_URL; } @@ -33,29 +39,35 @@ export class GitlabProvider extends GitProviderRoot { } // Returns current job URL - public async getCurrentBranchUrl(): Promise { + public async getCurrentBranchUrl(): Promise { if (process.env.CI_PROJECT_URL && process.env.CI_COMMIT_REF_NAME) return `${process.env.CI_PROJECT_URL}/-/tree/${process.env.CI_COMMIT_REF_NAME}`; return null; } + // Gitlab 
supports mermaid in PR markdown + public async supportsMermaidInPrMarkdown(): Promise { + return true; + } + // Find pull request info - public async getPullRequestInfo(): Promise { + public async getPullRequestInfo(): Promise { // Case when MR is found in the context const projectId = process.env.CI_PROJECT_ID || null; const mrNumber = process.env.CI_MERGE_REQUEST_IID || null; if (mrNumber !== null) { const mergeRequests = await this.gitlabApi.MergeRequests.all({ - projectId: projectId, + projectId: projectId || "", iids: [parseInt(mrNumber)], }); if (mergeRequests.length > 0) { - return this.completePullRequestInfo(mergeRequests[0]); + const mergeRequest = mergeRequests[0]; + return this.completePullRequestInfo(mergeRequest); } } // Case when we find MR from a commit const sha = await git().revparse(["HEAD"]); const latestMergeRequestsOnBranch = await this.gitlabApi.MergeRequests.all({ - projectId: projectId, + projectId: projectId || "", state: "merged", sort: "desc", sha: sha, @@ -67,15 +79,14 @@ export class GitlabProvider extends GitProviderRoot { return this.completePullRequestInfo(candidateMergeRequests[0]); } } - uxLog(this, c.grey(`[Gitlab Integration] Unable to find related Merge Request Info`)); + uxLog("log", this, c.grey(`[Gitlab Integration] Unable to find related Merge Request Info`)); return null; } - - public async getBranchDeploymentCheckId(gitBranch: string): Promise { - let deploymentCheckId = null; + public async getBranchDeploymentCheckId(gitBranch: string): Promise { + let deploymentCheckId: string | null = null; const projectId = process.env.CI_PROJECT_ID || null; const latestMergeRequestsOnBranch = await this.gitlabApi.MergeRequests.all({ - projectId: projectId, + projectId: projectId || "", state: "merged", sort: "desc", targetBranch: gitBranch, @@ -83,28 +94,28 @@ export class GitlabProvider extends GitProviderRoot { if (latestMergeRequestsOnBranch.length > 0) { const latestMergeRequest = latestMergeRequestsOnBranch[0]; const 
latestMergeRequestId = latestMergeRequest.iid; - deploymentCheckId = await this.getDeploymentIdFromPullRequest(projectId, latestMergeRequestId, deploymentCheckId, latestMergeRequest); + deploymentCheckId = await this.getDeploymentIdFromPullRequest(projectId || "", latestMergeRequestId, deploymentCheckId, this.completePullRequestInfo(latestMergeRequest)); } return deploymentCheckId; } - public async getPullRequestDeploymentCheckId(): Promise { + public async getPullRequestDeploymentCheckId(): Promise { const pullRequestInfo = await this.getPullRequestInfo(); if (pullRequestInfo) { const projectId = process.env.CI_PROJECT_ID || null; - return await this.getDeploymentIdFromPullRequest(projectId, pullRequestInfo.iid, null, pullRequestInfo); + return await this.getDeploymentIdFromPullRequest(projectId || "", pullRequestInfo.idNumber, null, pullRequestInfo); } return null; } - private async getDeploymentIdFromPullRequest(projectId: string, latestMergeRequestId: number, deploymentCheckId: any, latestMergeRequest) { + private async getDeploymentIdFromPullRequest(projectId: string, latestMergeRequestId: number, deploymentCheckId: string | null, latestMergeRequest: CommonPullRequestInfo): Promise { const existingNotes = await this.gitlabApi.MergeRequestNotes.all(projectId, latestMergeRequestId); for (const existingNote of existingNotes) { if (existingNote.body.includes("/gm.exec(existingNote.body); if (matches) { deploymentCheckId = matches[1]; - uxLog(this, c.gray(`Found deployment id ${deploymentCheckId} on MR #${latestMergeRequestId} ${latestMergeRequest.title}`)); + uxLog("error", this, c.grey(`Found deployment id ${deploymentCheckId} on MR #${latestMergeRequestId} ${latestMergeRequest.title}`)); break; } } @@ -118,7 +129,7 @@ export class GitlabProvider extends GitProviderRoot { const projectId = process.env.CI_PROJECT_ID || null; const mergeRequestId = process.env.CI_MERGE_REQUEST_IID || process.env.CI_MERGE_REQUEST_ID || null; if (projectId == null || mergeRequestId 
== null) { - uxLog(this, c.grey("[Gitlab integration] No project and merge request, so no note posted...")); + uxLog("log", this, c.grey("[Gitlab integration] No project and merge request, so no note posted...")); return { posted: false, providerResult: { info: "No related merge request" } }; } const gitlabCiJobName = process.env.CI_JOB_NAME; @@ -129,7 +140,7 @@ export class GitlabProvider extends GitProviderRoot { ${prMessage.message} -_Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${gitlabCiJobName}](${gitlabCIJobUrl})_ +_Powered by [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}) from job [${gitlabCiJobName}](${gitlabCIJobUrl})_ `; // Add deployment id if present @@ -137,9 +148,9 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${gitlabCi messageBody += `\n`; } // Check for existing note from a previous run - uxLog(this, c.grey("[Gitlab integration] Listing Notes of Merge Request...")); + uxLog("log", this, c.grey("[Gitlab integration] Listing Notes of Merge Request...")); const existingNotes = await this.gitlabApi.MergeRequestNotes.all(projectId, mergeRequestId); - let existingNoteId = null; + let existingNoteId: number | null = null; for (const existingNote of existingNotes) { if (existingNote.body.includes(``)) { existingNoteId = existingNote.id; @@ -149,7 +160,7 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${gitlabCi // Create or update MR note if (existingNoteId) { // Update existing note - uxLog(this, c.grey("[Gitlab integration] Updating Merge Request Note on Gitlab...")); + uxLog("log", this, c.grey("[Gitlab integration] Updating Merge Request Note on Gitlab...")); const gitlabEditNoteResult = await this.gitlabApi.MergeRequestNotes.edit(projectId, mergeRequestId, existingNoteId, messageBody); const prResult: PullRequestMessageResult = { posted: gitlabEditNoteResult.id > 0, @@ -158,7 +169,7 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${gitlabCi return prResult; 
} else { // Create new note if no existing not was found - uxLog(this, c.grey("[Gitlab integration] Adding Merge Request Note on Gitlab...")); + uxLog("log", this, c.grey("[Gitlab integration] Adding Merge Request Note on Gitlab...")); const gitlabPostNoteResult = await this.gitlabApi.MergeRequestNotes.create(projectId, mergeRequestId, messageBody); const prResult: PullRequestMessageResult = { posted: gitlabPostNoteResult.id > 0, @@ -168,10 +179,19 @@ _Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${gitlabCi } } - private completePullRequestInfo(prData: any) { - const prInfo: any = Object.assign({}, prData); - prInfo.sourceBranch = (prData?.source_branch || "").replace("refs/heads/", ""); - prInfo.targetBranch = (prData?.target_branch || "").replace("refs/heads/", ""); - return prInfo; + private completePullRequestInfo(prData: any): CommonPullRequestInfo { + const prInfo: CommonPullRequestInfo = { + idNumber: prData?.iid || prData?.id || 0, + idStr: String(prData?.iid || prData?.id || ""), + sourceBranch: (prData?.source_branch || "").replace("refs/heads/", ""), + targetBranch: (prData?.target_branch || "").replace("refs/heads/", ""), + title: prData?.title || "", + description: prData?.description || "", + authorName: prData?.author?.name || "", + webUrl: prData?.web_url || "", + providerInfo: prData, + customBehaviors: {} + } + return this.completeWithCustomBehaviors(prInfo); } } diff --git a/src/common/gitProvider/index.ts b/src/common/gitProvider/index.ts index 1a2d176e9..d563f0811 100644 --- a/src/common/gitProvider/index.ts +++ b/src/common/gitProvider/index.ts @@ -1,16 +1,18 @@ -import * as c from "chalk"; -import { getCurrentGitBranch, isCI, uxLog } from "../utils"; -import { AzureDevopsProvider } from "./azureDevops"; -import { GithubProvider } from "./github"; -import { GitlabProvider } from "./gitlab"; -import { GitProviderRoot } from "./gitProviderRoot"; -import { BitbucketProvider } from "./bitbucket"; +import c from "chalk"; 
+import { getCurrentGitBranch, isCI, uxLog } from "../utils/index.js"; +import { AzureDevopsProvider } from "./azureDevops.js"; +import { GithubProvider } from "./github.js"; +import { GitlabProvider } from "./gitlab.js"; +import { GitProviderRoot } from "./gitProviderRoot.js"; +import { BitbucketProvider } from "./bitbucket.js"; import Debug from "debug"; -import { getEnvVar } from "../../config"; +import { CONSTANTS, getEnvVar } from "../../config/index.js"; +import { prompts } from "../utils/prompts.js"; +import { removeMermaidLinks } from "../utils/mermaidUtils.js"; const debug = Debug("sfdxhardis"); export abstract class GitProvider { - static getInstance(): GitProviderRoot { + static async getInstance(prompt = false): Promise { // Azure if (process.env.SYSTEM_ACCESSTOKEN) { const serverUrl = process.env.SYSTEM_COLLECTIONURI || null; @@ -18,6 +20,7 @@ export abstract class GitProvider { const token = process.env.CI_SFDX_HARDIS_AZURE_TOKEN || process.env.SYSTEM_ACCESSTOKEN || null; if (serverUrl == null || token == null) { uxLog( + "warning", this, c.yellow(`To benefit from Azure Pipelines advanced integration, you need to define the following variables as ENV vars: - SYSTEM_COLLECTIONURI @@ -32,6 +35,7 @@ export abstract class GitProvider { const token = process.env.CI_SFDX_HARDIS_GITLAB_TOKEN || process.env.ACCESS_TOKEN || null; if (token == null) { uxLog( + "warning", this, c.yellow(`To benefit from Gitlab advanced integration, you need to : - Go to Settings -> Access tokens -> create a project token named "SFDX HARDIS BOT" with developer access and scope "api", then copy its value @@ -50,6 +54,7 @@ export abstract class GitProvider { const token = process.env.CI_SFDX_HARDIS_BITBUCKET_TOKEN || null; if (token == null) { uxLog( + "warning", this, c.yellow(`To benefit from Bitbucket advanced integration, you need to : - Go to Repository Settings -> Access Tokens -> Create a repository access token with the scopes pullrequest, pullrequest:write, repository, 
repository:write and copy its value @@ -58,8 +63,15 @@ export abstract class GitProvider { return null; } return new BitbucketProvider(); - } else if (isCI) { + } + // If prompt allowed and no vars found, request to user + else if (prompt && !isCI) { + await GitProvider.handleManualGitServerAuth(); + return this.getInstance(false); + } + else if (isCI) { uxLog( + "log", this, c.grey( "To use sfdx-hardis GitProvider capabilities, SYSTEM_ACCESSTOKEN, CI_JOB_TOKEN, GITHUB_TOKEN or CI_SFDX_HARDIS_BITBUCKET_TOKEN must be accessible for Azure Pipelines, Gitlab, GitHub or Bitbucket", @@ -69,20 +81,41 @@ export abstract class GitProvider { return null; } + private static async handleManualGitServerAuth() { + const promptRes = await prompts({ + message: "Please select your Git Service Provider", + description: "Choose your git hosting service to enable CI/CD integration features", + type: "select", + choices: [ + { title: "Azure DevOps", value: "azure" }, + { title: "GitHub", value: "github" }, + { title: "Gitlab", value: "gitlab" }, + { title: "Bitbucket", value: "bitbucket" }, + ] + }); + if (promptRes.value === "azure") { + await AzureDevopsProvider.handleLocalIdentification(); + } + else { + uxLog("warning", this, c.yellow(`[GitProvider] Local authentication is not yet implemented for ${promptRes.value}`)); + } + } + static async managePostPullRequestComment(): Promise { - const gitProvider = GitProvider.getInstance(); + const gitProvider = await GitProvider.getInstance(); if (gitProvider == null) { - uxLog(this, c.yellow("[Git Provider] WARNING: No git provider found to post pull request comment. Maybe you should configure it ?")); + uxLog("warning", this, c.yellow("[Git Provider] WARNING: No git provider found to post pull request comment. 
Maybe you should configure it ?")); uxLog( + "warning", this, - c.yellow("[Git Provider] See documentation: https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integrations-home/#git-providers"), + c.yellow(`[Git Provider] See documentation: ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integrations-home/#git-providers`), ); return; } const prData = globalThis.pullRequestData; const prCommentSent = globalThis.pullRequestCommentSent || false; if (prData && gitProvider && prCommentSent === false) { - uxLog(this, c.yellow("[Git Provider] Try to post a pull request comment/note...")); + uxLog("warning", this, c.yellow("[Git Provider] Try to post a pull request comment/note...")); let markdownBody = ""; if (prData.deployErrorsMarkdownBody) { markdownBody += prData.deployErrorsMarkdownBody; @@ -93,24 +126,41 @@ export abstract class GitProvider { if (prData.commitsSummary) { markdownBody += "\n\n" + prData.commitsSummary; } + if (prData?.flowDiffMarkdown?.markdownSummary) { + markdownBody += "\n\n" + prData.flowDiffMarkdown.markdownSummary; + } + markdownBody = removeMermaidLinks(markdownBody); // Remove "click" elements that are useless and ugly on some providers :) const prMessageRequest: PullRequestMessageRequest = { - title: prData.title, + title: prData.title || '', message: markdownBody, - status: prData.status, - messageKey: prData.messageKey, + status: prData.status || 'tovalidate', + messageKey: prData.messageKey || '', }; + // Post main message const postResult = await gitProvider.tryPostPullRequestMessage(prMessageRequest); if (postResult && postResult.posted === true) { globalThis.pullRequestCommentSent = true; } + // Post additional comments + for (const flowDiff of prData?.flowDiffMarkdown?.flowDiffMarkdownList || []) { + const flowDiffMessage = removeMermaidLinks(flowDiff.markdown); // Remove "click" elements that are useless and ugly on some providers :) + const prMessageRequestAdditional: PullRequestMessageRequest = { + title: `Differences for 
Flow ${flowDiff.name}`, + message: flowDiffMessage, + status: "valid", + messageKey: `sfdx-hardis-flow-diff-${flowDiff.name}`, + sourceFile: flowDiff.markdownFile, + }; + await gitProvider.tryPostPullRequestMessage(prMessageRequestAdditional); + } } else { - uxLog(this, c.gray(`${JSON.stringify(prData || { noPrData: "" })} && ${gitProvider} && ${prCommentSent}`)); - uxLog(this, c.yellow("[Git Provider] Skip post pull request comment")); + uxLog("error", this, c.grey(`${JSON.stringify(prData || { noPrData: "" })} && ${gitProvider} && ${prCommentSent}`)); + uxLog("warning", this, c.yellow("[Git Provider] Skip post pull request comment")); } } - static async getDeploymentCheckId(): Promise { - const gitProvider = GitProvider.getInstance(); + static async getDeploymentCheckId(): Promise { + const gitProvider = await GitProvider.getInstance(); if (gitProvider == null) { return null; } @@ -120,44 +170,69 @@ export abstract class GitProvider { return gitProvider.getPullRequestDeploymentCheckId(); } // Classic way: get deployment check Id from latest merged Pull Request - const currentGitBranch = await getCurrentGitBranch(); + const currentGitBranch = await getCurrentGitBranch() || ""; return gitProvider.getBranchDeploymentCheckId(currentGitBranch); } catch (e) { - uxLog(this, c.yellow(`Error while trying to retrieve deployment check id:\n${e.message}`)); + uxLog("warning", this, c.yellow(`Error while trying to retrieve deployment check id:\n${(e as Error).message}`)); return null; } } - static async getCurrentBranchUrl(): Promise { - const gitProvider = GitProvider.getInstance(); + static async getCurrentBranchUrl(): Promise { + const gitProvider = await GitProvider.getInstance(); if (gitProvider == null) { return null; } return gitProvider.getCurrentBranchUrl(); } - static async getJobUrl(): Promise { - const gitProvider = GitProvider.getInstance(); + static async getJobUrl(): Promise { + const gitProvider = await GitProvider.getInstance(); if (gitProvider == null) { 
return null; } return gitProvider.getCurrentJobUrl(); } - static async getPullRequestInfo(): Promise { - const gitProvider = GitProvider.getInstance(); + static async supportsMermaidInPrMarkdown(): Promise { + const gitProvider = await GitProvider.getInstance(); + if (gitProvider == null) { + return false; + } + return gitProvider.supportsMermaidInPrMarkdown(); + } + + static async supportsSvgAttachments(): Promise { + const gitProvider = await GitProvider.getInstance(); + if (gitProvider == null) { + return false; + } + return gitProvider.supportsSvgAttachments(); + } + + static prInfoCache: any = null; + + static async getPullRequestInfo(options: { useCache: boolean } = { useCache: false }): Promise { + // Return cached result if available and caching is enabled + if (options.useCache && GitProvider.prInfoCache !== null) { + debug("[PR Info] Returning cached pull request info"); + return GitProvider.prInfoCache; + } + + const gitProvider = await GitProvider.getInstance(); if (gitProvider == null) { debug("[PR Info] No GitProvider instance found"); return null; } - let prInfo: any; + let prInfo: CommonPullRequestInfo | null = null; try { prInfo = await gitProvider.getPullRequestInfo(); debug("[GitProvider][PR Info] " + JSON.stringify(prInfo, null, 2)); + GitProvider.prInfoCache = prInfo; } catch (e) { - uxLog(this, c.yellow("[GitProvider] Unable to get Pull Request info: " + e.message)); - uxLog(this, c.yellow(`[GitProvider] Maybe you misconfigured your ${gitProvider.getLabel()} ?`)); - uxLog(this, c.yellow(`[GitProvider] See https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integrations-home/#git-providers`)); + uxLog("warning", this, c.yellow("[GitProvider] Unable to get Pull Request info: " + (e as Error).message)); + uxLog("warning", this, c.yellow(`[GitProvider] Maybe you misconfigured your ${gitProvider.getLabel()} ?`)); + uxLog("warning", this, c.yellow(`[GitProvider] See 
${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integrations-home/#git-providers`)); prInfo = null; } return prInfo; @@ -167,6 +242,108 @@ export abstract class GitProvider { const deployBeforeMerge = getEnvVar("SFDX_HARDIS_DEPLOY_BEFORE_MERGE") || false; return [true, "true"].includes(deployBeforeMerge); } + + static getMergeRequestName(gitUrl: string): string { + if (gitUrl.includes("gitlab")) { + return "Merge Request"; + } + // Default fallback + return "Pull Request"; + } + + static getMergeRequestCreateUrl(gitUrl: string, targetBranch: string, sourceBranch: string): string | null { + const gitUrlHttp = gitUrl.replace(".git", "").trim(); + // GitLab + if (gitUrlHttp.includes("gitlab")) { + // https://gitlab.com/group/project/-/merge_requests/new?merge_request[source_branch]=feature&merge_request[target_branch]=main + return `${gitUrlHttp}/-/merge_requests/new?merge_request[source_branch]=${encodeURIComponent(sourceBranch)}&merge_request[target_branch]=${encodeURIComponent(targetBranch)}`; + } + // GitHub + if (gitUrlHttp.includes("github") || gitUrlHttp.includes("ghe.com")) { + // https://github.com/org/repo/compare/main...feature?expand=1 + return `${gitUrlHttp}/compare/${encodeURIComponent(targetBranch)}...${encodeURIComponent(sourceBranch)}?expand=1`; + } + // Gitea (common pattern) + if (gitUrlHttp.includes("gitea")) { + // https://gitea.example.com/org/repo/compare/main...feature + return `${gitUrlHttp}/compare/${encodeURIComponent(targetBranch)}...${encodeURIComponent(sourceBranch)}`; + } + // Azure DevOps + if (gitUrlHttp.includes("dev.azure.com")) { + // https://dev.azure.com/org/project/_git/repo/pullrequestcreate?sourceRef=feature&targetRef=main + // Try to extract the repo path after _git/ + const match = gitUrlHttp.match(/dev\.azure\.com\/([^/]+)\/([^/]+)\/_git\/([^/]+)/); + if (match) { + const org = match[1]; + const project = match[2]; + const repo = match[3]; + return 
`https://dev.azure.com/${org}/${project}/_git/${repo}/pullrequestcreate?sourceRef=${encodeURIComponent(sourceBranch)}&targetRef=${encodeURIComponent(targetBranch)}`; + } + } + // Bitbucket (cloud) + if (gitUrlHttp.includes("bitbucket.org")) { + // https://bitbucket.org/org/repo/pull-requests/new?source=feature&dest=main + return `${gitUrlHttp}/pull-requests/new?source=${encodeURIComponent(sourceBranch)}&dest=${encodeURIComponent(targetBranch)}`; + } + // Bitbucket (server/DC) + if (gitUrlHttp.includes("/scm/")) { + // e.g. http://bitbucket.example.com/scm/project/repo + // https://bitbucket.example.com/projects/PROJECT/repos/REPO/pull-requests?create&sourceBranch=feature&targetBranch=main + const match = gitUrlHttp.match(/\/scm\/([^/]+)\/([^/]+)/); + if (match) { + const project = match[1]; + const repo = match[2]; + return gitUrlHttp.replace(/\/scm\/[^/]+\/[^/]+$/, `/projects/${project.toUpperCase()}/repos/${repo}/pull-requests?create&sourceBranch=${encodeURIComponent(sourceBranch)}&targetBranch=${encodeURIComponent(targetBranch)}`); + } + } + // Fallback: just return null + return null; + } +} + +export declare type CommonPullRequestInfo = { + idNumber: number; + idStr: string; + targetBranch: string; + sourceBranch: string; + title: string; + description: string; + authorName: string; + webUrl: string; + customBehaviors: { + noDeltaDeployment?: boolean, + purgeFlowVersions?: boolean, + destructiveChangesAfterDeployment?: boolean + } + providerInfo: any +} + +export declare type PullRequestData = { + messageKey: string; + title: string; + deployErrorsMarkdownBody?: string; + codeCoverageMarkdownBody?: string; + commitsSummary?: string; + deployStatus?: "valid" | "invalid" | "unknown"; + status?: "valid" | "invalid" | "tovalidate"; + flowDiffMarkdown?: { + markdownSummary?: string; + flowDiffMarkdownList?: Array<{ + name: string; + markdown: string; + markdownFile?: string; + }>; + }; +} + +// Global type augmentation for globalThis +declare global { + // 
eslint-disable-next-line no-var + var pullRequestData: Partial | undefined; + // eslint-disable-next-line no-var + var pullRequestCommentSent: boolean | undefined; + // eslint-disable-next-line no-var + var pullRequestDeploymentId: string | undefined; } export declare type PullRequestMessageRequest = { @@ -174,6 +351,7 @@ export declare type PullRequestMessageRequest = { message: string; messageKey: string; status: "valid" | "invalid" | "tovalidate"; + sourceFile?: string; }; export declare type PullRequestMessageResult = { diff --git a/src/common/gitProvider/utilsMarkdown.ts b/src/common/gitProvider/utilsMarkdown.ts index 76a2c4544..e07cea602 100644 --- a/src/common/gitProvider/utilsMarkdown.ts +++ b/src/common/gitProvider/utilsMarkdown.ts @@ -1,14 +1,23 @@ +import c from "chalk"; +import fs from "fs-extra" +import * as path from "path" +import { MetadataUtils } from "../metadata-utils/index.js"; +import { uxLog } from "../utils/index.js"; +import { generateFlowVisualGitDiff } from "../utils/mermaidUtils.js"; +import { GitProvider } from "./index.js"; + export function deployErrorsToMarkdown(errorsAndTips: Array) { let md = "## Deployment errors\n\n"; for (const err of errorsAndTips) { - const errorMessage = err.error.message.trim().includes("Error ") - ? err.error.message - .trim() - .replace("Error ", "") - .replace(" ", "
") - .trim() - .replace(/(.*)/gm, `$1 `) - : err.error.message.trim(); + const errorMessage = (err as any)?.error?.message?.trim().includes("Error ") + ? (err as any)?.error?.message + .trim() + .replace("| Error ", "") + .replace("Error ", "") + .replace(" ", "
") + .trim() + .replace(/(.*)/gm, `$1 `) + : (err as any)?.error?.message?.trim() || "WE SHOULD NOT GO THERE: PLEASE DECLARE AN ISSUE"; // sfdx-hardis tip if (err.tip) { const aiText = err?.tipFromAi?.promptResponse @@ -16,9 +25,9 @@ export function deployErrorsToMarkdown(errorsAndTips: Array) { : err?.tipFromAi?.promptText ? getAiPromptTextMarkdown("Get prompt for AI", err.tipFromAi.promptText) : ""; - md += `
🛠️ ${errorMessage} + md += `
⛔ ${errorMessage} -_${err.tip.label}_ +_[**✏️ ${err.tip.label}**](${err.tip.docUrl || "https://sfdx-hardis.cloudity.com/salesforce-deployment-assistant-home/"})_ ${err.tip.message.replace(/:\n-/gm, `:\n\n-`)} ${aiText} @@ -68,8 +77,76 @@ export function deployCodeCoverageToMarkdown(orgCoverage: number, orgCoverageTar } } +export function mdTableCell(str: string) { + if (!str) { + return "" + } + return str.replace(/\n/gm, "
").replace(/\|/gm, ""); +} + +export async function flowDiffToMarkdownForPullRequest(flowNames: string[], fromCommit: string, toCommit: string, truncatedNb: number = 0): Promise { + if (flowNames.length === 0) { + return ""; + } + const supportsMermaidInPrMarkdown = await GitProvider.supportsMermaidInPrMarkdown(); + const supportsSvgAttachments = await GitProvider.supportsSvgAttachments(); + const flowDiffMarkdownList: any = []; + let flowDiffFilesSummary = "## Flow changes\n\n"; + for (const flowName of flowNames) { + flowDiffFilesSummary += `- [${flowName}](#${flowName})\n`; + const fileMetadata = await MetadataUtils.findMetaFileFromTypeAndName("Flow", flowName); + try { + // Markdown with pure mermaidJs + if (supportsMermaidInPrMarkdown) { + await generateDiffMarkdownWithMermaid(fileMetadata, fromCommit, toCommit, flowDiffMarkdownList, flowName); + } + // Markdown with Mermaid converted as SVG + else if (supportsSvgAttachments) { + await generateDiffMarkdownWithSvg(fileMetadata, fromCommit, toCommit, flowDiffMarkdownList, flowName); + } + // Markdown with images converted as PNG + else { + await generateDiffMarkdownWithPng(fileMetadata, fromCommit, toCommit, flowDiffMarkdownList, flowName); + } + } catch (e: any) { + uxLog("warning", this, c.yellow(`[FlowGitDiff] Unable to generate Flow diff for ${flowName}: ${e.message}`)); + } + } + if (truncatedNb > 0) { + flowDiffFilesSummary += `\n\n:warning: _${truncatedNb} Flows have been truncated_\n\n`; + } + return { + markdownSummary: flowDiffFilesSummary, + flowDiffMarkdownList: flowDiffMarkdownList + } +} + +async function generateDiffMarkdownWithMermaid(fileMetadata: string | null, fromCommit: string, toCommit: string, flowDiffMarkdownList: any, flowName: string) { + const { outputDiffMdFile, hasFlowDiffs } = await generateFlowVisualGitDiff(fileMetadata, fromCommit, toCommit, { mermaidMd: true, svgMd: false, pngMd: false, debug: false }); + if (outputDiffMdFile && hasFlowDiffs) { + const flowDiffMarkdownMermaid = 
await fs.readFile(outputDiffMdFile.replace(".md", ".mermaid.md"), "utf8"); + flowDiffMarkdownList.push({ name: flowName, markdown: flowDiffMarkdownMermaid, markdownFile: outputDiffMdFile }); + } +} + +async function generateDiffMarkdownWithSvg(fileMetadata: string | null, fromCommit: string, toCommit: string, flowDiffMarkdownList: any, flowName: string) { + const { outputDiffMdFile, hasFlowDiffs } = await generateFlowVisualGitDiff(fileMetadata, fromCommit, toCommit, { mermaidMd: true, svgMd: true, pngMd: false, debug: false }); + if (outputDiffMdFile && hasFlowDiffs && fs.existsSync(outputDiffMdFile)) { + const flowDiffMarkdownWithSvg = await fs.readFile(outputDiffMdFile, "utf8"); + flowDiffMarkdownList.push({ name: flowName, markdown: flowDiffMarkdownWithSvg, markdownFile: outputDiffMdFile }); + } +} + +async function generateDiffMarkdownWithPng(fileMetadata: string | null, fromCommit: string, toCommit: string, flowDiffMarkdownList: any, flowName: string) { + const { outputDiffMdFile, hasFlowDiffs } = await generateFlowVisualGitDiff(fileMetadata, fromCommit, toCommit, { mermaidMd: true, svgMd: false, pngMd: true, debug: false }); + if (outputDiffMdFile && hasFlowDiffs && fs.existsSync(outputDiffMdFile)) { + const flowDiffMarkdownWithPng = await fs.readFile(outputDiffMdFile, "utf8"); + flowDiffMarkdownList.push({ name: flowName, markdown: flowDiffMarkdownWithPng, markdownFile: outputDiffMdFile }); + } +} + function getAiPromptResponseMarkdown(title, message) { - return `
🤖 ${title} 🤖 + return `
🤖 ${title} _AI Deployment Assistant tip (not verified !)_ @@ -89,3 +166,37 @@ ${message.replace(/:\n-/gm, `:\n\n-`)}
`; } + +export function extractImagesFromMarkdown(markdown: string, sourceFile: string | null): any[] { + let sourceFilePath = ""; + if (sourceFile && fs.existsSync(sourceFile)) { + sourceFilePath = path.dirname(sourceFile) + } + const imageRegex = /!\[.*?\]\((.*?)\)/gm; + const matches = Array.from(markdown.matchAll(imageRegex)); + return matches.map((match) => match[1]).filter(file => { + if (fs.existsSync(file)) { + return true; + } + else if (fs.existsSync(path.join(sourceFilePath, file))) { + return true; + } + uxLog("warning", this, c.yellow(`[Markdown] Image file not found: ${file} or ${path.join(sourceFilePath, file)}`)); + return false; + }).map(file => { + if (fs.existsSync(file)) { + return { name: file, path: file }; + } + else if (fs.existsSync(path.join(sourceFilePath, file))) { + return { name: file, path: path.join(sourceFilePath, file) }; + } + return {}; + }); +} + +export function replaceImagesInMarkdown(markdown: string, replacements: any): string { + for (const replacedImage of Object.keys(replacements)) { + markdown = markdown.replaceAll(replacedImage, replacements[replacedImage]); + } + return markdown; +} \ No newline at end of file diff --git a/src/common/keyValueProviders/keyValueXyz.ts b/src/common/keyValueProviders/keyValueXyz.ts deleted file mode 100644 index 639e62244..000000000 --- a/src/common/keyValueProviders/keyValueXyz.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { SfdxError } from "@salesforce/core"; -import axios from "axios"; -import * as c from "chalk"; -import { getConfig, setConfig } from "../../config"; -import { uxLog } from "../utils"; -import { KeyValueProviderInterface } from "../utils/keyValueUtils"; -import { prompts } from "../utils/prompts"; - -export class KeyValueXyzProvider implements KeyValueProviderInterface { - name = "keyvalue.xyz"; - description = "keyvalue.xyz external service (api token, no auth). 
Seems down for now."; - keyValueUrl = null; - - async initialize() { - await this.manageKeyValueXyzAuth(null); - return this.keyValueUrl !== null; - } - - async getValue(key: string | null = null) { - await this.manageKeyValueXyzAuth(key); - const response = await axios({ - method: "get", - url: this.keyValueUrl, - responseType: "json", - }); - return response.status === 200 ? response.data || {} : null; - } - - async setValue(key: string | null = null, value: any) { - await this.manageKeyValueXyzAuth(key); - await axios({ - method: "post", - url: this.keyValueUrl, - responseType: "json", - data: value, - }); - return true; - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async updateActiveScratchOrg(_scratchOrg: any, _keyValues: any) { - return null; - } - - async manageKeyValueXyzAuth(key: string | null = null) { - if (this.keyValueUrl == null) { - const config = await getConfig("user"); - const apiKey = config.keyValueXyzApiKey || process.env.KEY_VALUE_XYZ_API_KEY; - if (apiKey === null) { - throw new SfdxError(c.red("You need to define a keyvalue.xyz apiKey in config.keyValueXyzApiKey or env var KEY_VALUE_XYZ_API_KEY")); - } - if (key === null) { - const projectName = config.projectName || "default"; - key = `pool_${projectName}`; - } - this.keyValueUrl = `https://api.keyvalue.xyz/${apiKey}/${key}`; - uxLog(this, c.grey("keyvalue.xyz url: " + this.keyValueUrl)); - } - } - - async userSetup() { - const config = await getConfig("user"); - const projectName = config.projectName || "default"; - const keyValueUrl = `https://api.keyvalue.xyz/new/pool_${projectName}`; - const resp = await axios({ - method: "post", - url: keyValueUrl, - responseType: "json", - }); - const keyValueXyzApiKey = resp.data; - await setConfig("user", { keyValueXyzApiKey: keyValueXyzApiKey }); - uxLog(this, c.cyan("Created new keyvalue.xyz API key and stored in local untracked config")); - uxLog(this, c.yellow(`In CI config, set protected variable 
${c.bold(c.green("KEY_VALUE_XYZ_API_KEY = " + keyValueXyzApiKey))}`)); - return true; - } - - async userAuthenticate() { - const config = await getConfig("user"); - const response = await prompts([ - { - type: "text", - name: "keyValueXyzApiKey", - message: c.cyanBright("Please input keyvalue.xyz API KEY (ask the value to your tech lead or look in CI variable KEY_VALUE_XYZ_API_KEY )"), - initial: config.keyValueXyzApiKey || null, - }, - ]); - await setConfig("user", { keyValueXyzApiKey: response.keyValueXyzApiKey }); - return true; - } -} diff --git a/src/common/keyValueProviders/kvdbIo.ts b/src/common/keyValueProviders/kvdbIo.ts deleted file mode 100644 index 3456aa534..000000000 --- a/src/common/keyValueProviders/kvdbIo.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { SfdxError } from "@salesforce/core"; -import axios from "axios"; -import * as c from "chalk"; -import * as crypto from "crypto"; -import { getConfig, setConfig } from "../../config"; -import { uxLog } from "../utils"; -import { KeyValueProviderInterface } from "../utils/keyValueUtils"; -import { setPoolStorage } from "../utils/poolUtils"; -import { prompts } from "../utils/prompts"; - -export class KvdbIoProvider implements KeyValueProviderInterface { - name = "kvdb.io"; - description = "kvdb.io external service (api token, auth with Bearer). Requires paid plan, or renewing config every 2 weeks"; - kvdbIoUrl = null; - kvdbIoSecretKey = null; - - async initialize() { - await this.manageKvdbIoAuth(null); - return this.kvdbIoUrl !== null && this.kvdbIoSecretKey !== null; - } - - async getValue(key: string | null = null) { - await this.manageKvdbIoAuth(key); - const response = await axios({ - method: "get", - url: this.kvdbIoUrl, - responseType: "json", - headers: { - Authorization: "Bearer " + this.kvdbIoSecretKey, - }, - }); - return response.status === 200 ? 
response.data || {} : null; - } - - async setValue(key: string | null = null, value: any) { - await this.manageKvdbIoAuth(key); - const resp = await axios({ - method: "post", - url: this.kvdbIoUrl, - responseType: "json", - data: JSON.stringify(value), - headers: { - Authorization: "Bearer " + this.kvdbIoSecretKey, - }, - }); - return resp.status === 200; - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async updateActiveScratchOrg(_scratchOrg: any, _keyValues: any) { - return null; - } - - async manageKvdbIoAuth(key: string | null = null) { - if (this.kvdbIoUrl == null) { - const config = await getConfig("user"); - const kvdbIoBucketId = config.kvdbIoBucketId || process.env.KVDB_IO_BUCKET_ID; - if (kvdbIoBucketId == null) { - throw new SfdxError(c.red("You need to define an kvdb.io apiKey in config.kvdbIoBucketId or CI env var KVDB_IO_BUCKET_ID")); - } - const kvdbIoSecretKey = config.kvdbIoSecretKey || process.env.KVDB_IO_SECRET_KEY; - if (kvdbIoSecretKey == null) { - throw new SfdxError(c.red("You need to define an kvdb.io secretKey in config.kvdbIoSecretKey or CI env var KVDB_IO_SECRET_KEY")); - } - if (key == null) { - const projectName = config.projectName || "default"; - key = `pool_${projectName}`; - } - this.kvdbIoUrl = `https://kvdb.io/${kvdbIoBucketId}/${key}`; - this.kvdbIoSecretKey = kvdbIoSecretKey; - uxLog(this, c.grey("kvdb.io url: " + this.kvdbIoUrl)); - } - } - - async userSetup() { - const config = await getConfig("user"); - const projectName = config.projectName || "default"; - const randomSecretKey = crypto.randomBytes(48).toString("hex"); - const kvdbIoUrl = `https://kvdb.io/`; - const resp = await axios({ - method: "post", - url: kvdbIoUrl, - responseType: "json", - data: { - email: `${projectName}@hardis-scratch-org-pool.com`, - secret_key: randomSecretKey, - }, - }); - const kvdbIoBucketId = resp.data; - await setConfig("user", { kvdbIoSecretKey: randomSecretKey, kvdbIoBucketId: kvdbIoBucketId }); - await 
setPoolStorage({}); - uxLog(this, c.cyan("Created new kvdb.io bucket and stored in local untracked config")); - uxLog( - this, - c.yellow( - `In future CI config, set protected variables ${c.bold(c.green("KVDB_IO_SECRET_KEY = " + randomSecretKey))} and ${c.bold( - c.green("KVDB_IO_BUCKET_ID = " + kvdbIoBucketId), - )}`, - ), - ); - return true; - } - - async userAuthenticate() { - const config = await getConfig("user"); - const response = await prompts([ - { - type: "text", - name: "kvdbIoBucketId", - message: c.cyanBright("Please input kvdb.io BUCKET ID (ask the value to your tech lead or look in CI variable KVDB_IO_BUCKET_ID )"), - initial: config.kvdbIoSecretKey || null, - }, - { - type: "text", - name: "kvdbIoSecretKey", - message: c.cyanBright("Please input kvdb.io BUCKET SECRET KEY (ask the value to your tech lead or look in CI variable KVDB_IO_SECRET_KEY )"), - initial: config.kvdbIoSecretKey || null, - }, - ]); - await setConfig("user", { kvdbIoBucketId: response.kvdbIoBucketId, kvdbIoSecretKey: response.kvdbIoSecretKey }); - return true; - } -} diff --git a/src/common/keyValueProviders/localtest.ts b/src/common/keyValueProviders/localtest.ts index 50c8343e8..0cb87c62d 100644 --- a/src/common/keyValueProviders/localtest.ts +++ b/src/common/keyValueProviders/localtest.ts @@ -1,15 +1,15 @@ -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import * as os from "os"; -import { getConfig } from "../../config"; -import { uxLog } from "../utils"; -import { KeyValueProviderInterface } from "../utils/keyValueUtils"; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import * as os from 'os'; +import { getConfig } from '../../config/index.js'; +import { uxLog } from '../utils/index.js'; +import { KeyValueProviderInterface } from '../utils/keyValueUtils.js'; export class LocalTestProvider implements KeyValueProviderInterface { - name = "localtest"; - description = "Writes in a local file (just 
for tests, can not work in CI)"; - poolStorageLocalFileName = null; + name = 'localtest'; + description = 'Writes in a local file (just for tests, can not work in CI)'; + poolStorageLocalFileName: string | null = null; async initialize() { await this.managePoolStorageLocalFileName(); @@ -18,15 +18,15 @@ export class LocalTestProvider implements KeyValueProviderInterface { async getValue(key: string | null = null) { await this.managePoolStorageLocalFileName(key); - if (fs.existsSync(this.poolStorageLocalFileName)) { - return fs.readJsonSync(this.poolStorageLocalFileName); + if (fs.existsSync(this.poolStorageLocalFileName || '')) { + return fs.readJsonSync(this.poolStorageLocalFileName || ''); } return {}; } async setValue(key: string | null = null, value: any) { await this.managePoolStorageLocalFileName(key); - await fs.writeFile(this.poolStorageLocalFileName, JSON.stringify(value, null, 2), "utf8"); + await fs.writeFile(this.poolStorageLocalFileName || '', JSON.stringify(value, null, 2), 'utf8'); return true; } @@ -38,12 +38,12 @@ export class LocalTestProvider implements KeyValueProviderInterface { async managePoolStorageLocalFileName(key: string | null = null) { if (this.poolStorageLocalFileName == null) { if (key === null) { - const config = await getConfig("user"); - const projectName = config.projectName || "default"; + const config = await getConfig('user'); + const projectName = config.projectName || 'default'; key = `pool_${projectName}`; } this.poolStorageLocalFileName = path.join(os.homedir(), `poolStorage_${key}.json`); - uxLog(this, c.grey("Local test storage file: " + this.poolStorageLocalFileName)); + uxLog("log", this, c.grey('Local test storage file: ' + this.poolStorageLocalFileName)); } } diff --git a/src/common/keyValueProviders/redis.ts b/src/common/keyValueProviders/redis.ts deleted file mode 100644 index 9b67f6594..000000000 --- a/src/common/keyValueProviders/redis.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { SfdxError } from 
"@salesforce/core"; -import * as Keyv from "keyv"; -import * as c from "chalk"; -import { getConfig, setConfig } from "../../config"; -import { uxLog } from "../utils"; -import { KeyValueProviderInterface } from "../utils/keyValueUtils"; -import { setPoolStorage } from "../utils/poolUtils"; -import { prompts } from "../utils/prompts"; - -export class RedisProvider implements KeyValueProviderInterface { - name = "redis"; - description = "redis external service (redis secure db authentication)"; - keyv = null; - redisKey = null; - authError = false; - - async initialize() { - await this.manageRedisAuth("init"); - const connectionOk = this.keyv !== null; - await this.disconnectRedis(); - return connectionOk; - } - - async getValue(key: string | null = null) { - await this.manageRedisAuth(key); - const value = await this.keyv.get(this.redisKey); - await this.disconnectRedis(); - return value; - } - - async setValue(key: string | null = null, value: any) { - await this.manageRedisAuth(key); - await this.keyv.set(this.redisKey, value); - await this.disconnectRedis(); - return true; - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async updateActiveScratchOrg(_scratchOrg: any, _keyValues: any) { - return null; - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async manageRedisAuth(key: string | null = null) { - if (this.keyv == null) { - const config = await getConfig("user"); - const redisAuthUrl = config.redisAuthUrl || process.env.REDIS_AUTH_URL; - if (redisAuthUrl == null) { - throw new SfdxError(c.red("You need to define an redis auth URL config.redisAuthUrl or CI env var REDIS_AUTH_URL")); - } - if (this.redisKey == null) { - const projectName = config.projectName || "default"; - this.redisKey = `pool${projectName}`; - } - this.keyv = new Keyv(redisAuthUrl, { disable_resubscribing: true, autoResubscribe: false, maxRetriesPerRequest: 10 }); - this.keyv.on("error", (err) => { - uxLog(this, "[pool]" + c.red("Redis connection 
Error :" + err)); - }); - uxLog(this, c.grey("[pool] Requested redis connection")); - } - } - - async disconnectRedis() { - if (this.keyv?.opts?.store?.redis) { - // Kill redis connection - this.keyv.opts.store.redis.disconnect(); - this.keyv = null; - } - } - - async userSetup() { - const config = await getConfig("user"); - const projectName = config.projectName || "default"; - - uxLog(this, c.cyan(`You need a redis account. You can create one for free at ${c.bold("https://redis.com/try-free/")}`)); - uxLog( - this, - c.cyan("Create a database that you can name scratchPool, then build auth URL by appending default user password and public endpoint"), - ); - const response = await prompts([ - { - type: "text", - name: "redisAuthUrl", - message: c.cyanBright("Please enter authentication URL for Redis remote database"), - initial: config.redisAuthUrl || null, - }, - ]); - const redisAuthUrl = response.redisAuthUrl; - await setConfig("user", { redisAuthUrl: redisAuthUrl }); - await setPoolStorage({}); - uxLog(this, c.cyan(`Initialized scratch org pool storage for ${projectName} on Redis`)); - uxLog(this, c.yellow(`In CI config, set protected variable ${c.bold(c.green("REDIS_AUTH_URL = " + redisAuthUrl))}}`)); - return true; - } - - async userAuthenticate() { - const config = await getConfig("user"); - const response = await prompts([ - { - type: "text", - name: "redisAuthUrl", - message: c.cyanBright("Please enter authentication URL for Redis remote database"), - initial: config.redisAuthUrl || null, - }, - ]); - const redisAuthUrl = response.redisAuthUrl; - await setConfig("user", { redisAuthUrl: redisAuthUrl }); - return true; - } -} diff --git a/src/common/keyValueProviders/salesforce.ts b/src/common/keyValueProviders/salesforce.ts index fd5c0388b..0284f379f 100644 --- a/src/common/keyValueProviders/salesforce.ts +++ b/src/common/keyValueProviders/salesforce.ts @@ -1,21 +1,21 @@ import { Connection } from "@salesforce/core"; -import * as c from "chalk"; +import c 
from "chalk"; import * as he from "he"; import * as path from "path"; -import { getConfig } from "../../config"; -import { PACKAGE_ROOT_DIR } from "../../settings"; -import { uxLog } from "../utils"; -import { soqlQuery } from "../utils/apiUtils"; -import { deployMetadatas } from "../utils/deployUtils"; -import { KeyValueProviderInterface } from "../utils/keyValueUtils"; -import { setPoolStorage } from "../utils/poolUtils"; +import { getConfig } from "../../config/index.js"; +import { PACKAGE_ROOT_DIR } from "../../settings.js"; +import { uxLog } from "../utils/index.js"; +import { soqlQuery } from "../utils/apiUtils.js"; +import { deployMetadatas } from "../utils/deployUtils.js"; +import { KeyValueProviderInterface } from "../utils/keyValueUtils.js"; +import { setPoolStorage } from "../utils/poolUtils.js"; export class SalesforceProvider implements KeyValueProviderInterface { name = "salesforce"; description = "Use a custom object on a Salesforce org (usually DevHub) to store scratch org pool tech info"; - conn: Connection = null; - recordName = null; + conn: Connection | null = null; + recordName: string | null = null; async initialize(options) { await this.manageSfdcOrgAuth(options); @@ -24,6 +24,9 @@ export class SalesforceProvider implements KeyValueProviderInterface { // eslint-disable-next-line @typescript-eslint/no-unused-vars async getValue(_key: string | null = null) { + if (!this.conn) { + return "ERROR"; + } await this.manageSfdcOrgAuth(); // Single record upsert const queryRes = await soqlQuery( @@ -40,6 +43,9 @@ export class SalesforceProvider implements KeyValueProviderInterface { // eslint-disable-next-line @typescript-eslint/no-unused-vars async setValue(_key: string | null = null, value: any) { await this.manageSfdcOrgAuth(); + if (!this.conn) { + return false; + } // Single record upsert const queryRes = await soqlQuery( `SELECT Id,Name,ValueText__c FROM SfdxHardisKeyValueStore__c WHERE Name='${this.recordName}' LIMIT 1`, @@ -63,6 +69,9 @@ export 
class SalesforceProvider implements KeyValueProviderInterface { } async updateActiveScratchOrg(scratchOrg: any, keyValues: any) { + if (!this.conn) { + return; + } const orgId = scratchOrg?.scratchOrgInfo?.orgId ? scratchOrg.scratchOrgInfo.orgId.slice(0, 15) : scratchOrg.Id.slice(0, 15); const activeScratchOrg: any = await this.conn.sobject("ActiveScratchOrg").findOne({ ScratchOrg: orgId }, { Id: true, Description: true }); keyValues.Id = activeScratchOrg.Id; @@ -88,11 +97,11 @@ export class SalesforceProvider implements KeyValueProviderInterface { try { await deployMetadatas({ deployDir: path.join(path.join(PACKAGE_ROOT_DIR, "defaults/utils/sfdxHardisKeyValueStore", ".")), - soap: true, - targetUsername: options.devHubConn.options.authInfo.fields.username, + targetUsername: options.devHubConn.options.authInfo.username, }); } catch (e) { uxLog( + "error", this, c.red(`Unable to deploy CustomObject SfdxHardisKeyValueStore__c You mut create manually an Custom Object SfdxHardisKeyValueStore__c: @@ -100,7 +109,7 @@ You mut create manually an Custom Object SfdxHardisKeyValueStore__c: - Field SfdxHardisKeyValueStore__c.ValueText__c of type TextArea (long) (with maximum size 131072 chars) `), ); - uxLog(this, c.yellow("You may have to create a Permission Set with all rights on SfdxHardisKeyValueStore__c and assign users to it")); + uxLog("warning", this, c.yellow("You may have to create a Permission Set with all rights on SfdxHardisKeyValueStore__c and assign users to it")); throw e; } // Initialize storage @@ -108,10 +117,10 @@ You mut create manually an Custom Object SfdxHardisKeyValueStore__c: await setPoolStorage({}, options); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { - uxLog(this, c.yellow("You may have to create a Permission Set with all rights on SfdxHardisKeyValueStore__c and assign users to it")); + uxLog("warning", this, c.yellow("You may have to create a Permission Set with all rights on SfdxHardisKeyValueStore__c and assign 
users to it")); } - uxLog(this, c.green("Created KeyValue storage on Salesforce org")); + uxLog("success", this, c.green("Created KeyValue storage on Salesforce org")); return true; } diff --git a/src/common/metadata-utils/index.ts b/src/common/metadata-utils/index.ts index 4d7e4fabc..172f2ee4d 100644 --- a/src/common/metadata-utils/index.ts +++ b/src/common/metadata-utils/index.ts @@ -1,401 +1,56 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import * as extractZip from "extract-zip"; -import * as fs from "fs-extra"; -import * as path from "path"; -import * as sortArray from "sort-array"; -import { elapseEnd, elapseStart, execCommand, execSfdxJson, filterPackageXml, git, isGitRepo, uxLog } from "../../common/utils"; -import { CONSTANTS } from "../../config"; -import { PACKAGE_ROOT_DIR } from "../../settings"; -import { getCache, setCache } from "../cache"; -import { buildOrgManifest } from "../utils/deployUtils"; -import { listMajorOrgs } from "../utils/orgConfigUtils"; -import { isSfdxProject } from "../utils/projectUtils"; -import { prompts } from "../utils/prompts"; -import { parsePackageXmlFile } from "../utils/xmlUtils"; -import { listMetadataTypes } from "./metadataList"; -import { FileStatusResult } from "simple-git"; +import { SfError } from '@salesforce/core'; +import c from 'chalk'; +import extractZip from 'extract-zip'; +import fs from 'fs-extra'; +import * as path from 'path'; +import sortArray from 'sort-array'; +import { + elapseEnd, + elapseStart, + execCommand, + execSfdxJson, + filterPackageXml, + git, + isGitRepo, + sortCrossPlatform, + uxLog, +} from '../../common/utils/index.js'; +import { getApiVersion } from '../../config/index.js'; +import { PACKAGE_ROOT_DIR } from '../../settings.js'; +import { getCache, setCache } from '../cache/index.js'; +import { buildOrgManifest } from '../utils/deployUtils.js'; +import { listMajorOrgs } from '../utils/orgConfigUtils.js'; +import { GLOB_IGNORE_PATTERNS, isSfdxProject } 
from '../utils/projectUtils.js'; +import { prompts } from '../utils/prompts.js'; +import { parsePackageXmlFile } from '../utils/xmlUtils.js'; +import { listMetadataTypes } from './metadataList.js'; +import { FileStatusResult } from 'simple-git'; +import { glob } from 'glob'; class MetadataUtils { // Describe packageXml <=> metadata folder correspondance - public static describeMetadataTypes() { - // folder is the corresponding folder in metadatas folder - // nameSuffixList are the files and/or folder names , built from the name of the package.xml item ( in ) - - const metadataTypesDescription = { - // Metadatas to use for copy - ApexClass: { - folder: "classes", - nameSuffixList: [".cls", ".cls-meta.xml"], - sfdxNameSuffixList: [".cls", "-meta.xml"], - permissionSetTypeName: "classAccesses", - permissionSetMemberName: "apexClass", - }, - ApexComponent: { - folder: "components", - nameSuffixList: [".component", ".component-meta.xml"], - sfdxNameSuffixList: [".component", ".component-meta.xml"], - }, - ApexPage: { - folder: "pages", - nameSuffixList: [".page", ".page-meta.xml"], - sfdxNameSuffixList: [".page", "-meta.xml"], - permissionSetTypeName: "pageAccesses", - permissionSetMemberName: "apexPage", - }, - ApexTrigger: { - folder: "triggers", - nameSuffixList: [".trigger", ".trigger-meta.xml"], - sfdxNameSuffixList: [".trigger", "-meta.xml"], - }, - ApprovalProcess: { - folder: "approvalProcesses", - nameSuffixList: [".approvalProcess"], - sfdxNameSuffixList: [".approvalProcess-meta.xml"], - }, - AuraDefinitionBundle: { - folder: "aura", - nameSuffixList: [""], - sfdxNameSuffixList: [""], - }, - AuthProvider: { - folder: "authproviders", - nameSuffixList: [".authprovider"], - sfdxNameSuffixList: [".authprovider-meta.xml"], - }, - LightningComponentBundle: { - folder: "lwc", - nameSuffixList: [""], - sfdxNameSuffixList: [""], - }, - ContentAsset: { - folder: "contentassets", - nameSuffixList: [".asset", ".asset-meta.xml"], - sfdxNameSuffixList: [".asset", 
".asset-meta.xml"], - }, - CustomApplication: { - folder: "applications", - nameSuffixList: [".app"], - sfdxNameSuffixList: [".app-meta.xml"], - permissionSetTypeName: "applicationVisibilities", - permissionSetMemberName: "application", - }, - CustomLabel: { - folder: "labels", - nameSuffixList: [".labels"], - sfdxNameSuffixList: [".labels-meta.xml"], - }, - CustomMetadata: { - folder: "customMetadata", - nameSuffixList: [".md"], - sfdxNameSuffixList: [".md-meta.xml"], - }, - CustomMetadataType: { - virtual: true, - permissionSetTypeName: "customMetadataTypeAccesses", - permissionSetMemberName: "name", - }, - CustomSettings: { - virtual: true, - permissionSetTypeName: "customSettingAccesses", - permissionSetMemberName: "name", - }, - CustomSite: { - folder: "sites", - nameSuffixList: [".site"], - sfdxNameSuffixList: [".site-meta.xml"], - }, - CustomObjectTranslation: { - folder: "objectTranslations", - nameSuffixList: [".objectTranslation"], - }, // We use Translations to define the list of objectTranslations to filter & copy - CustomPermission: { - folder: "customPermissions", - nameSuffixList: [".customPermission"], - sfdxNameSuffixList: [".customPermission-meta.xml"], - }, - CustomPlatformEvent: { - virtual: true, - permissionSetTypeName: "objectPermissions", - permissionSetMemberName: "object", - }, - CustomTab: { - folder: "tabs", - nameSuffixList: [".tab"], - sfdxNameSuffixList: [".tab-meta.xml"], - permissionSetTypeName: "tabSettings", - permissionSetMemberName: "tab", - }, - Document: { - folder: "documents", - nameSuffixList: ["", "-meta.xml"], - sfdxNameSuffixList: [".documentFolder-meta.xml", ".document-meta.xml", ".png"], - metasInSubFolders: true, - }, - EmailTemplate: { - folder: "email", - nameSuffixList: ["", ".email", ".email-meta.xml"], - sfdxNameSuffixList: [".email", ".email-meta.xml"], - metasInSubFolders: true, - }, - EscalationRules: { - folder: "escalationRules", - nameSuffixList: [".escalationRules"], - sfdxNameSuffixList: 
[".escalationRules-meta.xml"], - }, - FlexiPage: { - folder: "flexipages", - nameSuffixList: [".flexipage"], - sfdxNameSuffixList: [".flexipage-meta.xml"], - }, - Flow: { - folder: "flows", - nameSuffixList: [".flow"], - sfdxNameSuffixList: [".flow-meta.xml"], - }, - GlobalValueSet: { - folder: "globalValueSets", - nameSuffixList: [".globalValueSet"], - sfdxNameSuffixList: [".globalValueSet-meta.xml"], - }, - GlobalValueSetTranslation: { - folder: "globalValueSetTranslations", - nameSuffixList: [".globalValueSetTranslation"], - sfdxNameSuffixList: [".globalValueSetTranslation-meta.xml"], - }, - HomePageLayout: { - folder: "homePageLayouts", - nameSuffixList: [".homePageLayout"], - sfdxNameSuffixList: [".homePageLayout-meta.xml"], - }, - Layout: { - folder: "layouts", - nameSuffixList: [".layout"], - sfdxNameSuffixList: [".layout-meta.xml"], - }, - NamedCredential: { - folder: "namedCredentials", - nameSuffixList: [".namedCredential"], - sfdxNameSuffixList: [".namedCredential-meta.xml"], - }, - Network: { - folder: "networks", - nameSuffixList: [".network"], - sfdxNameSuffixList: [".network-meta.xml"], - }, - NetworkBranding: { - folder: "networkBranding", - nameSuffixList: ["", ".networkBranding", ".networkBranding-meta.xml"], - sfdxNameSuffixList: [".networkBranding-meta.xml", ".networkBranding"], - }, - NotificationTypeConfig: { - folder: "notificationtypes", - nameSuffixList: [".notiftype"], - sfdxNameSuffixList: [".notiftype-meta.xml"], - }, - PermissionSet: { - folder: "permissionsets", - nameSuffixList: [".permissionset"], - sfdxNameSuffixList: [".permissionset-meta.xml"], - }, - PlatformCachePartition: { - folder: "cachePartitions", - nameSuffixList: [".cachePartition"], - sfdxNameSuffixList: [".cachePartition-meta.xml"], - }, - Profile: { - folder: "profiles", - nameSuffixList: [".profile"], - sfdxNameSuffixList: [".profile-meta.xml"], - }, - Queue: { - folder: "queues", - nameSuffixList: [".queue"], - sfdxNameSuffixList: [".queue-meta.xml"], - }, - 
QuickAction: { - folder: "quickActions", - nameSuffixList: [".quickAction"], - sfdxNameSuffixList: [".quickAction-meta.xml"], - }, - RemoteSiteSetting: { - folder: "remoteSiteSettings", - nameSuffixList: [".remoteSite"], - sfdxNameSuffixList: [".remoteSite-meta.xml"], - }, - Report: { - folder: "reports", - nameSuffixList: ["", "-meta.xml"], - sfdxNameSuffixList: [".reportFolder-meta.xml"], - }, - Role: { - folder: "roles", - nameSuffixList: [".role"], - sfdxNameSuffixList: [".role-meta.xml"], - }, - Settings: { - folder: "settings", - nameSuffixList: [".settings"], - sfdxNameSuffixList: [".settings-meta.xml"], - }, - SiteDotCom: { - folder: "siteDotComSites", - nameSuffixList: [".site", ".site-meta.xml"], - sfdxNameSuffixList: [".site", ".site-meta.xml"], - }, - StandardValueSet: { - folder: "standardValueSets", - nameSuffixList: [".standardValueSet"], - sfdxNameSuffixList: [".standardValueSet-meta.xml"], - }, - StandardValueSetTranslation: { - folder: "standardValueSetTranslations", - nameSuffixList: [".standardValueSetTranslation"], - sfdxNameSuffixList: [".standardValueSetTranslation-meta.xml"], - }, - StaticResource: { - folder: "staticresources", - nameSuffixList: [".resource", ".resource-meta.xml"], - sfdxNameSuffixList: [".resource-meta.xml", ".json", ".txt", ".bin", ".js", ".mp3", ".gif"], - }, - // 'Translations': { folder: 'translations', nameSuffixList: ['.translation'] }, processed apart, as they need to be filtered - Workflow: { - folder: "workflows", - nameSuffixList: [".workflow"], - sfdxNameSuffixList: [".workflow-meta.xml"], - }, - - // Metadatas to use for building objects folder ( SObjects ) - BusinessProcess: { sobjectRelated: true }, - CompactLayout: { sobjectRelated: true }, - CustomField: { - sobjectRelated: true, - permissionSetTypeName: "fieldPermissions", - permissionSetMemberName: "field", - }, - CustomObject: { - sobjectRelated: true, - permissionSetTypeName: "objectPermissions", - permissionSetMemberName: "object", - }, - FieldSet: { 
sobjectRelated: true }, - ListView: { sobjectRelated: true }, - RecordType: { - sobjectRelated: true, - permissionSetTypeName: "recordTypeVisibilities", - permissionSetMemberName: "recordType", - }, - UserPermission: { - sobjectRelated: false, - permissionSetTypeName: "userPermissions", - permissionSetMemberName: "name", - }, - ValidationRule: { sobjectRelated: true }, - WebLink: { sobjectRelated: true }, - - // Special case: Translations, used for object copy and for filtering - Translations: { - translationRelated: true, - folder: "translations", - nameSuffixList: [".translation"], - sfdxNameSuffixList: [".translation-meta.xml"], - }, - }; - - return metadataTypesDescription; - } - - // Describe .object file <=> package.xml formats - public static describeObjectProperties() { - const objectFilteringProperties = [ - { - objectXmlPropName: "businessProcesses", - packageXmlPropName: "BusinessProcess", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".businessProcess-meta.xml"], - }, - { - objectXmlPropName: "compactLayouts", - packageXmlPropName: "CompactLayout", - nameProperty: "fullName", - translationNameProperty: "layout", - sfdxNameSuffixList: [".compactLayout-meta.xml"], - }, - { - objectXmlPropName: "fields", - packageXmlPropName: "CustomField", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".field-meta.xml"], - }, - { - objectXmlPropName: "listViews", - packageXmlPropName: "ListView", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".listView-meta.xml"], - }, - { - objectXmlPropName: "layouts", - packageXmlPropName: "Layout", - nameProperty: "fullName", - translationNameProperty: "layout", - sfdxNameSuffixList: [".layout-meta.xml"], - }, - { - objectXmlPropName: "recordTypes", - packageXmlPropName: "RecordType", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".recordType-meta.xml"], - }, - { - 
objectXmlPropName: "webLinks", - packageXmlPropName: "WebLink", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".webLink-meta.xml"], - }, - { - objectXmlPropName: "validationRules", - packageXmlPropName: "ValidationRule", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".validationRule-meta.xml"], - }, - { - objectXmlPropName: "fieldSets", - packageXmlPropName: "FieldSet", - nameProperty: "fullName", - translationNameProperty: "name", - sfdxNameSuffixList: [".fieldSet-meta.xml"], - }, - ]; - return objectFilteringProperties; - } public static listMetadatasNotManagedBySfdx() { return [ - "ApexEmailNotifications", - "AppMenu", - "AppointmentSchedulingPolicy", - "Audience", - "BlacklistedConsumer", - "ConnectedApp", - "CustomIndex", - "ForecastingType", - "IframeWhiteListUrlSettings", - "ManagedContentType", - "NotificationTypeConfig", - "Settings", - "TopicsForObjects", + 'ApexEmailNotifications', + 'AppMenu', + 'AppointmentSchedulingPolicy', + 'Audience', + 'BlacklistedConsumer', + 'ConnectedApp', + 'CustomIndex', + 'ForecastingType', + 'IframeWhiteListUrlSettings', + 'ManagedContentType', + 'NotificationTypeConfig', + 'Settings', + 'TopicsForObjects', ]; } // Get default org that is currently selected for user public static async getCurrentOrg() { - const displayOrgCommand = "sfdx force:org:display"; + const displayOrgCommand = 'sf org display'; const displayResult = await execSfdxJson(displayOrgCommand, this, { fail: false, output: false, @@ -407,29 +62,35 @@ class MetadataUtils { } // List local orgs for user - public static async listLocalOrgs(type = "any", options: any = {}) { - let orgListResult = await getCache("force:org:list", null); + public static async listLocalOrgs(type = 'any', options: any = {}) { + const quickListParams = options?.quickOrgList === true ? 
' --skip-connection-status' : ''; + const orgListCommand = `sf org list${quickListParams}`; + let orgListResult = options.useCache === false ? null : await getCache(orgListCommand, null); if (orgListResult == null) { - orgListResult = await execSfdxJson("sfdx force:org:list", this); - await setCache("force:org:list", orgListResult); + orgListResult = await execSfdxJson(orgListCommand, this); + await setCache(orgListCommand, orgListResult); } // All orgs - if (type === "any") { + if (type === 'any') { return orgListResult?.result || []; } // Sandbox - else if (type === "sandbox") { + else if (type === 'sandbox') { return ( orgListResult?.result?.nonScratchOrgs?.filter((org: any) => { - return org.loginUrl.includes("--") || org.loginUrl.includes("test.salesforce.com"); + return org.loginUrl.includes('--') || org.loginUrl.includes('test.salesforce.com'); }) || [] ); } // Sandbox - else if (type === "devSandbox") { + else if (type === 'devSandbox') { + const orgListSorted = sortArray(orgListResult?.result?.nonScratchOrgs || [], { + by: ['instanceUrl', 'username', 'alias'], + order: ['asc', 'asc', 'asc'], + }); const allSandboxes = - orgListResult?.result?.nonScratchOrgs?.filter((org: any) => { - return org.loginUrl.includes("--") || org.loginUrl.includes("test.salesforce.com"); + orgListSorted.filter((org: any) => { + return org.loginUrl.includes('--') || org.loginUrl.includes('test.salesforce.com'); }) || []; const majorOrgs = await listMajorOrgs(); const devSandboxes = allSandboxes.filter((org: any) => { @@ -437,17 +98,20 @@ class MetadataUtils { majorOrgs.filter( (majorOrg) => majorOrg.targetUsername === org.username || - (majorOrg.instanceUrl === org.instanceUrl && !majorOrg.instanceUrl.includes("test.salesforce.com")), + (majorOrg.instanceUrl === org.instanceUrl && !majorOrg.instanceUrl.includes('test.salesforce.com')) ).length === 0 ); }); return devSandboxes; } // scratch - else if (type === "scratch") { + else if (type === 'scratch') { return ( 
orgListResult?.result?.scratchOrgs?.filter((org: any) => { - return org.status === "Active" && (options.devHubUsername && org.devHubUsername !== options.devHubUsername ? false : true); + return ( + org.status === 'Active' && + (options.devHubUsername && org.devHubUsername !== options.devHubUsername ? false : true) + ); }) || [] ); } @@ -455,10 +119,10 @@ class MetadataUtils { } // List installed packages on a org - public static async listInstalledPackages(orgAlias: string = null, commandThis: any): Promise { - let listCommand = "sfdx force:package:installed:list"; + public static async listInstalledPackages(orgAlias: string | null = null, commandThis: any): Promise { + let listCommand = 'sf package installed list'; if (orgAlias != null) { - listCommand += ` -u ${orgAlias}`; + listCommand += ` --target-org ${orgAlias}`; } try { const alreadyInstalled = await execSfdxJson(listCommand, commandThis, { @@ -467,93 +131,149 @@ class MetadataUtils { }); return alreadyInstalled?.result || []; } catch (e) { - uxLog(this, c.yellow(`Unable to list installed packages: This is probably a @salesforce/cli bug !\n${e.message}\n${e.stack}`)); + uxLog( + "warning", + this, + c.yellow( + `Unable to list installed packages: This is probably a @salesforce/cli bug !\n${(e as Error).message}\n${(e as Error).stack + }` + ) + ); globalThis.workaroundCliPackages = true; return []; } } // Install package on existing org - public static async installPackagesOnOrg(packages: any[], orgAlias: string = null, commandThis: any = null, context = "none") { + public static async installPackagesOnOrg( + packages: any[], + orgAlias: string | null = null, + commandThis: any = null, + context = 'none' + ) { + uxLog("action", commandThis, c.cyan(`Listing packages installed on ` + (orgAlias ? 
c.green(orgAlias) : 'current org') + '...')); const alreadyInstalled = await MetadataUtils.listInstalledPackages(orgAlias, this); if (globalThis?.workaroundCliPackages === true) { uxLog( + "warning", commandThis, c.yellow(`Skip packages installation because of a @salesforce/cli bug. Until it is solved, please install packages manually in target org if necessary. -Issue tracking: https://github.com/forcedotcom/cli/issues/2426`), +Issue tracking: https://github.com/forcedotcom/cli/issues/2426`) ); return; } for (const package1 of packages) { if ( - alreadyInstalled.filter((installedPackage: any) => package1.SubscriberPackageVersionId === installedPackage.SubscriberPackageVersionId) - .length === 0 + alreadyInstalled.filter( + (installedPackage: any) => package1.SubscriberPackageVersionId === installedPackage.SubscriberPackageVersionId + ).length === 0 ) { - if (context === "scratch" && package1.installOnScratchOrgs === false) { + if (context === 'scratch' && package1.installOnScratchOrgs === false) { uxLog( + "log", commandThis, - c.cyan(`Skip installation of ${c.green(package1.SubscriberPackageName)} as it is configured to not be installed on scratch orgs`), + c.grey( + `Skip installation of ${c.green( + package1.SubscriberPackageName + )} as it is configured to not be installed on scratch orgs` + ) ); continue; } - if (context === "deploy" && package1.installDuringDeployments === false) { + if (context === 'deploy' && package1.installDuringDeployments === false) { uxLog( + "log", commandThis, - c.cyan(`Skip installation of ${c.green(package1.SubscriberPackageName)} as it is configured to not be installed on scratch orgs`), + c.grey( + `Skip installation of ${c.green( + package1.SubscriberPackageName + )} as it is configured to not be installed on scratch orgs` + ) ); continue; } uxLog( + "log", commandThis, c.cyan( `Installing package ${c.green( - `${c.bold(package1.SubscriberPackageName || "")} - ${c.bold(package1.SubscriberPackageVersionName || "")}`, - )}...`, - 
), + `${c.bold(package1.SubscriberPackageName || '')} - ${c.bold(package1.SubscriberPackageVersionName || '')}` + )}...` + ) ); if (package1.SubscriberPackageVersionId == null) { - throw new SfdxError( - c.red(`[sfdx-hardis] You must define ${c.bold("SubscriberPackageVersionId")} in .sfdx-hardis.yml (in installedPackages property)`), + throw new SfError( + c.red( + `[sfdx-hardis] You must define ${c.bold( + 'SubscriberPackageVersionId' + )} in .sfdx-hardis.yml (in installedPackages property)` + ) ); } - const securityType = package1.SecurityType || "AdminsOnly"; + const securityType = package1.SecurityType || 'AdminsOnly'; let packageInstallCommand = - "sfdx force:package:install" + + 'sf package install' + ` --package ${package1.SubscriberPackageVersionId}` + - " --noprompt" + - ` --securitytype ${securityType}` + - " -w 60" + - " --json " + - (package1.installationkey != null && package1.installationkey != "" ? ` --installationkey ${package1.installationkey}` : ""); + ' --no-prompt' + + ` --security-type ${securityType}` + + ' --wait 60' + + ' --json ' + + (package1.installationkey != null && package1.installationkey != '' + ? 
` --installationkey ${package1.installationkey}` + : ''); if (orgAlias != null) { packageInstallCommand += ` -u ${orgAlias}`; } elapseStart(`Install package ${package1.SubscriberPackageName}`); try { - await execCommand(packageInstallCommand, this, { + await execCommand(packageInstallCommand, null, { fail: true, output: true, }); - } catch (ex) { - const ignoredErrors = ["Une version plus récente de ce package est installée.", "A newer version of this package is currently installed."]; + } catch (ex: any) { + if (ex.message.includes('Installation key not valid')) { + uxLog( + "warning", + this, + c.yellow( + `${c.bold('Package requiring password')}: Please manually install package ${package1.SubscriberPackageName + } in target org using its password, and define 'installDuringDeployments: false' in its .sfdx-hardis.yml reference` + ) + ); + throw ex; + } + const ignoredErrors = [ + 'Une version plus récente de ce package est installée.', + 'A newer version of this package is currently installed.', + ]; // If ex.message contains at least one of the ignoredError, don't rethrow exception if (!ignoredErrors.some((msg) => ex.message && ex.message.includes(msg))) { throw ex; } uxLog( + "warning", this, c.yellow( - `${c.bold("This is not a real error")}: A newer version of ${ - package1.SubscriberPackageName - } has been found. You may update installedPackages property in .sfdx-hardis.yml`, - ), + `${c.bold('This is not a real error')}: A newer version of ${package1.SubscriberPackageName + } has been found. 
You may upgrade stored package version using VsCode SFDX-Hardis "Installed Packages" feature in menu "DevOps Pipeline" (it will update installedPackages property in .sfdx-hardis.yml)` + ) + ); + uxLog( + "warning", + this, + c.yellow( + `You can do that using command ${c.bold('sf hardis:org:retrieve:packageconfig')} in a minor git branch` + ) ); - uxLog(this, c.yellow(`You can do that using command ${c.bold("sfdx hardis:org:retrieve:packageconfig")} in a minor git branch`)); } elapseEnd(`Install package ${package1.SubscriberPackageName}`); } else { - uxLog(commandThis, c.cyan(`Skip installation of ${c.green(package1.SubscriberPackageName)} as it is already installed`)); + uxLog( + "log", + commandThis, + c.grey(`Skip installation of ${c.green(package1.SubscriberPackageName)} as it is already installed`) + ); } } } @@ -566,104 +286,109 @@ Issue tracking: https://github.com/forcedotcom/cli/issues/2426`), filteredMetadatas: string[], options: any = {}, commandThis: any, - debug: boolean, + orgUsername: string, + debug: boolean ) { // Create output folder if not existing await fs.ensureDir(metadataFolder); // Build package.xml for all org - await buildOrgManifest(commandThis.org.getUsername(), "package-full.xml"); - await fs.copyFile("package-full.xml", "package.xml"); + await buildOrgManifest(orgUsername, 'package-full.xml'); + await fs.copyFile('package-full.xml', 'package.xml'); // Filter managed items if requested if (options.filterManagedItems) { - uxLog(commandThis, c.cyan("Filtering managed items from package.Xml manifest...")); + uxLog("action", commandThis, c.cyan('Filtering managed items from package.Xml manifest...')); // List installed packages & collect managed namespaces - let namespaces = []; + let namespaces: any[] = []; if (isSfdxProject()) { // Use sfdx command if possible - const installedPackages = await this.listInstalledPackages(null, commandThis); + const installedPackages = await this.listInstalledPackages(orgUsername, commandThis); for (const 
installedPackage of installedPackages) { - if (installedPackage?.SubscriberPackageNamespace !== "" && installedPackage?.SubscriberPackageNamespace != null) { + if ( + installedPackage?.SubscriberPackageNamespace !== '' && + installedPackage?.SubscriberPackageNamespace != null + ) { namespaces.push(installedPackage.SubscriberPackageNamespace); } } } else { // Get namespace list from package.xml - const packageXmlContent = await parsePackageXmlFile("package-full.xml"); - namespaces = packageXmlContent["InstalledPackage"] || []; + const packageXmlContent = await parsePackageXmlFile('package-full.xml'); + namespaces = packageXmlContent['InstalledPackage'] || []; } // Filter package XML to remove identified metadatas - const packageXmlToRemove = fs.existsSync("./remove-items-package.xml") - ? path.resolve("./remove-items-package.xml") - : path.resolve(PACKAGE_ROOT_DIR + "/defaults/remove-items-package.xml"); + const packageXmlToRemove = fs.existsSync('./remove-items-package.xml') + ? path.resolve('./remove-items-package.xml') + : path.resolve(PACKAGE_ROOT_DIR + '/defaults/remove-items-package.xml'); const removeStandard = options.removeStandard === false ? 
false : true; const filterNamespaceRes = await filterPackageXml(packageXml, packageXml, { removeNamespaces: namespaces, removeStandard: removeStandard, removeFromPackageXmlFile: packageXmlToRemove, - updateApiVersion: CONSTANTS.API_VERSION, + updateApiVersion: getApiVersion(), }); - uxLog(commandThis, filterNamespaceRes.message); + uxLog("log", commandThis, filterNamespaceRes.message); } // Filter package.xml only using locally defined remove-items-package.xml - else if (fs.existsSync("./remove-items-package.xml")) { + else if (fs.existsSync('./remove-items-package.xml')) { const filterNamespaceRes = await filterPackageXml(packageXml, packageXml, { - removeFromPackageXmlFile: path.resolve("./remove-items-package.xml"), - updateApiVersion: CONSTANTS.API_VERSION, + removeFromPackageXmlFile: path.resolve('./remove-items-package.xml'), + updateApiVersion: getApiVersion(), }); - uxLog(commandThis, filterNamespaceRes.message); + uxLog("other", commandThis, filterNamespaceRes.message); } // Filter package XML to remove identified metadatas const filterRes = await filterPackageXml(packageXml, packageXml, { removeMetadatas: filteredMetadatas, }); - uxLog(commandThis, filterRes.message); + uxLog("other", commandThis, filterRes.message); // Filter package XML to keep only selected Metadata types if (options.keepMetadataTypes) { const filterRes2 = await filterPackageXml(packageXml, packageXml, { keepMetadataTypes: options.keepMetadataTypes, }); - uxLog(commandThis, filterRes2.message); + uxLog("other", commandThis, filterRes2.message); } // Retrieve metadatas if (fs.readdirSync(metadataFolder).length === 0 || checkEmpty === false) { - uxLog(commandThis, c.cyan(`Retrieving metadatas in ${c.green(metadataFolder)}...`)); + uxLog("action", commandThis, c.cyan(`Retrieving metadatas in ${c.green(metadataFolder)}...`)); const retrieveCommand = - "sfdx force:mdapi:retrieve" + - ` --retrievetargetdir ${metadataFolder}` + - ` --unpackaged ${packageXml}` + - ` --wait 
${process.env.SFDX_RETRIEVE_WAIT_MINUTES || "60"}` + - (debug ? " --verbose" : ""); + 'sf project retrieve start' + + ` --target-metadata-dir ${metadataFolder}` + + ` --manifest ${packageXml}` + + ` --wait ${process.env.SFDX_RETRIEVE_WAIT_MINUTES || '60'}` + + (debug ? ' --verbose' : ''); const retrieveRes = await execSfdxJson(retrieveCommand, this, { output: false, fail: true, debug, }); if (debug) { - uxLog(commandThis, retrieveRes); + uxLog("other", commandThis, retrieveRes); } // Unzip metadatas - uxLog(commandThis, c.cyan("Unzipping metadatas...")); - await extractZip(path.join(metadataFolder, "unpackaged.zip"), { + uxLog("action", commandThis, c.cyan('Unzipping metadatas...')); + await extractZip(path.join(metadataFolder, 'unpackaged.zip'), { dir: metadataFolder, }); - await fs.unlink(path.join(metadataFolder, "unpackaged.zip")); + await fs.unlink(path.join(metadataFolder, 'unpackaged.zip')); } } // Prompt user to select a list of metadata types public static async promptMetadataTypes() { - const metadataTypes = sortArray(listMetadataTypes(), { by: ["xmlName"], order: ["asc"] }); + const metadataTypes = sortArray(listMetadataTypes(), { by: ['xmlName'], order: ['asc'] }); const metadataResp = await prompts({ - type: "multiselect", - message: c.cyanBright("Please select metadata types"), + type: 'multiselect', + message: c.cyanBright('Please select metadata types'), + description: 'Choose the Salesforce metadata types to include in this operation', choices: metadataTypes.map((metadataType: any) => { return { - title: c.cyan(`${metadataType.xmlName || "no xml name"} (${metadataType.directoryName || "no dir name"})`), + title: c.cyan(`${metadataType.xmlName || 'no xml name'} (${metadataType.directoryName || 'no dir name'})`), value: metadataType, }; }), @@ -676,10 +401,127 @@ Issue tracking: https://github.com/forcedotcom/cli/issues/2426`), if (!isGitRepo()) { return []; } - const files = (await git().status(["--porcelain"])).files; + const files = (await 
git().status(['--porcelain'])).files; const filesSorted = files.sort((a, b) => (a.path > b.path ? 1 : -1)); return filesSorted; } + + // List updated files and reformat them as string + public static async listChangedOrFromCurrentCommitFiles(): Promise { + if (!isGitRepo()) { + return []; + } + const changedFiles = await MetadataUtils.listChangedFiles(); + const commitDetails = await git().show(['--name-only', '--pretty=format:']); + const updatedFiles = commitDetails.trim().split('\n') + .filter(file => { + return file && !changedFiles.some(changedFile => changedFile.path === file); + }) + .map((file) => { + return { path: file, index: 'x', working_dir: 'x' }; + }); + const files = [...changedFiles, ...updatedFiles] + const filesSorted = files.sort((a, b) => (a.path > b.path ? 1 : -1)); + return filesSorted; + } + + public static getMetadataPrettyNames(metadataFilePaths: string[], bold = false): Map { + const metadataList = listMetadataTypes(); + const metadataFilePathsHuman = new Map(); + for (const fileRaw of metadataFilePaths) { + const file = fileRaw.replace(/\\/g, '/').replace('force-app/main/default/', ''); + let fileHuman = "" + file; + for (const metadataDesc of metadataList) { + if (file.includes(metadataDesc.directoryName || "THEREISNOT")) { + const splits = file.split(metadataDesc.directoryName + "/"); + const endOfPath = splits[1] || splits[0] || ""; + const suffix = metadataDesc.suffix ?? "THEREISNOT"; + let metadataName = endOfPath.includes("." + suffix + "-meta.xml") ? + endOfPath.replace("." + suffix + "-meta.xml", "") : + endOfPath.includes("." + suffix) ? + endOfPath.replace("." 
+ suffix, "") : + endOfPath; + if (bold) { + metadataName = "*" + metadataName + "*"; + } + fileHuman = metadataDesc.xmlName + " " + metadataName; + continue; + } + } + metadataFilePathsHuman.set(fileRaw, fileHuman); + } + return metadataFilePathsHuman; + } + + public static async findMetaFileFromTypeAndName(packageXmlType: string, packageXmlName: string, packageDirectories: any[] = []) { + // Handle default package directory if not provided as input + if (packageDirectories.length === 0) { + packageDirectories = [ + { + fullPath: path.join(process.cwd(), "force-app"), + path: "force-app" + } + ] + } + // Find metadata type from packageXmlName + const metadataList = listMetadataTypes(); + const metadataTypes = metadataList.filter(metadata => metadata.xmlName === packageXmlType); + if (metadataTypes.length === 0) { + // Strange, we shouldn't get here, or it means listMetadataTypes content is not up to date + return null; + } + const metadataType = metadataTypes[0]; + + // Look for matching file in sources + const globExpressions = [ + `**/${metadataType.directoryName}/**/${packageXmlName}.${metadataType.suffix || ""}`, // Works for not-xml files + `**/${metadataType.directoryName}/**/${packageXmlName}.${metadataType.suffix || ""}-meta.xml` // Works for all XML files + ] + for (const packageDirectory of packageDirectories) { + for (const globExpression of globExpressions) { + const sourceFiles = await glob(globExpression, { + cwd: packageDirectory.fullPath, + ignore: GLOB_IGNORE_PATTERNS + }); + if (sourceFiles.length > 0) { + const metaFile = path.join(packageDirectory.path, sourceFiles[0]); + return metaFile.replace(/\\/g, "/"); + } + } + } + return null; + } + + public static async promptFlow() { + const flowFiles = await glob("**/*.flow-meta.xml", { ignore: GLOB_IGNORE_PATTERNS }); + sortCrossPlatform(flowFiles); + const flowSelectRes = await prompts({ + type: 'select', + message: 'Please select the Flow you want to visually compare', + description: 'Choose a 
Flow file to perform visual comparison operations on', + choices: flowFiles.map(flowFile => { + return { value: flowFile, title: path.basename(flowFile, ".flow-meta.xml") } + }) + }); + return flowSelectRes.value.replace(/\\/g, "/"); + } + + public static async promptMultipleFlows() { + const flowFiles = await glob("**/*.flow-meta.xml", { ignore: GLOB_IGNORE_PATTERNS }); + sortCrossPlatform(flowFiles); + const flowSelectRes = await prompts({ + type: 'multiselect', + message: 'Please select the Flows you want to create the documentation', + description: 'Choose multiple Flow files to generate documentation for', + choices: flowFiles.map(flowFile => { + return { value: flowFile, title: path.basename(flowFile, ".flow-meta.xml") } + }) + }); + + return flowSelectRes.value.map(flowFile => flowFile.replace(/\\/g, "/")); + } + + } export { MetadataUtils }; diff --git a/src/common/metadata-utils/metadataList.ts b/src/common/metadata-utils/metadataList.ts index 48ef63d42..70505fc0c 100644 --- a/src/common/metadata-utils/metadataList.ts +++ b/src/common/metadata-utils/metadataList.ts @@ -1,1322 +1,2213 @@ export function listMetadataTypes() { - // v54 + // v62 return [ { - directoryName: "installedPackages", - inFolder: false, - metaFile: false, - suffix: "installedPackage", - xmlName: "InstalledPackage", + "directoryName": "conversationMessageDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "conversationMessageDefinition", + "xmlName": "ConversationMessageDefinition" }, { - childXmlNames: "CustomLabel", - directoryName: "labels", - inFolder: false, - metaFile: false, - suffix: "labels", - xmlName: "CustomLabels", + "directoryName": "genAiPromptTemplateActivations", + "inFolder": false, + "metaFile": false, + "suffix": "genAiPromptTemplateActivation", + "xmlName": "GenAiPromptTemplateActv" }, { - directoryName: "staticresources", - inFolder: false, - metaFile: true, - suffix: "resource", - xmlName: "StaticResource", + "directoryName": 
"genAiPromptTemplates", + "inFolder": false, + "metaFile": false, + "suffix": "genAiPromptTemplate", + "xmlName": "GenAiPromptTemplate" }, { - directoryName: "scontrols", - inFolder: false, - metaFile: true, - suffix: "scf", - xmlName: "Scontrol", + "directoryName": "dw", + "inFolder": false, + "metaFile": false, + "suffix": "dwl", + "xmlName": "DataWeaveResource" }, { - directoryName: "certs", - inFolder: false, - metaFile: true, - suffix: "crt", - xmlName: "Certificate", + "directoryName": "useraccesspolicies", + "inFolder": false, + "metaFile": false, + "suffix": "useraccesspolicy", + "xmlName": "UserAccessPolicy" }, { - directoryName: "messageChannels", - inFolder: false, - metaFile: false, - suffix: "messageChannel", - xmlName: "LightningMessageChannel", + "directoryName": "webstoretemplate", + "inFolder": false, + "metaFile": false, + "suffix": "webstoretemplate", + "xmlName": "WebStoreTemplate" }, { - directoryName: "aura", - inFolder: false, - metaFile: false, - xmlName: "AuraDefinitionBundle", + "directoryName": "messagingChannels", + "inFolder": false, + "metaFile": false, + "suffix": "messagingChannel", + "xmlName": "MessagingChannel" }, { - directoryName: "lwc", - inFolder: false, - metaFile: false, - xmlName: "LightningComponentBundle", + "directoryName": "recordAlertCategories", + "inFolder": false, + "metaFile": false, + "suffix": "recordAlertCategory", + "xmlName": "RecordAlertCategory" }, { - directoryName: "components", - inFolder: false, - metaFile: true, - suffix: "component", - xmlName: "ApexComponent", + "directoryName": "aiApplicationConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "aiapplicationconfig", + "xmlName": "AIApplicationConfig" }, { - directoryName: "pages", - inFolder: false, - metaFile: true, - suffix: "page", - xmlName: "ApexPage", + "directoryName": "aiApplications", + "inFolder": false, + "metaFile": false, + "suffix": "ai", + "xmlName": "AIApplication" }, { - directoryName: "queues", - inFolder: false, - 
metaFile: false, - suffix: "queue", - xmlName: "Queue", + "directoryName": "mlDataDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "mlDataDefinition", + "xmlName": "MLDataDefinition" }, { - directoryName: "CaseSubjectParticles", - inFolder: false, - metaFile: false, - suffix: "CaseSubjectParticle", - xmlName: "CaseSubjectParticle", + "directoryName": "mlPredictions", + "inFolder": false, + "metaFile": false, + "suffix": "mlPrediction", + "xmlName": "MLPredictionDefinition" }, { - directoryName: "dataSources", - inFolder: false, - metaFile: false, - suffix: "dataSource", - xmlName: "ExternalDataSource", + "directoryName": "eventRelays", + "inFolder": false, + "metaFile": false, + "suffix": "eventRelay", + "xmlName": "EventRelayConfig" }, { - directoryName: "namedCredentials", - inFolder: false, - metaFile: false, - suffix: "namedCredential", - xmlName: "NamedCredential", + "directoryName": "omniSupervisorConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "omniSupervisorConfig", + "xmlName": "OmniSupervisorConfig" }, { - directoryName: "externalServiceRegistrations", - inFolder: false, - metaFile: false, - suffix: "externalServiceRegistration", - xmlName: "ExternalServiceRegistration", + "directoryName": "uiObjectRelationConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "uiObjectRelationConfig", + "xmlName": "UIObjectRelationConfig" }, { - directoryName: "roles", - inFolder: false, - metaFile: false, - suffix: "role", - xmlName: "Role", + "directoryName": "timelineObjectDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "timelineObjectDefinition", + "xmlName": "TimelineObjectDefinition" }, { - directoryName: "groups", - inFolder: false, - metaFile: false, - suffix: "group", - xmlName: "Group", + "directoryName": "slackapps", + "inFolder": false, + "metaFile": true, + "suffix": "slackapp", + "xmlName": "SlackApp" }, { - directoryName: "globalValueSets", - inFolder: false, - metaFile: false, - suffix: 
"globalValueSet", - xmlName: "GlobalValueSet", + "directoryName": "viewdefinitions", + "inFolder": false, + "metaFile": true, + "suffix": "view", + "xmlName": "ViewDefinition" }, { - directoryName: "standardValueSets", - inFolder: false, - metaFile: false, - suffix: "standardValueSet", - xmlName: "StandardValueSet", + "directoryName": "ActionLauncherItemDef", + "inFolder": false, + "metaFile": false, + "suffix": "actionLauncherItemDef", + "xmlName": "ActionLauncherItemDef" }, { - directoryName: "customPermissions", - inFolder: false, - metaFile: false, - suffix: "customPermission", - xmlName: "CustomPermission", + "directoryName": "loyaltyProgramSetups", + "inFolder": false, + "metaFile": false, + "suffix": "loyaltyProgramSetup", + "xmlName": "LoyaltyProgramSetup" }, { - childXmlNames: [ - "CustomField", - "Index", - "BusinessProcess", - "RecordType", - "CompactLayout", - "WebLink", - "ValidationRule", - "SharingReason", - "ListView", - "FieldSet", - ], - directoryName: "objects", - inFolder: false, - metaFile: false, - suffix: "object", - xmlName: "CustomObject", + "directoryName": "documentCategory", + "inFolder": false, + "metaFile": false, + "suffix": "documentCategory", + "xmlName": "DocumentCategory" }, { - directoryName: "businessProcesses", - inFolder: false, - metaFile: false, - suffix: "businessProcess", - xmlName: "BusinessProcess", + "directoryName": "documentCategoryDocumentTypes", + "inFolder": false, + "metaFile": false, + "suffix": "documentCategoryDocumentType", + "xmlName": "DocumentCategoryDocumentType" }, { - directoryName: "compactLayouts", - inFolder: false, - metaFile: false, - suffix: "compactLayout", - xmlName: "CompactLayout", + "directoryName": "AssessmentQuestions", + "inFolder": false, + "metaFile": false, + "suffix": "aq", + "xmlName": "AssessmentQuestion" }, { - directoryName: "fields", - inFolder: false, - metaFile: false, - suffix: "field", - xmlName: "CustomField", + "directoryName": "AssessmentQuestionSets", + "inFolder": false, + 
"metaFile": false, + "suffix": "aqs", + "xmlName": "AssessmentQuestionSet" }, { - directoryName: "fieldSets", - inFolder: false, - metaFile: false, - suffix: "fieldSet", - xmlName: "FieldSet", + "directoryName": "decisionTables", + "inFolder": false, + "metaFile": false, + "suffix": "decisionTable", + "xmlName": "DecisionTable" }, { - directoryName: "listViews", - inFolder: false, - metaFile: false, - suffix: "listView", - xmlName: "ListView", + "directoryName": "decisionTableDatasetLinks", + "inFolder": false, + "metaFile": false, + "suffix": "decisionTableDatasetLink", + "xmlName": "DecisionTableDatasetLink" }, { - directoryName: "recordTypes", - inFolder: false, - metaFile: false, - suffix: "recordType", - xmlName: "RecordType", + "directoryName": "forecastingFilters", + "inFolder": false, + "metaFile": false, + "suffix": "forecastingFilter", + "xmlName": "ForecastingFilter" }, { - directoryName: "sharingReasons", - inFolder: false, - metaFile: false, - suffix: "sharingReason", - xmlName: "SharingReason", + "directoryName": "forecastingFilterConditions", + "inFolder": false, + "metaFile": false, + "suffix": "forecastingFilterCondition", + "xmlName": "ForecastingFilterCondition" }, { - directoryName: "validationRules", - inFolder: false, - metaFile: false, - suffix: "validationRule", - xmlName: "ValidationRule", + "directoryName": "forecastingSourceDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "forecastingSourceDefinition", + "xmlName": "ForecastingSourceDefinition" }, { - directoryName: "webLinks", - inFolder: false, - metaFile: false, - suffix: "webLink", - xmlName: "WebLink", + "directoryName": "forecastingTypes", + "inFolder": false, + "metaFile": false, + "suffix": "forecastingType", + "xmlName": "ForecastingType" }, { - directoryName: "reportTypes", - inFolder: false, - metaFile: false, - suffix: "reportType", - xmlName: "ReportType", + "directoryName": "forecastingTypeSources", + "inFolder": false, + "metaFile": false, + "suffix": 
"forecastingTypeSource", + "xmlName": "ForecastingTypeSource" }, { - directoryName: "reports", - inFolder: true, - metaFile: false, - suffix: "report", - xmlName: "Report", + "directoryName": "decisionMatrixDefinition", + "inFolder": false, + "metaFile": false, + "suffix": "decisionMatrixDefinition", + "xmlName": "DecisionMatrixDefinition" }, { - directoryName: "dashboards", - inFolder: true, - metaFile: false, - suffix: "dashboard", - xmlName: "Dashboard", + "directoryName": "expressionSetDefinition", + "inFolder": false, + "metaFile": false, + "suffix": "expressionSetDefinition", + "xmlName": "ExpressionSetDefinition" }, { - directoryName: "analyticSnapshots", - inFolder: false, - metaFile: false, - suffix: "snapshot", - xmlName: "AnalyticSnapshot", + "directoryName": "explainabilityActionDefinition", + "inFolder": false, + "metaFile": false, + "suffix": "explainabilityActionDefinition", + "xmlName": "ExplainabilityActionDefinition" }, { - directoryName: "feedFilters", - inFolder: false, - metaFile: false, - suffix: "feedFilter", - xmlName: "CustomFeedFilter", + "directoryName": "explainabilityActionVersion", + "inFolder": false, + "metaFile": false, + "suffix": "explainabilityActionVersion", + "xmlName": "ExplainabilityActionVersion" }, { - directoryName: "layouts", - inFolder: false, - metaFile: false, - suffix: "layout", - xmlName: "Layout", + "directoryName": "applicationSubtypeDefinition", + "inFolder": false, + "metaFile": false, + "suffix": "applicationSubtypeDefinition", + "xmlName": "ApplicationSubtypeDefinition" }, { - directoryName: "documents", - inFolder: true, - metaFile: true, - suffix: "document", - xmlName: "Document", + "directoryName": "businessProcessTypeDefinition", + "inFolder": false, + "metaFile": false, + "suffix": "businessProcessTypeDefinition", + "xmlName": "BusinessProcessTypeDefinition" }, { - directoryName: "weblinks", - inFolder: false, - metaFile: false, - suffix: "weblink", - xmlName: "CustomPageWebLink", + "directoryName": 
"actionPlanTemplates", + "inFolder": false, + "metaFile": false, + "suffix": "apt", + "xmlName": "ActionPlanTemplate" }, { - directoryName: "letterhead", - inFolder: false, - metaFile: false, - suffix: "letter", - xmlName: "Letterhead", + "directoryName": "documentTypes", + "inFolder": false, + "metaFile": false, + "suffix": "documentType", + "xmlName": "DocumentType" }, { - directoryName: "email", - inFolder: true, - metaFile: true, - suffix: "email", - xmlName: "EmailTemplate", + "directoryName": "recommendationStrategies", + "inFolder": false, + "metaFile": false, + "suffix": "recommendationStrategy", + "xmlName": "RecommendationStrategy" }, { - directoryName: "quickActions", - inFolder: false, - metaFile: false, - suffix: "quickAction", - xmlName: "QuickAction", + "directoryName": "recordActionDeployments", + "inFolder": false, + "metaFile": false, + "suffix": "deployment", + "xmlName": "RecordActionDeployment" }, { - directoryName: "flexipages", - inFolder: false, - metaFile: false, - suffix: "flexipage", - xmlName: "FlexiPage", + "directoryName": "relationshipGraphDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "relationshipGraphDefinition", + "xmlName": "RelationshipGraphDefinition" }, { - directoryName: "tabs", - inFolder: false, - metaFile: false, - suffix: "tab", - xmlName: "CustomTab", + "directoryName": "omniDataTransforms", + "inFolder": false, + "metaFile": false, + "suffix": "rpt", + "xmlName": "OmniDataTransform" }, { - directoryName: "customApplicationComponents", - inFolder: false, - metaFile: false, - suffix: "customApplicationComponent", - xmlName: "CustomApplicationComponent", + "directoryName": "OmniInteractionConfig", + "inFolder": false, + "metaFile": false, + "suffix": "omniInteractionConfig", + "xmlName": "OmniInteractionConfig" }, { - directoryName: "applications", - inFolder: false, - metaFile: false, - suffix: "app", - xmlName: "CustomApplication", + "directoryName": "omniIntegrationProcedures", + "inFolder": false, 
+ "metaFile": false, + "suffix": "oip", + "xmlName": "OmniIntegrationProcedure" }, { - directoryName: "portals", - inFolder: false, - metaFile: false, - suffix: "portal", - xmlName: "Portal", + "directoryName": "omniScripts", + "inFolder": false, + "metaFile": false, + "suffix": "os", + "xmlName": "OmniScript" }, { - directoryName: "customMetadata", - inFolder: false, - metaFile: false, - suffix: "md", - xmlName: "CustomMetadata", + "directoryName": "omniUiCard", + "inFolder": false, + "metaFile": false, + "suffix": "ouc", + "xmlName": "OmniUiCard" }, { - directoryName: "flows", - inFolder: false, - metaFile: false, - suffix: "flow", - xmlName: "Flow", + "directoryName": "customindex", + "inFolder": false, + "metaFile": false, + "suffix": "indx", + "xmlName": "CustomIndex " }, { - directoryName: "flowDefinitions", - inFolder: false, - metaFile: false, - suffix: "flowDefinition", - xmlName: "FlowDefinition", + "directoryName": "batchCalcJobDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "batchCalcJobDefinition", + "xmlName": "BatchCalcJobDefinition" }, { - childXmlNames: [ - "WorkflowFieldUpdate", - "WorkflowKnowledgePublish", - "WorkflowTask", - "WorkflowAlert", - "WorkflowSend", - "WorkflowOutboundMessage", - "WorkflowFlowAction", - "WorkflowRule", - ], - directoryName: "workflows", - inFolder: false, - metaFile: false, - suffix: "workflow", - xmlName: "Workflow", + "directoryName": "batchProcessJobDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "batchProcessJobDefinition", + "xmlName": "BatchProcessJobDefinition" + }, + { + "directoryName": "installedPackages", + "inFolder": false, + "metaFile": false, + "suffix": "installedPackage", + "xmlName": "InstalledPackage" + }, + { + "directoryName": "staticresources", + "inFolder": false, + "metaFile": true, + "suffix": "resource", + "xmlName": "StaticResource" + }, + { + "directoryName": "scontrols", + "inFolder": false, + "metaFile": true, + "suffix": "scf", + "xmlName": 
"Scontrol" + }, + { + "directoryName": "certs", + "inFolder": false, + "metaFile": true, + "suffix": "crt", + "xmlName": "Certificate" + }, + { + "directoryName": "messageChannels", + "inFolder": false, + "metaFile": false, + "suffix": "messageChannel", + "xmlName": "LightningMessageChannel" + }, + { + "directoryName": "aura", + "inFolder": false, + "metaFile": false, + "xmlName": "AuraDefinitionBundle" + }, + { + "directoryName": "lwc", + "inFolder": false, + "metaFile": false, + "xmlName": "LightningComponentBundle" + }, + { + "directoryName": "components", + "inFolder": false, + "metaFile": true, + "suffix": "component", + "xmlName": "ApexComponent" + }, + { + "directoryName": "pages", + "inFolder": false, + "metaFile": true, + "suffix": "page", + "xmlName": "ApexPage" }, { - childXmlNames: "AssignmentRule", - directoryName: "assignmentRules", - inFolder: false, - metaFile: false, - suffix: "assignmentRules", - xmlName: "AssignmentRules", + "directoryName": "queues", + "inFolder": false, + "metaFile": false, + "suffix": "queue", + "xmlName": "Queue" }, { - childXmlNames: "AutoResponseRule", - directoryName: "autoResponseRules", - inFolder: false, - metaFile: false, - suffix: "autoResponseRules", - xmlName: "AutoResponseRules", + "directoryName": "CaseSubjectParticles", + "inFolder": false, + "metaFile": false, + "suffix": "CaseSubjectParticle", + "xmlName": "CaseSubjectParticle" }, { - childXmlNames: "EscalationRule", - directoryName: "escalationRules", - inFolder: false, - metaFile: false, - suffix: "escalationRules", - xmlName: "EscalationRules", + "directoryName": "dataSources", + "inFolder": false, + "metaFile": false, + "suffix": "dataSource", + "xmlName": "ExternalDataSource" }, { - directoryName: "postTemplates", - inFolder: false, - metaFile: false, - suffix: "postTemplate", - xmlName: "PostTemplate", + "directoryName": "namedCredentials", + "inFolder": false, + "metaFile": false, + "suffix": "namedCredential", + "xmlName": "NamedCredential" }, { - 
directoryName: "approvalProcesses", - inFolder: false, - metaFile: false, - suffix: "approvalProcess", - xmlName: "ApprovalProcess", + "directoryName": "externalCredentials", + "inFolder": false, + "metaFile": false, + "suffix": "externalCredential", + "xmlName": "ExternalCredential" }, { - directoryName: "homePageComponents", - inFolder: false, - metaFile: false, - suffix: "homePageComponent", - xmlName: "HomePageComponent", + "directoryName": "externalServiceRegistrations", + "inFolder": false, + "metaFile": false, + "suffix": "externalServiceRegistration", + "xmlName": "ExternalServiceRegistration" }, { - directoryName: "homePageLayouts", - inFolder: false, - metaFile: false, - suffix: "homePageLayout", - xmlName: "HomePageLayout", + "directoryName": "roles", + "inFolder": false, + "metaFile": false, + "suffix": "role", + "xmlName": "Role" }, { - directoryName: "objectTranslations", - inFolder: false, - metaFile: false, - suffix: "objectTranslation", - xmlName: "CustomObjectTranslation", + "directoryName": "groups", + "inFolder": false, + "metaFile": false, + "suffix": "group", + "xmlName": "Group" }, { - directoryName: "translations", - inFolder: false, - metaFile: false, - suffix: "translation", - xmlName: "Translations", + "directoryName": "globalValueSets", + "inFolder": false, + "metaFile": false, + "suffix": "globalValueSet", + "xmlName": "GlobalValueSet" }, { - directoryName: "globalValueSetTranslations", - inFolder: false, - metaFile: false, - suffix: "globalValueSetTranslation", - xmlName: "GlobalValueSetTranslation", + "directoryName": "standardValueSets", + "inFolder": false, + "metaFile": false, + "suffix": "standardValueSet", + "xmlName": "StandardValueSet" }, { - directoryName: "standardValueSetTranslations", - inFolder: false, - metaFile: false, - suffix: "standardValueSetTranslation", - xmlName: "StandardValueSetTranslation", + "directoryName": "customPermissions", + "inFolder": false, + "metaFile": false, + "suffix": "customPermission", + 
"xmlName": "CustomPermission" }, { - directoryName: "classes", - inFolder: false, - metaFile: true, - suffix: "cls", - xmlName: "ApexClass", + "childXmlNames": [ + "CustomField", + "Index", + "BusinessProcess", + "RecordType", + "CompactLayout", + "WebLink", + "ValidationRule", + "SharingReason", + "ListView", + "FieldSet" + ], + "directoryName": "objects", + "inFolder": false, + "metaFile": false, + "suffix": "object", + "xmlName": "CustomObject" }, { - directoryName: "triggers", - inFolder: false, - metaFile: true, - suffix: "trigger", - xmlName: "ApexTrigger", + "directoryName": "businessProcesses", + "inFolder": false, + "metaFile": false, + "suffix": "businessProcess", + "xmlName": "BusinessProcess" }, { - directoryName: "testSuites", - inFolder: false, - metaFile: false, - suffix: "testSuite", - xmlName: "ApexTestSuite", + "directoryName": "compactLayouts", + "inFolder": false, + "metaFile": false, + "suffix": "compactLayout", + "xmlName": "CompactLayout" }, { - directoryName: "profiles", - inFolder: false, - metaFile: false, - suffix: "profile", - xmlName: "Profile", + "directoryName": "fields", + "inFolder": false, + "metaFile": false, + "suffix": "field", + "xmlName": "CustomField" }, { - directoryName: "permissionsets", - inFolder: false, - metaFile: false, - suffix: "permissionset", - xmlName: "PermissionSet", + "directoryName": "fieldSets", + "inFolder": false, + "metaFile": false, + "suffix": "fieldSet", + "xmlName": "FieldSet" }, { - directoryName: "mutingpermissionsets", - inFolder: false, - metaFile: false, - suffix: "mutingpermissionset", - xmlName: "MutingPermissionSet", + "directoryName": "indexes", + "inFolder": false, + "metaFile": false, + "suffix": "index", + "xmlName": "Index" }, { - directoryName: "permissionsetgroups", - inFolder: false, - metaFile: false, - suffix: "permissionsetgroup", - xmlName: "PermissionSetGroup", + "directoryName": "listViews", + "inFolder": false, + "metaFile": false, + "suffix": "listView", + "xmlName": "ListView" 
}, { - directoryName: "profilePasswordPolicies", - inFolder: false, - metaFile: false, - suffix: "profilePasswordPolicy", - xmlName: "ProfilePasswordPolicy", + "directoryName": "recordTypes", + "inFolder": false, + "metaFile": false, + "suffix": "recordType", + "xmlName": "RecordType" }, { - directoryName: "profileSessionSettings", - inFolder: false, - metaFile: false, - suffix: "profileSessionSetting", - xmlName: "ProfileSessionSetting", + "directoryName": "sharingReasons", + "inFolder": false, + "metaFile": false, + "suffix": "sharingReason", + "xmlName": "SharingReason" }, { - directoryName: "myDomainDiscoverableLogins", - inFolder: false, - metaFile: false, - suffix: "myDomainDiscoverableLogin", - xmlName: "MyDomainDiscoverableLogin", + "directoryName": "validationRules", + "inFolder": false, + "metaFile": false, + "suffix": "validationRule", + "xmlName": "ValidationRule" }, { - directoryName: "oauthcustomscopes", - inFolder: false, - metaFile: false, - suffix: "oauthcustomscope", - xmlName: "OauthCustomScope", + "directoryName": "webLinks", + "inFolder": false, + "metaFile": false, + "suffix": "webLink", + "xmlName": "WebLink" }, { - directoryName: "datacategorygroups", - inFolder: false, - metaFile: false, - suffix: "datacategorygroup", - xmlName: "DataCategoryGroup", + "directoryName": "reportTypes", + "inFolder": false, + "metaFile": false, + "suffix": "reportType", + "xmlName": "ReportType" }, { - directoryName: "remoteSiteSettings", - inFolder: false, - metaFile: false, - suffix: "remoteSite", - xmlName: "RemoteSiteSetting", + "directoryName": "reports", + "inFolder": true, + "metaFile": false, + "suffix": "report", + "xmlName": "Report" }, { - directoryName: "cspTrustedSites", - inFolder: false, - metaFile: false, - suffix: "cspTrustedSite", - xmlName: "CspTrustedSite", + "directoryName": "dashboards", + "inFolder": true, + "metaFile": false, + "suffix": "dashboard", + "xmlName": "Dashboard" }, { - directoryName: "redirectWhitelistUrls", - inFolder: 
false, - metaFile: false, - suffix: "redirectWhitelistUrl", - xmlName: "RedirectWhitelistUrl", + "directoryName": "analyticSnapshots", + "inFolder": false, + "metaFile": false, + "suffix": "snapshot", + "xmlName": "AnalyticSnapshot" }, { - childXmlNames: "MatchingRule", - directoryName: "matchingRules", - inFolder: false, - metaFile: false, - suffix: "matchingRule", - xmlName: "MatchingRules", + "directoryName": "feedFilters", + "inFolder": false, + "metaFile": false, + "suffix": "feedFilter", + "xmlName": "CustomFeedFilter" }, { - directoryName: "duplicateRules", - inFolder: false, - metaFile: false, - suffix: "duplicateRule", - xmlName: "DuplicateRule", + "directoryName": "layouts", + "inFolder": false, + "metaFile": false, + "suffix": "layout", + "xmlName": "Layout" }, { - directoryName: "cleanDataServices", - inFolder: false, - metaFile: false, - suffix: "cleanDataService", - xmlName: "CleanDataService", + "directoryName": "documents", + "inFolder": true, + "metaFile": true, + "suffix": "document", + "xmlName": "Document" }, { - directoryName: "skills", - inFolder: false, - metaFile: false, - suffix: "skill", - xmlName: "Skill", + "directoryName": "weblinks", + "inFolder": false, + "metaFile": false, + "suffix": "weblink", + "xmlName": "CustomPageWebLink" }, { - directoryName: "serviceChannels", - inFolder: false, - metaFile: false, - suffix: "serviceChannel", - xmlName: "ServiceChannel", + "directoryName": "letterhead", + "inFolder": false, + "metaFile": false, + "suffix": "letter", + "xmlName": "Letterhead" }, { - directoryName: "queueRoutingConfigs", - inFolder: false, - metaFile: false, - suffix: "queueRoutingConfig", - xmlName: "QueueRoutingConfig", + "directoryName": "email", + "inFolder": true, + "metaFile": true, + "suffix": "email", + "xmlName": "EmailTemplate" }, { - directoryName: "servicePresenceStatuses", - inFolder: false, - metaFile: false, - suffix: "servicePresenceStatus", - xmlName: "ServicePresenceStatus", + "directoryName": "quickActions", + 
"inFolder": false, + "metaFile": false, + "suffix": "quickAction", + "xmlName": "QuickAction" }, { - directoryName: "presenceDeclineReasons", - inFolder: false, - metaFile: false, - suffix: "presenceDeclineReason", - xmlName: "PresenceDeclineReason", + "directoryName": "flexipages", + "inFolder": false, + "metaFile": false, + "suffix": "flexipage", + "xmlName": "FlexiPage" }, { - directoryName: "presenceUserConfigs", - inFolder: false, - metaFile: false, - suffix: "presenceUserConfig", - xmlName: "PresenceUserConfig", + "directoryName": "tabs", + "inFolder": false, + "metaFile": false, + "suffix": "tab", + "xmlName": "CustomTab" }, { - directoryName: "workSkillRoutings", - inFolder: false, - metaFile: false, - suffix: "workSkillRouting", - xmlName: "WorkSkillRouting", + "directoryName": "customApplicationComponents", + "inFolder": false, + "metaFile": false, + "suffix": "customApplicationComponent", + "xmlName": "CustomApplicationComponent" }, { - directoryName: "authproviders", - inFolder: false, - metaFile: false, - suffix: "authprovider", - xmlName: "AuthProvider", + "directoryName": "applications", + "inFolder": false, + "metaFile": false, + "suffix": "app", + "xmlName": "CustomApplication" }, { - directoryName: "eclair", - inFolder: false, - metaFile: true, - suffix: "geodata", - xmlName: "EclairGeoData", + "directoryName": "portals", + "inFolder": false, + "metaFile": false, + "suffix": "portal", + "xmlName": "Portal" }, { - directoryName: "channelLayouts", - inFolder: false, - metaFile: false, - suffix: "channelLayout", - xmlName: "ChannelLayout", + "directoryName": "customMetadata", + "inFolder": false, + "metaFile": false, + "suffix": "md", + "xmlName": "CustomMetadata" }, { - directoryName: "contentassets", - inFolder: false, - metaFile: true, - suffix: "asset", - xmlName: "ContentAsset", + "directoryName": "flows", + "inFolder": false, + "metaFile": false, + "suffix": "flow", + "xmlName": "Flow" }, { - directoryName: "sites", - inFolder: false, - 
metaFile: false, - suffix: "site", - xmlName: "CustomSite", + "directoryName": "flowtests", + "inFolder": false, + "metaFile": false, + "suffix": "flowtest", + "xmlName": "FlowTest" }, { - childXmlNames: ["SharingOwnerRule", "SharingCriteriaRule", "SharingGuestRule"], - directoryName: "sharingRules", - inFolder: false, - metaFile: false, - suffix: "sharingRules", - xmlName: "SharingRules", + "directoryName": "flowDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "flowDefinition", + "xmlName": "FlowDefinition" }, { - directoryName: "sharingSets", - inFolder: false, - metaFile: false, - suffix: "sharingSet", - xmlName: "SharingSet", + "directoryName": "postTemplates", + "inFolder": false, + "metaFile": false, + "suffix": "postTemplate", + "xmlName": "PostTemplate" }, { - directoryName: "iframeWhiteListUrlSettings", - inFolder: false, - metaFile: false, - suffix: "iframeWhiteListUrlSettings", - xmlName: "IframeWhiteListUrlSettings", + "directoryName": "approvalProcesses", + "inFolder": false, + "metaFile": false, + "suffix": "approvalProcess", + "xmlName": "ApprovalProcess" }, { - directoryName: "communities", - inFolder: false, - metaFile: false, - suffix: "community", - xmlName: "Community", + "directoryName": "homePageComponents", + "inFolder": false, + "metaFile": false, + "suffix": "homePageComponent", + "xmlName": "HomePageComponent" }, { - directoryName: "ChatterExtensions", - inFolder: false, - metaFile: false, - suffix: "ChatterExtension", - xmlName: "ChatterExtension", + "directoryName": "homePageLayouts", + "inFolder": false, + "metaFile": false, + "suffix": "homePageLayout", + "xmlName": "HomePageLayout" }, { - directoryName: "platformEventChannels", - inFolder: false, - metaFile: false, - suffix: "platformEventChannel", - xmlName: "PlatformEventChannel", + "directoryName": "classes", + "inFolder": false, + "metaFile": true, + "suffix": "cls", + "xmlName": "ApexClass" }, { - directoryName: "platformEventChannelMembers", - inFolder: false, 
- metaFile: false, - suffix: "platformEventChannelMember", - xmlName: "PlatformEventChannelMember", + "directoryName": "triggers", + "inFolder": false, + "metaFile": true, + "suffix": "trigger", + "xmlName": "ApexTrigger" }, { - directoryName: "PlatformEventSubscriberConfigs", - inFolder: false, - metaFile: false, - suffix: "platformEventSubscriberConfig", - xmlName: "PlatformEventSubscriberConfig", + "directoryName": "testSuites", + "inFolder": false, + "metaFile": false, + "suffix": "testSuite", + "xmlName": "ApexTestSuite" }, { - directoryName: "callCenters", - inFolder: false, - metaFile: false, - suffix: "callCenter", - xmlName: "CallCenter", + "directoryName": "permissionsets", + "inFolder": false, + "metaFile": false, + "suffix": "permissionset", + "xmlName": "PermissionSet" }, { - directoryName: "milestoneTypes", - inFolder: false, - metaFile: false, - suffix: "milestoneType", - xmlName: "MilestoneType", + "directoryName": "mutingpermissionsets", + "inFolder": false, + "metaFile": false, + "suffix": "mutingpermissionset", + "xmlName": "MutingPermissionSet" }, { - directoryName: "entitlementProcesses", - inFolder: false, - metaFile: false, - suffix: "entitlementProcess", - xmlName: "EntitlementProcess", + "directoryName": "permissionsetgroups", + "inFolder": false, + "metaFile": false, + "suffix": "permissionsetgroup", + "xmlName": "PermissionSetGroup" }, { - directoryName: "entitlementTemplates", - inFolder: false, - metaFile: false, - suffix: "entitlementTemplate", - xmlName: "EntitlementTemplate", + "directoryName": "profilePasswordPolicies", + "inFolder": false, + "metaFile": false, + "suffix": "profilePasswordPolicy", + "xmlName": "ProfilePasswordPolicy" }, { - directoryName: "timeSheetTemplates", - inFolder: false, - metaFile: false, - suffix: "timeSheetTemplate", - xmlName: "TimeSheetTemplate", + "directoryName": "profileSessionSettings", + "inFolder": false, + "metaFile": false, + "suffix": "profileSessionSetting", + "xmlName": 
"ProfileSessionSetting" }, { - directoryName: "appointmentSchedulingPolicies", - inFolder: false, - metaFile: false, - suffix: "policy", - xmlName: "AppointmentSchedulingPolicy", + "directoryName": "myDomainDiscoverableLogins", + "inFolder": false, + "metaFile": false, + "suffix": "myDomainDiscoverableLogin", + "xmlName": "MyDomainDiscoverableLogin" }, { - directoryName: "Canvases", - inFolder: false, - metaFile: false, - suffix: "Canvas", - xmlName: "CanvasMetadata", + "directoryName": "oauthcustomscopes", + "inFolder": false, + "metaFile": false, + "suffix": "oauthcustomscope", + "xmlName": "OauthCustomScope" }, { - directoryName: "MobileApplicationDetails", - inFolder: false, - metaFile: false, - suffix: "MobileApplicationDetail", - xmlName: "MobileApplicationDetail", + "directoryName": "datacategorygroups", + "inFolder": false, + "metaFile": false, + "suffix": "datacategorygroup", + "xmlName": "DataCategoryGroup" }, { - directoryName: "notificationtypes", - inFolder: false, - metaFile: false, - suffix: "notiftype", - xmlName: "CustomNotificationType", + "directoryName": "remoteSiteSettings", + "inFolder": false, + "metaFile": false, + "suffix": "remoteSite", + "xmlName": "RemoteSiteSetting" }, { - directoryName: "connectedApps", - inFolder: false, - metaFile: false, - suffix: "connectedApp", - xmlName: "ConnectedApp", + "directoryName": "cspTrustedSites", + "inFolder": false, + "metaFile": false, + "suffix": "cspTrustedSite", + "xmlName": "CspTrustedSite" }, { - directoryName: "appMenus", - inFolder: false, - metaFile: false, - suffix: "appMenu", - xmlName: "AppMenu", + "directoryName": "redirectWhitelistUrls", + "inFolder": false, + "metaFile": false, + "suffix": "redirectWhitelistUrl", + "xmlName": "RedirectWhitelistUrl" }, { - directoryName: "notificationTypeConfig", - inFolder: false, - metaFile: false, - suffix: "config", - xmlName: "NotificationTypeConfig", + "directoryName": "duplicateRules", + "inFolder": false, + "metaFile": false, + "suffix": 
"duplicateRule", + "xmlName": "DuplicateRule" }, { - directoryName: "delegateGroups", - inFolder: false, - metaFile: false, - suffix: "delegateGroup", - xmlName: "DelegateGroup", + "directoryName": "cleanDataServices", + "inFolder": false, + "metaFile": false, + "suffix": "cleanDataService", + "xmlName": "CleanDataService" }, { - directoryName: "siteDotComSites", - inFolder: false, - metaFile: true, - suffix: "site", - xmlName: "SiteDotCom", + "directoryName": "skills", + "inFolder": false, + "metaFile": false, + "suffix": "skill", + "xmlName": "Skill" }, { - directoryName: "experiences", - inFolder: false, - metaFile: true, - suffix: "site", - xmlName: "ExperienceBundle", + "directoryName": "serviceChannels", + "inFolder": false, + "metaFile": false, + "suffix": "serviceChannel", + "xmlName": "ServiceChannel" }, { - directoryName: "networks", - inFolder: false, - metaFile: false, - suffix: "network", - xmlName: "Network", + "directoryName": "queueRoutingConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "queueRoutingConfig", + "xmlName": "QueueRoutingConfig" }, { - directoryName: "networkBranding", - inFolder: false, - metaFile: true, - suffix: "networkBranding", - xmlName: "NetworkBranding", + "directoryName": "servicePresenceStatuses", + "inFolder": false, + "metaFile": false, + "suffix": "servicePresenceStatus", + "xmlName": "ServicePresenceStatus" }, { - directoryName: "brandingSets", - inFolder: false, - metaFile: false, - suffix: "brandingSet", - xmlName: "BrandingSet", + "directoryName": "presenceDeclineReasons", + "inFolder": false, + "metaFile": false, + "suffix": "presenceDeclineReason", + "xmlName": "PresenceDeclineReason" }, { - directoryName: "communityThemeDefinitions", - inFolder: false, - metaFile: false, - suffix: "communityThemeDefinition", - xmlName: "CommunityThemeDefinition", + "directoryName": "presenceUserConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "presenceUserConfig", + "xmlName": "PresenceUserConfig" }, { 
- directoryName: "communityTemplateDefinitions", - inFolder: false, - metaFile: false, - suffix: "communityTemplateDefinition", - xmlName: "CommunityTemplateDefinition", + "directoryName": "workSkillRoutings", + "inFolder": false, + "metaFile": false, + "suffix": "workSkillRouting", + "xmlName": "WorkSkillRouting" }, { - directoryName: "navigationMenus", - inFolder: false, - metaFile: false, - suffix: "navigationMenu", - xmlName: "NavigationMenu", + "directoryName": "authproviders", + "inFolder": false, + "metaFile": false, + "suffix": "authprovider", + "xmlName": "AuthProvider" }, { - directoryName: "audience", - inFolder: false, - metaFile: false, - suffix: "audience", - xmlName: "Audience", + "directoryName": "eclair", + "inFolder": false, + "metaFile": true, + "suffix": "geodata", + "xmlName": "EclairGeoData" }, { - directoryName: "flowCategories", - inFolder: false, - metaFile: false, - suffix: "flowCategory", - xmlName: "FlowCategory", + "directoryName": "channelLayouts", + "inFolder": false, + "metaFile": false, + "suffix": "channelLayout", + "xmlName": "ChannelLayout" }, { - directoryName: "lightningBolts", - inFolder: false, - metaFile: false, - suffix: "lightningBolt", - xmlName: "LightningBolt", + "directoryName": "contentassets", + "inFolder": false, + "metaFile": true, + "suffix": "asset", + "xmlName": "ContentAsset" }, { - directoryName: "lightningExperienceThemes", - inFolder: false, - metaFile: false, - suffix: "lightningExperienceTheme", - xmlName: "LightningExperienceTheme", + "directoryName": "sites", + "inFolder": false, + "metaFile": false, + "suffix": "site", + "xmlName": "CustomSite" }, { - directoryName: "lightningOnboardingConfigs", - inFolder: false, - metaFile: false, - suffix: "lightningOnboardingConfig", - xmlName: "LightningOnboardingConfig", + "childXmlNames": [ + "SharingOwnerRule", + "SharingCriteriaRule", + "SharingGuestRule" + ], + "directoryName": "sharingRules", + "inFolder": false, + "metaFile": false, + "suffix": 
"sharingRules", + "xmlName": "SharingRules" }, { - directoryName: "customHelpMenuSections", - inFolder: false, - metaFile: false, - suffix: "customHelpMenuSection", - xmlName: "CustomHelpMenuSection", + "directoryName": "sharingSets", + "inFolder": false, + "metaFile": false, + "suffix": "sharingSet", + "xmlName": "SharingSet" }, { - directoryName: "prompts", - inFolder: false, - metaFile: false, - suffix: "prompt", - xmlName: "Prompt", + "directoryName": "iframeWhiteListUrlSettings", + "inFolder": false, + "metaFile": false, + "suffix": "iframeWhiteListUrlSettings", + "xmlName": "IframeWhiteListUrlSettings" }, { - childXmlNames: "ManagedTopic", - directoryName: "managedTopics", - inFolder: false, - metaFile: false, - suffix: "managedTopics", - xmlName: "ManagedTopics", + "directoryName": "communities", + "inFolder": false, + "metaFile": false, + "suffix": "community", + "xmlName": "Community" }, { - directoryName: "moderation", - inFolder: false, - metaFile: false, - suffix: "keywords", - xmlName: "KeywordList", + "directoryName": "ChatterExtensions", + "inFolder": false, + "metaFile": false, + "suffix": "ChatterExtension", + "xmlName": "ChatterExtension" }, { - directoryName: "userCriteria", - inFolder: false, - metaFile: false, - suffix: "userCriteria", - xmlName: "UserCriteria", + "directoryName": "platformEventChannels", + "inFolder": false, + "metaFile": false, + "suffix": "platformEventChannel", + "xmlName": "PlatformEventChannel" }, { - directoryName: "moderation", - inFolder: false, - metaFile: false, - suffix: "rule", - xmlName: "ModerationRule", + "directoryName": "platformEventChannelMembers", + "inFolder": false, + "metaFile": false, + "suffix": "platformEventChannelMember", + "xmlName": "PlatformEventChannelMember" }, { - directoryName: "cmsConnectSource", - inFolder: false, - metaFile: false, - suffix: "cmsConnectSource", - xmlName: "CMSConnectSource", + "directoryName": "PlatformEventSubscriberConfigs", + "inFolder": false, + "metaFile": false, + 
"suffix": "platformEventSubscriberConfig", + "xmlName": "PlatformEventSubscriberConfig" }, { - directoryName: "managedContentTypes", - inFolder: false, - metaFile: false, - suffix: "managedContentType", - xmlName: "ManagedContentType", + "directoryName": "callCenters", + "inFolder": false, + "metaFile": false, + "suffix": "callCenter", + "xmlName": "CallCenter" }, { - directoryName: "territory2Types", - inFolder: false, - metaFile: false, - suffix: "territory2Type", - xmlName: "Territory2Type", + "directoryName": "milestoneTypes", + "inFolder": false, + "metaFile": false, + "suffix": "milestoneType", + "xmlName": "MilestoneType" }, { - childXmlNames: ["Territory2Rule", "Territory2"], - directoryName: "territory2Models", - inFolder: false, - metaFile: false, - suffix: "territory2Model", - xmlName: "Territory2Model", + "directoryName": "entitlementProcesses", + "inFolder": false, + "metaFile": false, + "suffix": "entitlementProcess", + "xmlName": "EntitlementProcess" }, { - directoryName: "rules", - inFolder: false, - metaFile: false, - suffix: "territory2Rule", - xmlName: "Territory2Rule", + "directoryName": "entitlementTemplates", + "inFolder": false, + "metaFile": false, + "suffix": "entitlementTemplate", + "xmlName": "EntitlementTemplate" }, { - directoryName: "territories", - inFolder: false, - metaFile: false, - suffix: "territory2", - xmlName: "Territory2", + "directoryName": "timeSheetTemplates", + "inFolder": false, + "metaFile": false, + "suffix": "timeSheetTemplate", + "xmlName": "TimeSheetTemplate" }, { - directoryName: "campaignInfluenceModels", - inFolder: false, - metaFile: false, - suffix: "campaignInfluenceModel", - xmlName: "CampaignInfluenceModel", + "directoryName": "appointmentSchedulingPolicies", + "inFolder": false, + "metaFile": false, + "suffix": "policy", + "xmlName": "AppointmentSchedulingPolicy" }, { - directoryName: "samlssoconfigs", - inFolder: false, - metaFile: false, - suffix: "samlssoconfig", - xmlName: "SamlSsoConfig", + 
"directoryName": "Canvases", + "inFolder": false, + "metaFile": false, + "suffix": "Canvas", + "xmlName": "CanvasMetadata" }, { - directoryName: "corsWhitelistOrigins", - inFolder: false, - metaFile: false, - suffix: "corsWhitelistOrigin", - xmlName: "CorsWhitelistOrigin", + "directoryName": "MobileApplicationDetails", + "inFolder": false, + "metaFile": false, + "suffix": "MobileApplicationDetail", + "xmlName": "MobileApplicationDetail" }, { - directoryName: "actionLinkGroupTemplates", - inFolder: false, - metaFile: false, - suffix: "actionLinkGroupTemplate", - xmlName: "ActionLinkGroupTemplate", + "directoryName": "notificationtypes", + "inFolder": false, + "metaFile": false, + "suffix": "notiftype", + "xmlName": "CustomNotificationType" }, { - directoryName: "transactionSecurityPolicies", - inFolder: false, - metaFile: false, - suffix: "transactionSecurityPolicy", - xmlName: "TransactionSecurityPolicy", + "directoryName": "connectedApps", + "inFolder": false, + "metaFile": false, + "suffix": "connectedApp", + "xmlName": "ConnectedApp" }, { - directoryName: "liveChatDeployments", - inFolder: false, - metaFile: false, - suffix: "liveChatDeployment", - xmlName: "LiveChatDeployment", + "directoryName": "appMenus", + "inFolder": false, + "metaFile": false, + "suffix": "appMenu", + "xmlName": "AppMenu" }, { - directoryName: "liveChatButtons", - inFolder: false, - metaFile: false, - suffix: "liveChatButton", - xmlName: "LiveChatButton", + "directoryName": "notificationTypeConfig", + "inFolder": false, + "metaFile": false, + "suffix": "config", + "xmlName": "NotificationTypeConfig" }, { - directoryName: "liveChatAgentConfigs", - inFolder: false, - metaFile: false, - suffix: "liveChatAgentConfig", - xmlName: "LiveChatAgentConfig", + "directoryName": "delegateGroups", + "inFolder": false, + "metaFile": false, + "suffix": "delegateGroup", + "xmlName": "DelegateGroup" }, { - directoryName: "synonymDictionaries", - inFolder: false, - metaFile: false, - suffix: 
"synonymDictionary", - xmlName: "SynonymDictionary", + "directoryName": "siteDotComSites", + "inFolder": false, + "metaFile": true, + "suffix": "site", + "xmlName": "SiteDotCom" }, { - directoryName: "pathAssistants", - inFolder: false, - metaFile: false, - suffix: "pathAssistant", - xmlName: "PathAssistant", + "directoryName": "experiences", + "inFolder": false, + "metaFile": true, + "suffix": "site", + "xmlName": "ExperienceBundle" }, { - directoryName: "animationRules", - inFolder: false, - metaFile: false, - suffix: "animationRule", - xmlName: "AnimationRule", + "directoryName": "digitalExperienceConfigs", + "inFolder": false, + "metaFile": true, + "suffix": "digitalExperienceConfig", + "xmlName": "DigitalExperienceConfig" }, { - directoryName: "LeadConvertSettings", - inFolder: false, - metaFile: false, - suffix: "LeadConvertSetting", - xmlName: "LeadConvertSettings", + "directoryName": "digitalExperiences", + "inFolder": false, + "metaFile": true, + "suffix": "digitalExperience", + "xmlName": "DigitalExperienceBundle" }, { - directoryName: "liveChatSensitiveDataRule", - inFolder: false, - metaFile: false, - suffix: "liveChatSensitiveDataRule", - xmlName: "LiveChatSensitiveDataRule", + "directoryName": "networks", + "inFolder": false, + "metaFile": false, + "suffix": "network", + "xmlName": "Network" }, { - directoryName: "cachePartitions", - inFolder: false, - metaFile: false, - suffix: "cachePartition", - xmlName: "PlatformCachePartition", + "directoryName": "networkBranding", + "inFolder": false, + "metaFile": true, + "suffix": "networkBranding", + "xmlName": "NetworkBranding" }, { - directoryName: "topicsForObjects", - inFolder: false, - metaFile: false, - suffix: "topicsForObjects", - xmlName: "TopicsForObjects", + "directoryName": "brandingSets", + "inFolder": false, + "metaFile": false, + "suffix": "brandingSet", + "xmlName": "BrandingSet" }, { - directoryName: "recommendationStrategies", - inFolder: false, - metaFile: false, - suffix: 
"recommendationStrategy", - xmlName: "RecommendationStrategy", + "directoryName": "communityThemeDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "communityThemeDefinition", + "xmlName": "CommunityThemeDefinition" }, { - directoryName: "emailservices", - inFolder: false, - metaFile: false, - suffix: "xml", - xmlName: "EmailServicesFunction", + "directoryName": "communityTemplateDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "communityTemplateDefinition", + "xmlName": "CommunityTemplateDefinition" }, { - directoryName: "recordActionDeployments", - inFolder: false, - metaFile: false, - suffix: "deployment", - xmlName: "RecordActionDeployment", + "directoryName": "navigationMenus", + "inFolder": false, + "metaFile": false, + "suffix": "navigationMenu", + "xmlName": "NavigationMenu" }, { - directoryName: "EmbeddedServiceConfig", - inFolder: false, - metaFile: false, - suffix: "EmbeddedServiceConfig", - xmlName: "EmbeddedServiceConfig", + "directoryName": "audience", + "inFolder": false, + "metaFile": false, + "suffix": "audience", + "xmlName": "Audience" }, { - directoryName: "EmbeddedServiceLiveAgent", - inFolder: false, - metaFile: false, - suffix: "EmbeddedServiceLiveAgent", - xmlName: "EmbeddedServiceLiveAgent", + "directoryName": "flowCategories", + "inFolder": false, + "metaFile": false, + "suffix": "flowCategory", + "xmlName": "FlowCategory" }, { - directoryName: "EmbeddedServiceBranding", - inFolder: false, - metaFile: false, - suffix: "EmbeddedServiceBranding", - xmlName: "EmbeddedServiceBranding", + "directoryName": "lightningBolts", + "inFolder": false, + "metaFile": false, + "suffix": "lightningBolt", + "xmlName": "LightningBolt" }, { - directoryName: "EmbeddedServiceFlowConfig", - inFolder: false, - metaFile: false, - suffix: "EmbeddedServiceFlowConfig", - xmlName: "EmbeddedServiceFlowConfig", + "directoryName": "lightningExperienceThemes", + "inFolder": false, + "metaFile": false, + "suffix": 
"lightningExperienceTheme", + "xmlName": "LightningExperienceTheme" }, { - directoryName: "EmbeddedServiceMenuSettings", - inFolder: false, - metaFile: false, - suffix: "EmbeddedServiceMenuSettings", - xmlName: "EmbeddedServiceMenuSettings", + "directoryName": "lightningOnboardingConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "lightningOnboardingConfig", + "xmlName": "LightningOnboardingConfig" }, { - directoryName: "settings", - inFolder: false, - metaFile: false, - suffix: "settings", - xmlName: "Settings", + "directoryName": "customHelpMenuSections", + "inFolder": false, + "metaFile": false, + "suffix": "customHelpMenuSection", + "xmlName": "CustomHelpMenuSection" }, { - directoryName: "mlDomains", - inFolder: false, - metaFile: false, - suffix: "mlDomain", - xmlName: "MlDomain", + "directoryName": "prompts", + "inFolder": false, + "metaFile": false, + "suffix": "prompt", + "xmlName": "Prompt" }, { - directoryName: "discovery", - inFolder: false, - metaFile: true, - content: [ + "childXmlNames": "ManagedTopic", + "directoryName": "managedTopics", + "inFolder": false, + "metaFile": false, + "suffix": "managedTopics", + "xmlName": "ManagedTopics" + }, + { + "directoryName": "userCriteria", + "inFolder": false, + "metaFile": false, + "suffix": "userCriteria", + "xmlName": "UserCriteria" + }, + { + "directoryName": "moderation", + "inFolder": false, + "metaFile": false, + "xmlName": "VirtualModeration", + "content": [ { - suffix: "model", - xmlName: "DiscoveryAIModel", + "suffix": "keywords", + "xmlName": "KeywordList" }, { - suffix: "goal", - xmlName: "DiscoveryGoal", - }, + "suffix": "rule", + "xmlName": "ModerationRule" + } + ] + }, + { + "directoryName": "cmsConnectSource", + "inFolder": false, + "metaFile": false, + "suffix": "cmsConnectSource", + "xmlName": "CMSConnectSource" + }, + { + "directoryName": "managedContentTypes", + "inFolder": false, + "metaFile": false, + "suffix": "managedContentType", + "xmlName": "ManagedContentType" + }, + { 
+ "directoryName": "territory2Types", + "inFolder": false, + "metaFile": false, + "suffix": "territory2Type", + "xmlName": "Territory2Type" + }, + { + "childXmlNames": [ + "Territory2Rule", + "Territory2" ], + "directoryName": "territory2Models", + "inFolder": false, + "metaFile": false, + "suffix": "territory2Model", + "xmlName": "Territory2Model" + }, + { + "directoryName": "rules", + "inFolder": false, + "metaFile": false, + "suffix": "territory2Rule", + "xmlName": "Territory2Rule" + }, + { + "directoryName": "territories", + "inFolder": false, + "metaFile": false, + "suffix": "territory2", + "xmlName": "Territory2" + }, + { + "directoryName": "campaignInfluenceModels", + "inFolder": false, + "metaFile": false, + "suffix": "campaignInfluenceModel", + "xmlName": "CampaignInfluenceModel" + }, + { + "directoryName": "samlssoconfigs", + "inFolder": false, + "metaFile": false, + "suffix": "samlssoconfig", + "xmlName": "SamlSsoConfig" + }, + { + "directoryName": "corsWhitelistOrigins", + "inFolder": false, + "metaFile": false, + "suffix": "corsWhitelistOrigin", + "xmlName": "CorsWhitelistOrigin" + }, + { + "directoryName": "actionLinkGroupTemplates", + "inFolder": false, + "metaFile": false, + "suffix": "actionLinkGroupTemplate", + "xmlName": "ActionLinkGroupTemplate" + }, + { + "directoryName": "transactionSecurityPolicies", + "inFolder": false, + "metaFile": false, + "suffix": "transactionSecurityPolicy", + "xmlName": "TransactionSecurityPolicy" + }, + { + "directoryName": "liveChatDeployments", + "inFolder": false, + "metaFile": false, + "suffix": "liveChatDeployment", + "xmlName": "LiveChatDeployment" + }, + { + "directoryName": "liveChatButtons", + "inFolder": false, + "metaFile": false, + "suffix": "liveChatButton", + "xmlName": "LiveChatButton" + }, + { + "directoryName": "liveChatAgentConfigs", + "inFolder": false, + "metaFile": false, + "suffix": "liveChatAgentConfig", + "xmlName": "LiveChatAgentConfig" + }, + { + "directoryName": "synonymDictionaries", + 
"inFolder": false, + "metaFile": false, + "suffix": "synonymDictionary", + "xmlName": "SynonymDictionary" + }, + { + "directoryName": "pathAssistants", + "inFolder": false, + "metaFile": false, + "suffix": "pathAssistant", + "xmlName": "PathAssistant" + }, + { + "directoryName": "animationRules", + "inFolder": false, + "metaFile": false, + "suffix": "animationRule", + "xmlName": "AnimationRule" + }, + { + "directoryName": "LeadConvertSettings", + "inFolder": false, + "metaFile": false, + "suffix": "LeadConvertSetting", + "xmlName": "LeadConvertSettings" + }, + { + "directoryName": "liveChatSensitiveDataRule", + "inFolder": false, + "metaFile": false, + "suffix": "liveChatSensitiveDataRule", + "xmlName": "LiveChatSensitiveDataRule" + }, + { + "directoryName": "cachePartitions", + "inFolder": false, + "metaFile": false, + "suffix": "cachePartition", + "xmlName": "PlatformCachePartition" + }, + { + "directoryName": "topicsForObjects", + "inFolder": false, + "metaFile": false, + "suffix": "topicsForObjects", + "xmlName": "TopicsForObjects" + }, + { + "directoryName": "recommendationStrategies", + "inFolder": false, + "metaFile": false, + "suffix": "recommendationStrategy", + "xmlName": "RecommendationStrategy" + }, + { + "directoryName": "emailservices", + "inFolder": false, + "metaFile": false, + "suffix": "xml", + "xmlName": "EmailServicesFunction" + }, + { + "directoryName": "recordActionDeployments", + "inFolder": false, + "metaFile": false, + "suffix": "deployment", + "xmlName": "RecordActionDeployment" + }, + { + "directoryName": "restrictionRules", + "inFolder": false, + "metaFile": false, + "suffix": "rule", + "xmlName": "RestrictionRule" + }, + { + "directoryName": "EmbeddedServiceConfig", + "inFolder": false, + "metaFile": false, + "suffix": "EmbeddedServiceConfig", + "xmlName": "EmbeddedServiceConfig" + }, + { + "directoryName": "EmbeddedServiceLiveAgent", + "inFolder": false, + "metaFile": false, + "suffix": "EmbeddedServiceLiveAgent", + "xmlName": 
"EmbeddedServiceLiveAgent" }, { - directoryName: "wave", - inFolder: false, - metaFile: true, - content: [ + "directoryName": "EmbeddedServiceBranding", + "inFolder": false, + "metaFile": false, + "suffix": "EmbeddedServiceBranding", + "xmlName": "EmbeddedServiceBranding" + }, + { + "directoryName": "EmbeddedServiceFlowConfig", + "inFolder": false, + "metaFile": false, + "suffix": "EmbeddedServiceFlowConfig", + "xmlName": "EmbeddedServiceFlowConfig" + }, + { + "directoryName": "EmbeddedServiceMenuSettings", + "inFolder": false, + "metaFile": false, + "suffix": "EmbeddedServiceMenuSettings", + "xmlName": "EmbeddedServiceMenuSettings" + }, + { + "directoryName": "settings", + "inFolder": false, + "metaFile": false, + "suffix": "settings", + "xmlName": "Settings" + }, + { + "directoryName": "mlDomains", + "inFolder": false, + "metaFile": false, + "suffix": "mlDomain", + "xmlName": "MlDomain" + }, + { + "directoryName": "discovery", + "inFolder": false, + "metaFile": true, + "xmlName": "VirtualDiscovery", + "content": [ { - suffix: "wapp", - xmlName: "WaveApplication", + "suffix": "model", + "xmlName": "DiscoveryAIModel" }, { - suffix: "wcomp", - xmlName: "WaveComponent", + "suffix": "goal", + "xmlName": "DiscoveryGoal" + } + ] + }, + { + "directoryName": "wave", + "inFolder": false, + "metaFile": true, + "xmlName": "VirtualWave", + "content": [ + { + "suffix": "wapp", + "xmlName": "WaveApplication" }, { - suffix: "wdf", - xmlName: "WaveDataflow", + "suffix": "wcomp", + "xmlName": "WaveComponent" }, { - suffix: "wdash", - xmlName: "WaveDashboard", + "suffix": "wdf", + "xmlName": "WaveDataflow" }, { - suffix: "wds", - xmlName: "WaveDataset", + "suffix": "wdash", + "xmlName": "WaveDashboard" }, { - suffix: "wlens", - xmlName: "WaveLens", + "suffix": "wds", + "xmlName": "WaveDataset" }, { - suffix: "wdpr", - xmlName: "WaveRecipe", + "suffix": "wlens", + "xmlName": "WaveLens" }, { - suffix: "xmd", - xmlName: "WaveXmd", + "suffix": "wdpr", + "xmlName": "WaveRecipe" }, - ], 
+ { + "suffix": "xmd", + "xmlName": "WaveXmd" + } + ] }, { - directoryName: "waveTemplates", - inFolder: true, - metaFile: false, - xmlName: "WaveTemplateBundle", + "directoryName": "waveTemplates", + "inFolder": true, + "metaFile": false, + "xmlName": "WaveTemplateBundle" }, { - directoryName: "bots", - inFolder: false, - metaFile: true, - content: [ + "directoryName": "bots", + "inFolder": false, + "metaFile": true, + "xmlName": "VirtualBot", + "content": [ { - suffix: "bot", - xmlName: "Bot", + "suffix": "bot", + "xmlName": "Bot" }, { - suffix: "botVersion", - xmlName: "BotVersion", - }, - ], + "suffix": "botVersion", + "xmlName": "BotVersion" + } + ] }, { - directoryName: "workflows.alerts", - inFolder: false, - metaFile: false, - parentXmlName: "Workflow", - xmlName: "WorkflowAlert", - xmlTag: "alerts", - }, - { - directoryName: "workflows.fieldUpdates", - inFolder: false, - metaFile: false, - parentXmlName: "Workflow", - xmlName: "WorkflowFieldUpdate", - xmlTag: "fieldUpdates", - }, - { - directoryName: "labels.labels", - inFolder: false, - metaFile: false, - parentXmlName: "CustomLabels", - xmlName: "CustomLabel", - xmlTag: "labels", - }, - { - directoryName: "workflows.outboundMessages", - inFolder: false, - metaFile: false, - parentXmlName: "Workflow", - xmlName: "WorkflowOutboundMessage", - xmlTag: "outboundMessages", - }, - { - directoryName: "workflows.rules", - inFolder: false, - metaFile: false, - parentXmlName: "Workflow", - xmlName: "WorkflowRule", - xmlTag: "rules", + "childXmlNames": [ + "MarketingAppExtActivity" + ], + "directoryName": "marketingappextensions", + "inFolder": false, + "metaFile": false, + "suffix": "marketingappextension", + "xmlName": "MarketingAppExtension" }, { - directoryName: "sharingRules.sharingCriteriaRules", - inFolder: false, - metaFile: false, - parentXmlName: "SharingRules", - xmlName: "SharingCriteriaRule", - xmlTag: "sharingCriteriaRules", + "inFolder": false, + "metaFile": false, + "parentXmlName": 
"MarketingAppExtension", + "xmlName": "MarketingAppExtActivity", + "xmlTag": "marketingAppExtActivities", + "key": "fullName" }, { - directoryName: "sharingRules.sharingGuestRules", - inFolder: false, - metaFile: false, - parentXmlName: "SharingRules", - xmlName: "SharingGuestRule", - xmlTag: "sharingGuestRules", + "childXmlNames": [ + "WorkflowFieldUpdate", + "WorkflowKnowledgePublish", + "WorkflowTask", + "WorkflowAlert", + "WorkflowSend", + "WorkflowOutboundMessage", + "WorkflowRule" + ], + "directoryName": "workflows", + "inFolder": false, + "metaFile": false, + "suffix": "workflow", + "xmlName": "Workflow" + }, + { + "directoryName": "alerts", + "inFolder": false, + "metaFile": false, + "suffix": "alert", + "parentXmlName": "Workflow", + "xmlName": "WorkflowAlert", + "xmlTag": "alerts", + "key": "fullName" + }, + { + "directoryName": "fieldUpdates", + "inFolder": false, + "metaFile": false, + "suffix": "fieldUpdate", + "parentXmlName": "Workflow", + "xmlName": "WorkflowFieldUpdate", + "xmlTag": "fieldUpdates", + "key": "fullName" + }, + { + "directoryName": "labels", + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomLabels", + "xmlName": "CustomLabel", + "childXmlNames": "CustomLabel", + "suffix": "labels", + "xmlTag": "labels", + "key": "fullName" + }, + { + "directoryName": "outboundMessages", + "inFolder": false, + "metaFile": false, + "suffix": "outboundMessage", + "parentXmlName": "Workflow", + "xmlName": "WorkflowOutboundMessage", + "xmlTag": "outboundMessages", + "key": "fullName" + }, + { + "directoryName": "rules", + "inFolder": false, + "metaFile": false, + "suffix": "rule", + "parentXmlName": "Workflow", + "xmlName": "WorkflowRule", + "xmlTag": "rules", + "key": "fullName" + }, + { + "directoryName": "knowledgePublishes", + "inFolder": false, + "metaFile": false, + "suffix": "knowledgePublishe", + "parentXmlName": "Workflow", + "xmlName": "WorkflowKnowledgePublish", + "xmlTag": "knowledgePublishes", + "key": "fullName" + }, + { + 
"directoryName": "tasks", + "inFolder": false, + "metaFile": false, + "suffix": "task", + "parentXmlName": "Workflow", + "xmlName": "WorkflowTask", + "xmlTag": "tasks", + "key": "fullName" + }, + { + "directoryName": "sharingCriteriaRules", + "inFolder": false, + "metaFile": false, + "suffix": "sharingCriteriaRule", + "parentXmlName": "SharingRules", + "xmlName": "SharingCriteriaRule", + "xmlTag": "sharingCriteriaRules", + "key": "fullName" + }, + { + "directoryName": "sharingGuestRules", + "inFolder": false, + "metaFile": false, + "suffix": "sharingGuestRule", + "parentXmlName": "SharingRules", + "xmlName": "SharingGuestRule", + "xmlTag": "sharingGuestRules", + "key": "fullName" + }, + { + "directoryName": "sharingOwnerRules", + "inFolder": false, + "metaFile": false, + "suffix": "sharingOwnerRule", + "parentXmlName": "SharingRules", + "xmlName": "SharingOwnerRule", + "xmlTag": "sharingOwnerRules", + "key": "fullName" + }, + { + "directoryName": "sharingTerritoryRules", + "inFolder": false, + "metaFile": false, + "suffix": "sharingTerritoryRule", + "parentXmlName": "SharingRules", + "xmlName": "SharingTerritoryRule", + "xmlTag": "sharingTerritoryRules", + "key": "fullName" + }, + { + "childXmlNames": "AssignmentRule", + "directoryName": "assignmentRules", + "inFolder": false, + "metaFile": false, + "suffix": "assignmentRules", + "xmlName": "AssignmentRules" + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "AssignmentRules", + "xmlName": "AssignmentRule", + "xmlTag": "assignmentRule", + "key": "fullName" + }, + { + "childXmlNames": "AutoResponseRule", + "directoryName": "autoResponseRules", + "inFolder": false, + "metaFile": false, + "suffix": "autoResponseRules", + "xmlName": "AutoResponseRules" + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "AutoResponseRules", + "xmlName": "AutoResponseRule", + "xmlTag": "autoResponseRule", + "key": "fullName" + }, + { + "childXmlNames": "EscalationRule", + "directoryName": 
"escalationRules", + "inFolder": false, + "metaFile": false, + "suffix": "escalationRules", + "xmlName": "EscalationRules" + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "EscalationRules", + "xmlName": "EscalationRule", + "xmlTag": "escalationRule", + "key": "fullName" + }, + { + "childXmlNames": "MatchingRule", + "directoryName": "matchingRules", + "inFolder": false, + "metaFile": false, + "suffix": "matchingRule", + "xmlName": "MatchingRules" + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "MatchingRules", + "xmlName": "MatchingRule", + "xmlTag": "matchingRules", + "key": "fullName" + }, + { + "directoryName": "globalValueSetTranslations", + "inFolder": false, + "metaFile": false, + "suffix": "globalValueSetTranslation", + "xmlName": "GlobalValueSetTranslation", + "pruneOnly": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "GlobalValueSetTranslation", + "xmlName": "ValueTranslation", + "xmlTag": "valueTranslation", + "key": "masterLabel", + "excluded": true + }, + { + "directoryName": "standardValueSetTranslations", + "inFolder": false, + "metaFile": false, + "suffix": "standardValueSetTranslation", + "xmlName": "StandardValueSetTranslation", + "pruneOnly": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "StandardValueSetTranslation", + "xmlName": "ValueTranslation", + "xmlTag": "valueTranslation", + "key": "masterLabel", + "excluded": true + }, + { + "directoryName": "profiles", + "inFolder": false, + "metaFile": false, + "suffix": "profile", + "xmlName": "Profile", + "pruneOnly": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileApplicationVisibility", + "xmlTag": "categoryGroupVisibilities", + "key": "dataCategoryGroup", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileCategoryGroupVisibility", + "xmlTag": "applicationVisibilities", + 
"key": "application", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileApexClassAccess", + "xmlTag": "classAccesses", + "key": "apexClass", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileCustomMetadataTypeAccess[", + "xmlTag": "customMetadataTypeAccesses", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileCustomPermissions", + "xmlTag": "customPermissions", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileCustomSettingAccesses", + "xmlTag": "customSettingAccesses", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileExternalDataSourceAccess", + "xmlTag": "externalDataSourceAccesses", + "key": "externalDataSource", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileFieldLevelSecurity", + "xmlTag": "fieldPermissions", + "key": "field", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileFlowAccess", + "xmlTag": "flowAccesses", + "key": "flow", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "LoginFlow", + "xmlTag": "loginFlows", + "key": "friendlyname", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileObjectPermissions", + "xmlTag": "objectPermissions", + "key": "object", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileApexPageAccess", + "xmlTag": "pageAccesses", + "key": "apexPage", + "excluded": true + }, + { + 
"inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileActionOverride", + "xmlTag": "profileActionOverrides", + "key": "actionName", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileRecordTypeVisibility", + "xmlTag": "recordTypeVisibilities", + "key": "recordType", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileTabVisibility", + "xmlTag": "tabVisibilities", + "key": "tab", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Profile", + "xmlName": "ProfileUserPermission", + "xmlTag": "userPermissions", + "key": "name", + "excluded": true + }, + { + "directoryName": "translations", + "inFolder": false, + "metaFile": false, + "suffix": "translation", + "xmlName": "Translations", + "pruneOnly": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "BotTranslation", + "xmlTag": "bots", + "key": "fullName", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "CustomApplicationTranslation", + "xmlTag": "customApplications", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "CustomLabelTranslation", + "xmlTag": "customLabels", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "CustomPageWebLinkTranslation", + "xmlTag": "customPageWebLinks", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "CustomTabTranslation", + "xmlTag": "customTabs", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": 
"FlowDefinitionTranslation", + "xmlTag": "flowDefinitions", + "key": "fullName", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "PipelineInspMetricConfigTranslation", + "xmlTag": "pipelineInspMetricConfigs", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "PromptTranslation", + "xmlTag": "prompts", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "GlobalQuickActionTranslation", + "xmlTag": "quickActions", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "ReportTypeTranslation", + "xmlTag": "reportTypes", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "Translations", + "xmlName": "ScontrolTranslation", + "xmlTag": "scontrols", + "key": "name", + "excluded": true + }, + { + "directoryName": "objectTranslations", + "inFolder": false, + "metaFile": false, + "suffix": "objectTranslation", + "xmlName": "CustomObjectTranslation", + "pruneOnly": true + }, + { + "directoryName": "objectTranslations", + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "suffix": "fieldTranslation", + "xmlName": "CustomObjectTranslation", + "xmlTag": "fields", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "FieldSetTranslation", + "xmlTag": "fieldSets", + "key": "name", + "excluded": true + }, + { + + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "LayoutTranslation", + "xmlTag": "layouts", + "key": "layout", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + 
"xmlName": "NamedFilterTranslation", + "xmlTag": "namedFilters", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "QuickActionTranslation", + "xmlTag": "quickActions", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "RecordTypeTranslation", + "xmlTag": "recordTypes", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "SharingReasonTranslation", + "xmlTag": "sharingReasons", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "ValidationRuleTranslation", + "xmlTag": "validationRules", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "WebLinkTranslation", + "xmlTag": "webLinks", + "key": "name", + "excluded": true + }, + { + "inFolder": false, + "metaFile": false, + "parentXmlName": "CustomObjectTranslation", + "xmlName": "WorkflowTaskTranslation", + "xmlTag": "workflowTasks", + "key": "name", + "excluded": true + }, + { + "directoryName": "clauseCatgConfigurations", + "inFolder": false, + "metaFile": false, + "suffix": "clauseCatgConfiguration", + "xmlName": "ClauseCatgConfiguration" + }, + { + "directoryName": "disclosureDefinitions", + "inFolder": false, + "metaFile": false, + "suffix": "disclosureDefinition", + "xmlName": "DisclosureDefinition" + }, + { + "directoryName": "disclosureDefinitionVersions", + "inFolder": false, + "metaFile": false, + "suffix": "disclosureDefinitionVersion", + "xmlName": "DisclosureDefinitionVersion" + }, + { + "directoryName": "disclosureTypes", + "inFolder": false, + "metaFile": false, + "suffix": "disclosureType ", + "xmlName": "DisclosureType" + }, + { + 
"directoryName": "fuelTypes", + "inFolder": false, + "metaFile": false, + "suffix": "fuelType", + "xmlName": "FuelType" }, { - directoryName: "sharingRules.sharingOwnerRules", - inFolder: false, - metaFile: false, - parentXmlName: "SharingRules", - xmlName: "SharingOwnerRule", - xmlTag: "sharingOwnerRules", + "directoryName": "fuelTypeSustnUoms", + "inFolder": false, + "metaFile": false, + "suffix": "fuelTypeSustnUom", + "xmlName": "FuelTypeSustnUom" }, { - directoryName: "sharingRules.sharingTerritoryRules", - inFolder: false, - metaFile: false, - parentXmlName: "SharingRules", - xmlName: "SharingTerritoryRule", - xmlTag: "sharingTerritoryRules", - }, + "directoryName": "sustnUomConversions", + "inFolder": false, + "metaFile": false, + "suffix": "sustnUomConversion", + "xmlName": "SustnUomConversion" + }, { - directoryName: "workflows.tasks", - inFolder: false, - metaFile: false, - parentXmlName: "Workflow", - xmlName: "WorkflowTask", - xmlTag: "tasks", - }, + "directoryName": "sustainabilityUoms", + "inFolder": false, + "metaFile": false, + "suffix": "sustainabilityUom", + "xmlName": "SustainabilityUom" + } ]; } diff --git a/src/common/notifProvider/apiProvider.ts b/src/common/notifProvider/apiProvider.ts index b5efff492..f894f0f5f 100644 --- a/src/common/notifProvider/apiProvider.ts +++ b/src/common/notifProvider/apiProvider.ts @@ -1,24 +1,23 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import { NotifProviderRoot } from "./notifProviderRoot"; -import { getCurrentGitBranch, getGitRepoName, uxLog } from "../utils"; -import { NotifMessage, NotifSeverity, UtilsNotifs } from "."; -import { getEnvVar } from "../../config"; +import { Connection, SfError } from "@salesforce/core"; +import c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot.js"; +import { getCurrentGitBranch, getGitRepoName, uxLog } from "../utils/index.js"; +import { NotifMessage, NotifSeverity, UtilsNotifs } from "./index.js"; +import { CONSTANTS, 
getEnvVar } from "../../config/index.js"; -import { getSeverityIcon, removeMarkdown } from "../utils/notifUtils"; -import { Connection } from "jsforce"; -import { GitProvider } from "../gitProvider"; +import { getSeverityIcon, removeMarkdown } from "../utils/notifUtils.js"; +import { GitProvider } from "../gitProvider/index.js"; import axios, { AxiosRequestConfig } from "axios"; const MAX_LOKI_LOG_LENGTH = Number(process.env.MAX_LOKI_LOG_LENGTH || 200000); const TRUNCATE_LOKI_ELEMENTS_LENGTH = Number(process.env.TRUNCATE_LOKI_ELEMENTS_LENGTH || 500); export class ApiProvider extends NotifProviderRoot { - protected apiUrl: string; + protected apiUrl: string | null; public payload: ApiNotifMessage; public payloadFormatted: any; - protected metricsApiUrl: string; + protected metricsApiUrl: string | null; public metricsPayload: string; public getLabel(): string { @@ -40,7 +39,7 @@ export class ApiProvider extends NotifProviderRoot { const apiPromises: Promise[] = []; // Use Promises to optimize performances with api calls this.apiUrl = getEnvVar("NOTIF_API_URL"); if (this.apiUrl == null) { - throw new SfdxError("[ApiProvider] You need to define a variable NOTIF_API_URL to use sfdx-hardis Api notifications"); + throw new SfError("[ApiProvider] You need to define a variable NOTIF_API_URL to use sfdx-hardis Api notifications"); } // Build initial payload data from notifMessage await this.buildPayload(notifMessage); @@ -70,7 +69,7 @@ export class ApiProvider extends NotifProviderRoot { let logBodyText = UtilsNotifs.prefixWithSeverityEmoji(UtilsNotifs.slackToTeamsMarkdown(notifMessage.text), notifMessage.severity); // Add text details - if (notifMessage?.attachments?.length > 0) { + if (notifMessage?.attachments?.length && notifMessage?.attachments?.length > 0) { let text = "\n\n"; for (const attachment of notifMessage.attachments) { if (attachment.text) { @@ -83,7 +82,7 @@ export class ApiProvider extends NotifProviderRoot { } // Add action blocks - if 
(notifMessage.buttons?.length > 0) { + if (notifMessage.buttons?.length && notifMessage.buttons?.length > 0) { logBodyText += "Links:\n\n"; for (const button of notifMessage.buttons) { // Url button @@ -95,14 +94,14 @@ export class ApiProvider extends NotifProviderRoot { } // Add sfdx-hardis ref - logBodyText += "Powered by sfdx-hardis: https://sfdx-hardis.cloudity.com"; + logBodyText += `Powered by sfdx-hardis: ${CONSTANTS.DOC_URL_ROOT}`; logBodyText = removeMarkdown(logBodyText); // Build payload - const repoName = (await getGitRepoName()).replace(".git", ""); + const repoName = (await getGitRepoName() || "").replace(".git", ""); const currentGitBranch = await getCurrentGitBranch(); const conn: Connection = globalThis.jsForceConn; - const orgIdentifier = conn.instanceUrl.replace("https://", "").replace(".my.salesforce.com", "").replace(/\./gm, "__"); + const orgIdentifier = (conn.instanceUrl) ? conn.instanceUrl.replace("https://", "").replace(".my.salesforce.com", "").replace(/\./gm, "__") : currentGitBranch || "ERROR apiProvider"; const notifKey = orgIdentifier + "!!" + notifMessage.type; this.payload = { source: "sfdx-hardis", @@ -129,7 +128,7 @@ export class ApiProvider extends NotifProviderRoot { } private async formatPayload() { - if (this.apiUrl.includes("loki/api/v1/push")) { + if ((this.apiUrl || "").includes("loki/api/v1/push")) { await this.formatPayloadLoki(); return; } @@ -152,6 +151,7 @@ export class ApiProvider extends NotifProviderRoot { newPayloadData._logElementsTruncated = true; payloadDataJson = JSON.stringify(newPayloadData); uxLog( + "log", this, c.grey( `[ApiProvider] Truncated _logElements from ${logElements.length} to ${truncatedLogElements.length} to avoid Loki entry max size reached (initial size: ${bodyBytesLen} bytes)`, @@ -160,7 +160,7 @@ export class ApiProvider extends NotifProviderRoot { } else { newPayloadData._logBodyText = (newPayloadData._logBodyText || "").slice(0, 100) + "\n ... 
(truncated)"; payloadDataJson = JSON.stringify(newPayloadData); - uxLog(this, c.grey(`[ApiProvider] Truncated _logBodyText to 100 to avoid Loki entry max size reached (initial size: ${bodyBytesLen} bytes)`)); + uxLog("log", this, c.grey(`[ApiProvider] Truncated _logBodyText to 100 to avoid Loki entry max size reached (initial size: ${bodyBytesLen} bytes)`)); } } this.payloadFormatted = { @@ -181,8 +181,8 @@ export class ApiProvider extends NotifProviderRoot { // Basic Auth if (getEnvVar("NOTIF_API_BASIC_AUTH_USERNAME") != null) { axiosConfig.auth = { - username: getEnvVar("NOTIF_API_BASIC_AUTH_USERNAME"), - password: getEnvVar("NOTIF_API_BASIC_AUTH_PASSWORD"), + username: getEnvVar("NOTIF_API_BASIC_AUTH_USERNAME") || "", + password: getEnvVar("NOTIF_API_BASIC_AUTH_PASSWORD") || "", }; } // Bearer token @@ -191,18 +191,18 @@ export class ApiProvider extends NotifProviderRoot { } // POST message try { - const axiosResponse = await axios.post(this.apiUrl, this.payloadFormatted, axiosConfig); + const axiosResponse = await axios.post(this.apiUrl || "", this.payloadFormatted, axiosConfig); const httpStatus = axiosResponse.status; if (httpStatus > 200 && httpStatus < 300) { - uxLog(this, c.cyan(`[ApiProvider] Posted message to API ${this.apiUrl} (${httpStatus})`)); + uxLog("action", this, c.cyan(`[ApiProvider] Posted message to API ${this.apiUrl} (${httpStatus})`)); if (getEnvVar("NOTIF_API_DEBUG") === "true") { - uxLog(this, c.cyan(JSON.stringify(this.payloadFormatted, null, 2))); + uxLog("action", this, c.cyan(JSON.stringify(this.payloadFormatted, null, 2))); } } } catch (e) { - uxLog(this, c.yellow(`[ApiProvider] Error while sending message to API ${this.apiUrl}: ${e.message}`)); - uxLog(this, c.grey("Request body: \n" + JSON.stringify(this.payloadFormatted))); - uxLog(this, c.grey("Response body: \n" + JSON.stringify(e?.response?.data || {}))); + uxLog("warning", this, c.yellow(`[ApiProvider] Error while sending message to API ${this.apiUrl}: ${(e as 
Error).message}`)); + uxLog("log", this, c.grey("Request body: \n" + JSON.stringify(this.payloadFormatted))); + uxLog("log", this, c.grey("Response body: \n" + JSON.stringify((e as any)?.response?.data || {}))); } } @@ -215,7 +215,7 @@ export class ApiProvider extends NotifProviderRoot { `orgIdentifier=${this.payload.orgIdentifier},` + `gitIdentifier=${this.payload.gitIdentifier}`; // Add extra fields and value - const metricsPayloadLines = []; + const metricsPayloadLines: any[] = []; for (const metricId of Object.keys(this.payload.data._metrics)) { const metricData = this.payload.data._metrics[metricId]; let metricPayloadLine = metricId + "," + metricTags + " "; @@ -223,7 +223,7 @@ export class ApiProvider extends NotifProviderRoot { metricPayloadLine += "metric=" + metricData.toFixed(2); metricsPayloadLines.push(metricPayloadLine); } else if (typeof metricData === "object") { - const metricFields = []; + const metricFields: any[] = []; if (metricData.min) { metricFields.push("min=" + metricData.min.toFixed(2)); } @@ -251,8 +251,8 @@ export class ApiProvider extends NotifProviderRoot { // Basic Auth if (getEnvVar("NOTIF_API_METRICS_BASIC_AUTH_USERNAME") != null) { axiosConfig.auth = { - username: getEnvVar("NOTIF_API_METRICS_BASIC_AUTH_USERNAME"), - password: getEnvVar("NOTIF_API_METRICS_BASIC_AUTH_PASSWORD"), + username: getEnvVar("NOTIF_API_METRICS_BASIC_AUTH_USERNAME") || "", + password: getEnvVar("NOTIF_API_METRICS_BASIC_AUTH_PASSWORD") || "", }; } // Bearer token @@ -261,18 +261,18 @@ export class ApiProvider extends NotifProviderRoot { } // POST message try { - const axiosResponse = await axios.post(this.metricsApiUrl, this.metricsPayload, axiosConfig); + const axiosResponse = await axios.post(this.metricsApiUrl || "", this.metricsPayload, axiosConfig); const httpStatus = axiosResponse.status; if (httpStatus > 200 && httpStatus < 300) { - uxLog(this, c.cyan(`[ApiMetricProvider] Posted message to API ${this.metricsApiUrl} (${httpStatus})`)); + uxLog("action", 
this, c.cyan(`[ApiMetricProvider] Posted message to API ${this.metricsApiUrl} (${httpStatus})`)); if (getEnvVar("NOTIF_API_DEBUG") === "true") { - uxLog(this, c.cyan(JSON.stringify(this.metricsPayload, null, 2))); + uxLog("action", this, c.cyan(JSON.stringify(this.metricsPayload, null, 2))); } } } catch (e) { - uxLog(this, c.yellow(`[ApiMetricProvider] Error while sending message to API ${this.metricsApiUrl}: ${e.message}`)); - uxLog(this, c.grey("Request body: \n" + JSON.stringify(this.metricsPayload))); - uxLog(this, c.grey("Response body: \n" + JSON.stringify(e?.response?.data || {}))); + uxLog("warning", this, c.yellow(`[ApiMetricProvider] Error while sending message to API ${this.metricsApiUrl}: ${(e as Error).message}`)); + uxLog("log", this, c.grey("Request body: \n" + JSON.stringify(this.metricsPayload))); + uxLog("log", this, c.grey("Response body: \n" + JSON.stringify((e as any)?.response?.data || {}))); } } } diff --git a/src/common/notifProvider/emailProvider.ts b/src/common/notifProvider/emailProvider.ts index acc2703ad..b4e543b94 100644 --- a/src/common/notifProvider/emailProvider.ts +++ b/src/common/notifProvider/emailProvider.ts @@ -1,13 +1,13 @@ -import { SfdxError } from "@salesforce/core"; -import * as DOMPurify from "isomorphic-dompurify"; -import * as c from "chalk"; -import { NotifProviderRoot } from "./notifProviderRoot"; -import { getCurrentGitBranch, uxLog } from "../utils"; -import { NotifMessage, UtilsNotifs } from "."; -import { getEnvVar } from "../../config"; +import { SfError } from "@salesforce/core"; +import DOMPurify from "isomorphic-dompurify"; +import c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot.js"; +import { getCurrentGitBranch, uxLog } from "../utils/index.js"; +import { NotifMessage, UtilsNotifs } from "./index.js"; +import { CONSTANTS, getEnvVar } from "../../config/index.js"; import { marked } from "marked"; -import { EmailMessage, sendEmail } from "../utils/emailUtils"; -import { removeMarkdown } 
from "../utils/notifUtils"; +import { EmailMessage, sendEmail } from "../utils/emailUtils.js"; +import { removeMarkdown } from "../utils/notifUtils.js"; export class EmailProvider extends NotifProviderRoot { public getLabel(): string { @@ -18,13 +18,13 @@ export class EmailProvider extends NotifProviderRoot { public async postNotification(notifMessage: NotifMessage): Promise { const mainEmailAddress = getEnvVar("NOTIF_EMAIL_ADDRESS"); if (mainEmailAddress == null) { - throw new SfdxError("[EmailProvider] You need to define a variable NOTIF_EMAIL_ADDRESS to use sfdx-hardis Email notifications"); + throw new SfError("[EmailProvider] You need to define a variable NOTIF_EMAIL_ADDRESS to use sfdx-hardis Email notifications"); } const emailAddresses = mainEmailAddress.split(","); // Add branch custom Teams channel if defined - const customEmailChannelVariable = `NOTIF_EMAIL_ADDRESS_${(await getCurrentGitBranch()).toUpperCase()}`; + const customEmailChannelVariable = `NOTIF_EMAIL_ADDRESS_${(await getCurrentGitBranch() || "").toUpperCase()}`; if (getEnvVar(customEmailChannelVariable)) { - emailAddresses.push(...getEnvVar(customEmailChannelVariable).split(",")); + emailAddresses.push(...(getEnvVar(customEmailChannelVariable) || "").split(",")); } /* jscpd:ignore-start */ @@ -37,7 +37,7 @@ export class EmailProvider extends NotifProviderRoot { let emailBody = UtilsNotifs.prefixWithSeverityEmoji(UtilsNotifs.slackToTeamsMarkdown(notifMessage.text), notifMessage.severity); // Add text details - if (notifMessage?.attachments?.length > 0) { + if (notifMessage?.attachments?.length && notifMessage?.attachments?.length > 0) { let text = "\n\n"; for (const attachment of notifMessage.attachments) { if (attachment.text) { @@ -51,7 +51,7 @@ export class EmailProvider extends NotifProviderRoot { /* jscpd:ignore-end */ // Add action blocks - if (notifMessage.buttons?.length > 0) { + if (notifMessage.buttons?.length && notifMessage.buttons?.length > 0) { emailBody += "**Links:**\n\n"; for 
(const button of notifMessage.buttons) { // Url button @@ -63,7 +63,7 @@ export class EmailProvider extends NotifProviderRoot { } // Add sfdx-hardis ref - emailBody += "_Powered by [sfdx-hardis](https://sfdx-hardis.cloudity.com)_"; + emailBody += `_Powered by [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT})_`; // Send email const emailBodyHtml1 = marked.parse(emailBody); @@ -76,11 +76,11 @@ export class EmailProvider extends NotifProviderRoot { attachments: notifMessage?.attachedFiles || [], }; const emailRes = await sendEmail(emailMessage); - if (emailRes.success) { - uxLog(this, c.cyan(`[EmailProvider] Sent email to ${emailAddresses.join(",")}`)); + if (emailRes?.success) { + uxLog("action", this, c.cyan(`[EmailProvider] Sent email to ${emailAddresses.join(",")}`)); } else { - uxLog(this, c.yellow(`[EmailProvider] Error while sending email to ${emailAddresses.join(",")}`)); - uxLog(this, c.grey(JSON.stringify(emailRes.detail, null, 2))); + uxLog("warning", this, c.yellow(`[EmailProvider] Error while sending email to ${emailAddresses.join(",")}`)); + uxLog("log", this, c.grey(JSON.stringify(emailRes?.detail, null, 2))); } return; } diff --git a/src/common/notifProvider/index.ts b/src/common/notifProvider/index.ts index 59b4c88e7..69af2f2c2 100644 --- a/src/common/notifProvider/index.ts +++ b/src/common/notifProvider/index.ts @@ -1,12 +1,12 @@ -import { uxLog } from "../utils"; -import * as c from "chalk"; -import { NotifProviderRoot } from "./notifProviderRoot"; -import { SlackProvider } from "./slackProvider"; -import { UtilsNotifs as utilsNotifs } from "./utils"; -import { TeamsProvider } from "./teamsProvider"; -import { getConfig } from "../../config"; -import { EmailProvider } from "./emailProvider"; -import { ApiProvider } from "./apiProvider"; +import { isCI, uxLog } from "../utils/index.js"; +import c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot.js"; +import { SlackProvider } from "./slackProvider.js"; +import { UtilsNotifs as utilsNotifs 
} from "./utils.js"; +import { TeamsProvider } from "./teamsProvider.js"; +import { CONSTANTS, getConfig } from "../../config/index.js"; +import { EmailProvider } from "./emailProvider.js"; +import { ApiProvider } from "./apiProvider.js"; export abstract class NotifProvider { static getInstances(): NotifProviderRoot[] { @@ -32,39 +32,40 @@ export abstract class NotifProvider { // Post notifications to all configured channels // This method is sync to allow the command to continue and not negatively impact performances - static postNotifications(notifMessage: NotifMessage) { - getConfig("user").then((config) => { - const notificationsDisable = - config.notificationsDisable ?? (process.env?.NOTIFICATIONS_DISABLE ? process.env.NOTIFICATIONS_DISABLE.split(",") : []); - uxLog(this, c.gray(`[NotifProvider] Handling notification of type ${notifMessage.type}...`)); - const notifProviders = this.getInstances(); - if (notifProviders.length === 0) { + static async postNotifications(notifMessage: NotifMessage) { + const config = await getConfig("user"); + const notificationsDisable = + config.notificationsDisable ?? (process.env?.NOTIFICATIONS_DISABLE ? 
process.env.NOTIFICATIONS_DISABLE.split(",") : []); + uxLog("error", this, c.grey(`[NotifProvider] Handling notification of type ${notifMessage.type}...`)); + const notifProviders = this.getInstances(); + if (notifProviders.length === 0 && isCI) { + uxLog( + "log", + this, + c.grey( + `[NotifProvider] No notif has been configured: ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integrations-home/#message-notifications`, + ), + ); + } + for (const notifProvider of notifProviders) { + uxLog("error", this, c.grey(`[NotifProvider] - Notif target found: ${notifProvider.getLabel()}`)); + // Skip if matching NOTIFICATIONS_DISABLE except for Api + if (notificationsDisable.includes(notifMessage.type) && notifProvider.isUserNotifProvider()) { uxLog( + "warning", this, - c.gray( - `[NotifProvider] No notif has been configured: https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integrations-home/#message-notifications`, + c.yellow( + `[NotifProvider] Skip notification of type ${notifMessage.type} according to configuration (NOTIFICATIONS_DISABLE env var or notificationsDisable .sfdx-hardis.yml property)`, ), ); } - for (const notifProvider of notifProviders) { - uxLog(this, c.gray(`[NotifProvider] - Notif target found: ${notifProvider.getLabel()}`)); - // Skip if matching NOTIFICATIONS_DISABLE except for Api - if (notificationsDisable.includes(notifMessage.type) && notifProvider.isUserNotifProvider()) { - uxLog( - this, - c.yellow( - `[NotifProvider] Skip notification of type ${notifMessage.type} according to configuration (NOTIFICATIONS_DISABLE env var or notificationsDisable .sfdx-hardis.yml property)`, - ), - ); - } - // Do not send notifs for level "log" to Users, but just to logs/metrics API - else if (notifProvider.isApplicableForNotif(notifMessage)) { - notifProvider.postNotification(notifMessage); - } else { - uxLog(this, c.gray(`[NotifProvider] - Skipped: ${notifProvider.getLabel()} as not applicable for notification severity`)); - } + // Do not send notifs for 
level "log" to Users, but just to logs/metrics API + else if (notifProvider.isApplicableForNotif(notifMessage)) { + await notifProvider.postNotification(notifMessage); + } else { + uxLog("error", this, c.grey(`[NotifProvider] - Skipped: ${notifProvider.getLabel()} as not applicable for notification severity`)); } - }); + } } public getLabel(): string { @@ -73,7 +74,7 @@ export abstract class NotifProvider { // eslint-disable-next-line @typescript-eslint/no-unused-vars public async postNotification(notifMessage: string, buttons: any[] = [], attachments: any[] = []): Promise { - uxLog(this, c.grey("method postNotification is not implemented on " + this.getLabel())); + uxLog("log", this, c.grey("method postNotification is not implemented on " + this.getLabel())); } } @@ -82,21 +83,25 @@ export type NotifSeverity = "critical" | "error" | "warning" | "info" | "success export interface NotifMessage { text: string; type: - | "ACTIVE_USERS" - | "AUDIT_TRAIL" - | "APEX_TESTS" - | "BACKUP" - | "DEPLOYMENT" - | "LEGACY_API" - | "LICENSES" - | "LINT_ACCESS" - | "UNUSED_METADATAS" - | "METADATA_STATUS" - | "MISSING_ATTRIBUTES" - | "UNUSED_LICENSES" - | "UNUSED_USERS" - | "ORG_INFO" - | "ORG_LIMITS"; + | "ACTIVE_USERS" + | "AUDIT_TRAIL" + | "APEX_TESTS" + | "BACKUP" + | "DEPLOYMENT" + | "LEGACY_API" + | "LICENSES" + | "LINT_ACCESS" + | "UNUSED_METADATAS" + | "METADATA_STATUS" + | "MISSING_ATTRIBUTES" + | "UNUSED_LICENSES" + | "UNUSED_USERS" + | "UNUSED_APEX_CLASSES" + | "CONNECTED_APPS" + | "UNSECURED_CONNECTED_APPS" + | "ORG_INFO" + | "ORG_LIMITS" + | "RELEASE_UPDATES"; buttons?: NotifButton[]; attachments?: any[]; severity: NotifSeverity; diff --git a/src/common/notifProvider/notifProviderRoot.ts b/src/common/notifProvider/notifProviderRoot.ts index 44cc84451..bc537c07d 100644 --- a/src/common/notifProvider/notifProviderRoot.ts +++ b/src/common/notifProvider/notifProviderRoot.ts @@ -1,12 +1,12 @@ -import { SfdxError } from "@salesforce/core"; -import { uxLog } from "../utils"; 
-import { NotifMessage } from "."; +import { SfError } from "@salesforce/core"; +import { uxLog } from "../utils/index.js"; +import { NotifMessage } from "./index.js"; export abstract class NotifProviderRoot { protected token: string; public getLabel(): string { - throw new SfdxError("getLabel should be implemented on this call"); + throw new SfError("getLabel should be implemented on this call"); } // By default, we don't send logs to other notif targets than API to avoid noise @@ -21,7 +21,7 @@ export abstract class NotifProviderRoot { // eslint-disable-next-line @typescript-eslint/no-unused-vars public async postNotification(notifMessage: NotifMessage): Promise { - uxLog(this, `Method postNotification is not implemented yet on ${this.getLabel()}`); + uxLog("other", this, `Method postNotification is not implemented yet on ${this.getLabel()}`); return; } } diff --git a/src/common/notifProvider/slackProvider.ts b/src/common/notifProvider/slackProvider.ts index 0fb040ab1..0701ee5f2 100644 --- a/src/common/notifProvider/slackProvider.ts +++ b/src/common/notifProvider/slackProvider.ts @@ -1,17 +1,17 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import { NotifProviderRoot } from "./notifProviderRoot"; +import { SfError } from "@salesforce/core"; +import c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot.js"; import { ActionsBlock, Block, Button, SectionBlock, WebClient } from "@slack/web-api"; -import { getCurrentGitBranch, uxLog } from "../utils"; -import { NotifMessage, UtilsNotifs } from "."; -import { getEnvVar } from "../../config"; +import { getCurrentGitBranch, uxLog } from "../utils/index.js"; +import { NotifMessage, UtilsNotifs } from "./index.js"; +import { getEnvVar } from "../../config/index.js"; export class SlackProvider extends NotifProviderRoot { private slackClient: InstanceType; constructor() { super(); - this.token = process.env.SLACK_TOKEN; + this.token = process.env.SLACK_TOKEN || ""; 
this.slackClient = new WebClient(this.token); } @@ -23,15 +23,15 @@ export class SlackProvider extends NotifProviderRoot { public async postNotification(notifMessage: NotifMessage): Promise { const mainNotifsChannelId = getEnvVar("SLACK_CHANNEL_ID"); if (mainNotifsChannelId == null) { - throw new SfdxError( + throw new SfError( "[SlackProvider] You need to define a variable SLACK_CHANNEL_ID to use sfdx-hardis Slack Integration. Otherwise, remove variable SLACK_TOKEN", ); } const slackChannelsIds = mainNotifsChannelId.split(","); // Add branch custom slack channel if defined - const customSlackChannelVariable = `SLACK_CHANNEL_ID_${(await getCurrentGitBranch()).toUpperCase()}`; + const customSlackChannelVariable = `SLACK_CHANNEL_ID_${(await getCurrentGitBranch() || "").toUpperCase()}`; if (getEnvVar(customSlackChannelVariable)) { - slackChannelsIds.push(...getEnvVar(customSlackChannelVariable).split(",")); + slackChannelsIds.push(...(getEnvVar(customSlackChannelVariable) || "").split(",")); } // Handle specific channel for Warnings and errors const warningsErrorsChannelId = getEnvVar("SLACK_CHANNEL_ID_ERRORS_WARNINGS"); @@ -58,8 +58,8 @@ export class SlackProvider extends NotifProviderRoot { } */ blocks.push(block); // Add action blocks - if (notifMessage.buttons?.length > 0) { - const actionElements = []; + if (notifMessage.buttons?.length && notifMessage.buttons?.length > 0) { + const actionElements: any[] = []; for (const button of notifMessage.buttons) { // Url button if (button.url) { @@ -93,10 +93,10 @@ export class SlackProvider extends NotifProviderRoot { }; try { const resp = await this.slackClient.chat.postMessage(slackMessage); - uxLog(this, c.cyan(`[SlackProvider] Sent slack notification to channel ${mainNotifsChannelId}: ${resp.ok}`)); + uxLog("action", this, c.cyan(`[SlackProvider] Sent slack notification to channel ${mainNotifsChannelId}: ${resp.ok}`)); } catch (error) { - uxLog(this, c.gray("[SlackProvider] Failed slack message content: \n" + 
JSON.stringify(slackMessage, null, 2))); - uxLog(this, c.red(`[SlackProvider] Error while sending message to channel ${mainNotifsChannelId}\n${error.message}`)); + uxLog("error", this, c.grey("[SlackProvider] Failed slack message content: \n" + JSON.stringify(slackMessage, null, 2))); + uxLog("error", this, c.red(`[SlackProvider] Error while sending message to channel ${mainNotifsChannelId}\n${(error as any).message}`)); } } return; diff --git a/src/common/notifProvider/teamsProvider.ts b/src/common/notifProvider/teamsProvider.ts index e73925fc6..be760cfa3 100644 --- a/src/common/notifProvider/teamsProvider.ts +++ b/src/common/notifProvider/teamsProvider.ts @@ -1,7 +1,8 @@ -import * as c from "chalk"; -import { NotifProviderRoot } from "./notifProviderRoot"; -import { uxLog } from "../utils"; -import { NotifMessage } from "."; +import c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot.js"; +import { uxLog } from "../utils/index.js"; +import { NotifMessage } from "./index.js"; +import { CONSTANTS } from "../../config/index.js"; export class TeamsProvider extends NotifProviderRoot { public getLabel(): string { @@ -11,9 +12,10 @@ export class TeamsProvider extends NotifProviderRoot { // eslint-disable-next-line @typescript-eslint/no-unused-vars public async postNotification(notifMessage: NotifMessage): Promise { uxLog( + "warning", this, c.bold(c.yellow(`[TeamsProvider] MsTeams Web Hooks will be soon deprecated. 
Instead, please use EmailProvider with Ms Teams Channel e-mail`)), ); - uxLog(this, c.bold(c.yellow(`[TeamsProvider] User Guide: https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-email/`))); + uxLog("other", this, c.bold(c.yellow(`[TeamsProvider] User Guide: ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-integration-email/`))); } } diff --git a/src/common/notifProvider/utils.ts b/src/common/notifProvider/utils.ts index c300fcb83..6757c9f31 100644 --- a/src/common/notifProvider/utils.ts +++ b/src/common/notifProvider/utils.ts @@ -1,5 +1,5 @@ -import { NotifSeverity } from "."; -import { getEnvVar } from "../../config"; +import { NotifSeverity } from "./index.js"; +import { getEnvVar } from "../../config/index.js"; export class UtilsNotifs { public static isSlackAvailable() { @@ -44,14 +44,14 @@ export class UtilsNotifs { } public static prefixWithSeverityEmoji(text: string, severity: NotifSeverity | null) { - const emojis = { + const emojis: any = { critical: "💥", error: "❌", warning: "⚠️", info: "ℹ️", success: "✅", }; - const emoji = emojis[severity] || emojis["info"]; + const emoji = emojis[severity || ""] || emojis["info"]; return `${emoji} ${text}`; } diff --git a/src/common/ticketProvider/azureBoardsProvider.ts b/src/common/ticketProvider/azureBoardsProvider.ts index 9983dd102..6c0db1de3 100644 --- a/src/common/ticketProvider/azureBoardsProvider.ts +++ b/src/common/ticketProvider/azureBoardsProvider.ts @@ -1,21 +1,22 @@ /* jscpd:ignore-start */ import * as azdev from "azure-devops-node-api"; -import { TicketProviderRoot } from "./ticketProviderRoot"; -import * as c from "chalk"; -import * as sortArray from "sort-array"; -import { Ticket } from "."; -import { getBranchMarkdown, getOrgMarkdown } from "../utils/notifUtils"; -import { extractRegexMatches, uxLog } from "../utils"; -import { SfdxError } from "@salesforce/core"; -import { GitCommitRef } from "azure-devops-node-api/interfaces/GitInterfaces"; -import { JsonPatchDocument } from 
"azure-devops-node-api/interfaces/common/VSSInterfaces"; -import { getEnvVar } from "../../config"; +import { TicketProviderRoot } from "./ticketProviderRoot.js"; +import c from "chalk"; +import sortArray from "sort-array"; +import { Ticket } from "./index.js"; +import { getBranchMarkdown, getOrgMarkdown } from "../utils/notifUtils.js"; +import { extractRegexMatches, uxLog } from "../utils/index.js"; +import { SfError } from "@salesforce/core"; +import { getEnvVar } from "../../config/index.js"; +import { GitCommitRef } from "azure-devops-node-api/interfaces/GitInterfaces.js"; +import { JsonPatchDocument } from "azure-devops-node-api/interfaces/common/VSSInterfaces.js"; +import { CommonPullRequestInfo } from "../gitProvider/index.js"; /* jscpd:ignore-end */ export class AzureBoardsProvider extends TicketProviderRoot { - protected serverUrl: string; + protected serverUrl: string | null; protected azureApi: InstanceType; - protected teamProject: string; + protected teamProject: string | null; constructor() { super(); @@ -28,8 +29,8 @@ export class AzureBoardsProvider extends TicketProviderRoot { this.isActive = true; } if (this.isActive) { - const authHandler = azdev.getHandlerFromToken(this.token); - this.azureApi = new azdev.WebApi(this.serverUrl, authHandler); + const authHandler = azdev.getHandlerFromToken(this.token || ""); + this.azureApi = new azdev.WebApi(this.serverUrl || "", authHandler); } } @@ -49,7 +50,7 @@ export class AzureBoardsProvider extends TicketProviderRoot { return "sfdx-hardis JIRA connector"; } - public static async getTicketsFromString(text: string, options: any = {}): Promise { + public static async getTicketsFromString(text: string, prInfo: CommonPullRequestInfo | null): Promise { const tickets: Ticket[] = []; // Extract Azure Boards Work Items const azureBoardsUrlRegex = /(https:\/\/.*\/_workitems\/edit\/[0-9]+)/g; @@ -72,12 +73,12 @@ export class AzureBoardsProvider extends TicketProviderRoot { return ticketsSorted; } // Get tickets from 
Azure commits - if (options.commits) { + if (prInfo?.providerInfo?.commits) { const azureBoardsProvider = new AzureBoardsProvider(); const azureApi = azureBoardsProvider.azureApi; const azureGitApi = await azureApi.getGitApi(); - const repositoryId = getEnvVar("BUILD_REPOSITORY_ID"); - const commitIds = options.commits.filter((commit) => commit.hash).map((commit) => commit.hash); + const repositoryId = getEnvVar("BUILD_REPOSITORY_ID") || ""; + const commitIds = prInfo?.providerInfo?.commits.filter((commit) => commit.hash).map((commit) => commit.hash); const azureCommits: GitCommitRef[] = []; for (const commitId of commitIds) { const commitRefs = await azureGitApi.getCommits(repositoryId, { fromCommitId: commitId, toCommitId: commitId, includeWorkItems: true }); @@ -88,8 +89,8 @@ export class AzureBoardsProvider extends TicketProviderRoot { if (!tickets.some((ticket) => ticket.id === workItem.id)) { tickets.push({ provider: "AZURE", - url: workItem.url, - id: workItem.id, + url: workItem.url || "", + id: workItem.id || "", }); } } @@ -97,8 +98,8 @@ export class AzureBoardsProvider extends TicketProviderRoot { } // Get tickets from Azure PR - if (options?.pullRequestInfo?.workItemRefs?.length) { - for (const workItemRef of options.pullRequestInfo.workItemRefs) { + if (prInfo?.providerInfo?.workItemRefs?.length) { + for (const workItemRef of prInfo.providerInfo.workItemRefs) { if (!tickets.some((ticket) => ticket.id === workItemRef.id)) { tickets.push({ provider: "AZURE", @@ -117,15 +118,16 @@ export class AzureBoardsProvider extends TicketProviderRoot { const azureTicketsNumber = tickets.filter((ticket) => ticket.provider === "AZURE").length; if (azureTicketsNumber > 0) { uxLog( + "action", this, c.cyan( `[AzureBoardsProvider] Now trying to collect ${azureTicketsNumber} tickets infos from Azure Boards Server ` + - process.env.SYSTEM_COLLECTIONURI + - " ...", + process.env.SYSTEM_COLLECTIONURI + + " ...", ), ); } - const azureWorkItemApi = await 
this.azureApi.getWorkItemTrackingApi(this.serverUrl); + const azureWorkItemApi = await this.azureApi.getWorkItemTrackingApi(this.serverUrl || ""); for (const ticket of tickets) { if (ticket.provider === "AZURE") { const ticketInfo = await azureWorkItemApi.getWorkItem(Number(ticket.id)); @@ -137,44 +139,44 @@ export class AzureBoardsProvider extends TicketProviderRoot { if (ticketInfo?._links && ticketInfo._links["html"] && ticketInfo._links["html"]["href"]) { ticket.url = ticketInfo?._links["html"]["href"]; } - uxLog(this, c.grey("[AzureBoardsProvider] Collected data for Work Item " + ticket.id)); + uxLog("log", this, c.grey("[AzureBoardsProvider] Collected data for Work Item " + ticket.id)); } else { - uxLog(this, c.yellow("[AzureBoardsProvider] Unable to get Azure Boards WorkItem " + ticket.id + "\n" + c.grey(JSON.stringify(ticketInfo)))); + uxLog("warning", this, c.yellow("[AzureBoardsProvider] Unable to get Azure Boards WorkItem " + ticket.id + "\n" + c.grey(JSON.stringify(ticketInfo)))); } } } return tickets; } - public async postDeploymentComments(tickets: Ticket[], org: string, pullRequestInfo: any) { - uxLog(this, c.cyan(`[AzureBoardsProvider] Try to post comments on ${tickets.length} work items...`)); + public async postDeploymentComments(tickets: Ticket[], org: string, pullRequestInfo: CommonPullRequestInfo | null): Promise { + uxLog("action", this, c.cyan(`[AzureBoardsProvider] Try to post comments on ${tickets.length} work items...`)); const orgMarkdown = await getOrgMarkdown(org, "html"); const branchMarkdown = await getBranchMarkdown("html"); const tag = await this.getDeploymentTag(); const commentedTickets: Ticket[] = []; const taggedTickets: Ticket[] = []; - const azureWorkItemApi = await this.azureApi.getWorkItemTrackingApi(this.serverUrl); + const azureWorkItemApi = await this.azureApi.getWorkItemTrackingApi(this.serverUrl || ""); for (const ticket of tickets) { if (ticket.foundOnServer) { let azureBoardsComment = `Deployed from branch 
${branchMarkdown} to org ${orgMarkdown}`; if (pullRequestInfo) { - const prUrl = pullRequestInfo.web_url || pullRequestInfo.html_url || pullRequestInfo.url; + const prUrl = pullRequestInfo.webUrl || ""; if (prUrl) { - const prAuthor = pullRequestInfo?.authorName || pullRequestInfo?.author?.login || pullRequestInfo?.author?.name || null; + const prAuthor = pullRequestInfo.authorName; azureBoardsComment += `

PR: ${pullRequestInfo.title}` + (prAuthor ? ` by ${prAuthor}` : ""); } } // Post comment try { - const commentPostRes = await azureWorkItemApi.addComment({ text: azureBoardsComment }, this.teamProject, Number(ticket.id)); - if (commentPostRes && commentPostRes?.id > 0) { + const commentPostRes = await azureWorkItemApi.addComment({ text: azureBoardsComment }, this.teamProject || "", Number(ticket.id)); + if (commentPostRes?.id && commentPostRes?.id > 0) { commentedTickets.push(ticket); } else { - throw new SfdxError("commentPostRes: " + commentPostRes); + throw new SfError("commentPostRes: " + commentPostRes); } } catch (e6) { - uxLog(this, c.yellow(`[AzureBoardsProvider] Error while posting comment on ${ticket.id}\n${e6.message}\n${c.grey(e6.stack)}`)); + uxLog("warning", this, c.yellow(`[AzureBoardsProvider] Error while posting comment on ${ticket.id}\n${(e6 as any).message}\n${c.grey((e6 as any).stack)}`)); } // Add tag @@ -186,26 +188,28 @@ export class AzureBoardsProvider extends TicketProviderRoot { value: tag, }, ]; - const workItem = await azureWorkItemApi.updateWorkItem({}, patchDocument, Number(ticket.id), this.teamProject); - if (workItem && workItem?.id > 0) { + const workItem = await azureWorkItemApi.updateWorkItem({}, patchDocument, Number(ticket.id), this.teamProject || ""); + if (workItem?.id && workItem?.id > 0) { taggedTickets.push(ticket); } else { - throw new SfdxError("tag workItem: " + workItem); + throw new SfError("tag workItem: " + workItem); } } catch (e6) { - uxLog(this, c.yellow(`[AzureBoardsProvider] Error while adding tag ${tag} on ${ticket.id}\n${e6.message}\n${c.grey(e6.stack)}`)); + uxLog("warning", this, c.yellow(`[AzureBoardsProvider] Error while adding tag ${tag} on ${ticket.id}\n${(e6 as any).message} \n${c.grey((e6 as any).stack)} `)); } } } uxLog( + "log", this, - c.gray( + c.grey( `[AzureBoardsProvider] Posted comments on ${commentedTickets.length} work item(s): ` + commentedTickets.map((ticket) => ticket.id).join(", "), ), ); 
uxLog( + "log", this, - c.gray( + c.grey( `[AzureBoardsProvider] Added tag ${tag} on ${taggedTickets.length} work item(s): ` + taggedTickets.map((ticket) => ticket.id).join(", "), ), ); diff --git a/src/common/ticketProvider/genericProvider.ts b/src/common/ticketProvider/genericProvider.ts index 9d2be154f..1c34c0fc5 100644 --- a/src/common/ticketProvider/genericProvider.ts +++ b/src/common/ticketProvider/genericProvider.ts @@ -1,12 +1,13 @@ -import { Ticket } from "."; -import * as sortArray from "sort-array"; -import { extractRegexMatches } from "../utils"; -import { TicketProviderRoot } from "./ticketProviderRoot"; -import { getEnvVar } from "../../config"; +import { Ticket } from "./index.js"; +import sortArray from "sort-array"; +import { extractRegexMatches } from "../utils/index.js"; +import { TicketProviderRoot } from "./ticketProviderRoot.js"; +import { getEnvVar } from "../../config/index.js"; +import { CommonPullRequestInfo } from "../gitProvider/index.js"; export class GenericTicketingProvider extends TicketProviderRoot { - private ticketRefRegex: string; - private ticketUrlBuilder: string; + private ticketRefRegex: string | null; + private ticketUrlBuilder: string | null; constructor() { super(); @@ -28,9 +29,9 @@ export class GenericTicketingProvider extends TicketProviderRoot { return tickets; } // Extract tickets using GENERIC_TICKETING_PROVIDER_REGEX regexp - const ticketRefRegexExec = new RegExp(getEnvVar("GENERIC_TICKETING_PROVIDER_REGEX"), "g"); + const ticketRefRegexExec = new RegExp(getEnvVar("GENERIC_TICKETING_PROVIDER_REGEX") || "", "g"); const regexMatches = await extractRegexMatches(ticketRefRegexExec, text); - const ticketUrlBuilder = getEnvVar("GENERIC_TICKETING_PROVIDER_URL_BUILDER"); + const ticketUrlBuilder = getEnvVar("GENERIC_TICKETING_PROVIDER_URL_BUILDER") || ""; for (const genericTicketRef of regexMatches) { const genericTicketUrl = ticketUrlBuilder.replace("{REF}", genericTicketRef); if (!tickets.some((ticket) => ticket.url === 
genericTicketUrl)) { @@ -55,7 +56,7 @@ export class GenericTicketingProvider extends TicketProviderRoot { } // eslint-disable-next-line @typescript-eslint/no-unused-vars - public async postDeploymentComments(tickets: Ticket[], _org: string, _pullRequestInfo: any) { + public async postDeploymentComments(tickets: Ticket[], _org: string, _pullRequestInfo: CommonPullRequestInfo | null) { // No remote server here so do nothing return tickets; } diff --git a/src/common/ticketProvider/index.ts b/src/common/ticketProvider/index.ts index 23e38b212..72825cc3b 100644 --- a/src/common/ticketProvider/index.ts +++ b/src/common/ticketProvider/index.ts @@ -1,10 +1,11 @@ -import * as c from "chalk"; -import * as sortArray from "sort-array"; -import { JiraProvider } from "./jiraProvider"; -import { TicketProviderRoot } from "./ticketProviderRoot"; -import { uxLog } from "../utils"; -import { GenericTicketingProvider } from "./genericProvider"; -import { AzureBoardsProvider } from "./azureBoardsProvider"; +import c from "chalk"; +import sortArray from "sort-array"; +import { JiraProvider } from "./jiraProvider.js"; +import { TicketProviderRoot } from "./ticketProviderRoot.js"; +import { uxLog } from "../utils/index.js"; +import { GenericTicketingProvider } from "./genericProvider.js"; +import { AzureBoardsProvider } from "./azureBoardsProvider.js"; +import { CommonPullRequestInfo } from "../gitProvider/index.js"; export const allTicketProviders = [JiraProvider, GenericTicketingProvider, AzureBoardsProvider]; @@ -34,7 +35,7 @@ export abstract class TicketProvider { public static async collectTicketsInfo(tickets: Ticket[]): Promise { const ticketProviders = this.getInstances(); if (ticketProviders.length === 0) { - uxLog(this, c.gray(`[TicketProvider] No ticket provider has been configured`)); + uxLog("error", this, c.grey(`[TicketProvider] No ticket provider has been configured`)); } for (const ticketProvider of ticketProviders) { if (ticketProvider.isActive) { @@ -46,7 +47,7 @@ 
export abstract class TicketProvider { // Process Ticket providers actions after a deployment. // Can be comments on JIRA, and maybe later status changes ? :) - public static async postDeploymentActions(tickets: Ticket[], org: string, pullRequestInfo: any) { + public static async postDeploymentActions(tickets: Ticket[], org: string, pullRequestInfo: CommonPullRequestInfo | null) { const ticketProviders = this.getInstances(); for (const ticketProvider of ticketProviders) { if (ticketProvider.isActive) { diff --git a/src/common/ticketProvider/jiraProvider.ts b/src/common/ticketProvider/jiraProvider.ts index a5aeb9242..37c53bf16 100644 --- a/src/common/ticketProvider/jiraProvider.ts +++ b/src/common/ticketProvider/jiraProvider.ts @@ -1,15 +1,16 @@ -import * as JiraApi from "jira-client"; -import { TicketProviderRoot } from "./ticketProviderRoot"; -import * as c from "chalk"; -import * as sortArray from "sort-array"; -import { Ticket } from "."; -import { getBranchMarkdown, getOrgMarkdown } from "../utils/notifUtils"; -import { extractRegexMatches, uxLog } from "../utils"; -import { SfdxError } from "@salesforce/core"; -import { getEnvVar } from "../../config"; +import JiraApi from "jira-client"; +import { TicketProviderRoot } from "./ticketProviderRoot.js"; +import c from "chalk"; +import sortArray from "sort-array"; +import { Ticket } from "./index.js"; +import { getBranchMarkdown, getOrgMarkdown } from "../utils/notifUtils.js"; +import { extractRegexMatches, uxLog } from "../utils/index.js"; +import { SfError } from "@salesforce/core"; +import { getEnvVar } from "../../config/index.js"; +import { CommonPullRequestInfo } from "../gitProvider/index.js"; export class JiraProvider extends TicketProviderRoot { - private jiraClient: InstanceType; + private jiraClient: InstanceType | any = null; constructor() { super(); @@ -21,13 +22,13 @@ export class JiraProvider extends TicketProviderRoot { }; // Basic Auth if (getEnvVar("JIRA_EMAIL") && getEnvVar("JIRA_TOKEN")) { - 
jiraOptions.username = getEnvVar("JIRA_EMAIL"); - jiraOptions.password = getEnvVar("JIRA_TOKEN"); + jiraOptions.username = getEnvVar("JIRA_EMAIL") || ""; + jiraOptions.password = getEnvVar("JIRA_TOKEN") || ""; this.isActive = true; } // Personal access token if (getEnvVar("JIRA_PAT")) { - jiraOptions.bearer = getEnvVar("JIRA_PAT"); + jiraOptions.bearer = getEnvVar("JIRA_PAT") || ""; this.isActive = true; } if (this.isActive) { @@ -80,7 +81,7 @@ export class JiraProvider extends TicketProviderRoot { } // Extract JIRA tickets using Identifiers const jiraBaseUrl = getEnvVar("JIRA_HOST") || "https://define.JIRA_HOST.in.cicd.variables/"; - const jiraRegex = getEnvVar("JIRA_TICKET_REGEX") || "(?<=[^a-zA-Z0-9_-]|^)([A-Za-z]{2,10}-\\d{1,6})(?=[^a-zA-Z0-9_-]|$)"; + const jiraRegex = getEnvVar("JIRA_TICKET_REGEX") || "(?<=[^a-zA-Z0-9_-]|^)([A-Za-z0-9]{2,10}-\\d{1,6})(?=[^a-zA-Z0-9_-]|$)"; const jiraRefRegex = new RegExp(jiraRegex, "gm"); const jiraRefs = await extractRegexMatches(jiraRefRegex, text); const jiraBaseUrlBrowse = jiraBaseUrl.replace(/\/$/, "") + "/browse/"; @@ -103,17 +104,18 @@ export class JiraProvider extends TicketProviderRoot { const jiraTicketsNumber = tickets.filter((ticket) => ticket.provider === "JIRA").length; if (jiraTicketsNumber > 0) { uxLog( + "action", this, c.cyan(`[JiraProvider] Now trying to collect ${jiraTicketsNumber} tickets infos from JIRA server ` + process.env.JIRA_HOST + " ..."), ); } for (const ticket of tickets) { if (ticket.provider === "JIRA") { - let ticketInfo: JiraApi.JsonResponse; + let ticketInfo: JiraApi.JsonResponse | null = null; try { ticketInfo = await this.jiraClient.getIssue(ticket.id); } catch (e) { - uxLog(this, c.yellow(`[JiraApi] Error while trying to get ${ticket.id} information: ${e.message}`)); + uxLog("warning", this, c.yellow(`[JiraApi] Error while trying to get ${ticket.id} information: ${(e as Error).message}`)); } if (ticketInfo) { const body = @@ -126,25 +128,25 @@ export class JiraProvider extends 
TicketProviderRoot { ticket.status = ticketInfo.fields?.status?.id || ""; ticket.statusLabel = ticketInfo.fields?.status?.name || ""; if (ticket.subject === "") { - uxLog(this, c.yellow("[JiraProvider] Unable to collect JIRA ticket info for " + ticket.id)); + uxLog("warning", this, c.yellow("[JiraProvider] Unable to collect JIRA ticket info for " + ticket.id)); if (JSON.stringify(ticketInfo).includes("")) { - uxLog(this, c.grey("[JiraProvider] This is probably a JIRA auth config issue, as HTML is returned")); + uxLog("log", this, c.grey("[JiraProvider] This is probably a JIRA auth config issue, as HTML is returned")); } else { - uxLog(this, c.grey(JSON.stringify(ticketInfo))); + uxLog("log", this, c.grey(JSON.stringify(ticketInfo))); } ticket.foundOnServer = false; } - uxLog(this, c.grey("[JiraProvider] Collected data for ticket " + ticket.id)); + uxLog("log", this, c.grey("[JiraProvider] Collected data for ticket " + ticket.id)); } else { - uxLog(this, c.yellow("[JiraProvider] Unable to get JIRA issue " + ticket.id)); + uxLog("warning", this, c.yellow("[JiraProvider] Unable to get JIRA issue " + ticket.id)); } } } return tickets; } - public async postDeploymentComments(tickets: Ticket[], org: string, pullRequestInfo: any) { - uxLog(this, c.cyan(`[JiraProvider] Try to post comments on ${tickets.length} tickets...`)); + public async postDeploymentComments(tickets: Ticket[], org: string, pullRequestInfo: CommonPullRequestInfo | null): Promise { + uxLog("action", this, c.cyan(`[JiraProvider] Try to post comments on ${tickets.length} tickets...`)); const genericHtmlResponseError = "Probably config/access error since response is HTML"; const orgMarkdown = JSON.parse(await getOrgMarkdown(org, "jira")); @@ -159,10 +161,10 @@ export class JiraProvider extends TicketProviderRoot { let prUrl = ""; let prAuthor = ""; if (pullRequestInfo) { - prUrl = pullRequestInfo.web_url || pullRequestInfo.html_url || pullRequestInfo.url; + prUrl = pullRequestInfo.webUrl; if (prUrl) { 
prTitle = pullRequestInfo.title; - prAuthor = pullRequestInfo?.authorName || pullRequestInfo?.author?.login || pullRequestInfo?.author?.name || null; + prAuthor = pullRequestInfo?.authorName; } } const jiraComment = this.getJiraDeploymentCommentAdf( @@ -178,11 +180,11 @@ export class JiraProvider extends TicketProviderRoot { try { const commentPostRes = await this.jiraClient.addCommentAdvanced(ticket.id, { body: jiraComment }); if (JSON.stringify(commentPostRes).includes("")) { - throw new SfdxError(genericHtmlResponseError); + throw new SfError(genericHtmlResponseError); } commentedTickets.push(ticket); } catch (e6) { - uxLog(this, c.yellow(`[JiraProvider] Error while posting comment on ${ticket.id}: ${e6.message}`)); + uxLog("warning", this, c.yellow(`[JiraProvider] Error while posting comment on ${ticket.id}: ${(e6 as any).message}`)); } // Add deployment label to JIRA ticket @@ -195,22 +197,24 @@ export class JiraProvider extends TicketProviderRoot { await this.jiraClient.updateIssue(ticket.id, issueUpdate); taggedTickets.push(ticket); } catch (e6) { - if (e6.message != null && e6.message.includes("")) { - e6.message = genericHtmlResponseError; + if ((e6 as any).message != null && (e6 as any).message.includes("")) { + (e6 as any).message = genericHtmlResponseError; } - uxLog(this, c.yellow(`[JiraProvider] Error while adding label ${tag} on ${ticket.id}: ${e6.message}`)); + uxLog("warning", this, c.yellow(`[JiraProvider] Error while adding label ${tag} on ${ticket.id}: ${(e6 as any).message}`)); } } } // Summary if (commentedTickets.length > 0 || taggedTickets.length > 0) { uxLog( + "log", this, - c.gray(`[JiraProvider] Posted comments on ${commentedTickets.length} ticket(s): ` + commentedTickets.map((ticket) => ticket.id).join(", ")), + c.grey(`[JiraProvider] Posted comments on ${commentedTickets.length} ticket(s): ` + commentedTickets.map((ticket) => ticket.id).join(", ")), ); uxLog( + "log", this, - c.gray(`[JiraProvider] Added label ${tag} on 
${taggedTickets.length} ticket(s): ` + taggedTickets.map((ticket) => ticket.id).join(", ")), + c.grey(`[JiraProvider] Added label ${tag} on ${taggedTickets.length} ticket(s): ` + taggedTickets.map((ticket) => ticket.id).join(", ")), ); } return tickets; @@ -243,7 +247,7 @@ export class JiraProvider extends TicketProviderRoot { { type: "link", attrs: { - href: "https://sfdx-hardis.cloudity.com/", + href: "${CONSTANTS.DOC_URL_ROOT}/", }, }, ], diff --git a/src/common/ticketProvider/ticketProviderRoot.ts b/src/common/ticketProvider/ticketProviderRoot.ts index 977b3e6cf..2295083bb 100644 --- a/src/common/ticketProvider/ticketProviderRoot.ts +++ b/src/common/ticketProvider/ticketProviderRoot.ts @@ -1,33 +1,45 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import { Ticket } from "."; -import { getCurrentGitBranch, uxLog } from "../utils"; +import { SfError } from "@salesforce/core"; +import c from "chalk"; +import { Ticket } from "./index.js"; +import { getCurrentGitBranch, uxLog } from "../utils/index.js"; +import { CommonPullRequestInfo, GitProvider } from "../gitProvider/index.js"; export abstract class TicketProviderRoot { public isActive = false; - protected token: string; + protected token: string | null; public getLabel(): string { - throw new SfdxError("getLabel should be implemented on this call"); + throw new SfError("getLabel should be implemented on this call"); } public async collectTicketsInfo(tickets: Ticket[]) { - uxLog(this, c.yellow("collectTicketsInfo is not implemented on " + this.getLabel())); + uxLog("warning", this, c.yellow("collectTicketsInfo is not implemented on " + this.getLabel())); return tickets; } // eslint-disable-next-line @typescript-eslint/no-unused-vars - public async postDeploymentComments(tickets: Ticket[], _org: string, _pullRequestInfo: any) { - uxLog(this, c.yellow("postDeploymentComments is not implemented on " + this.getLabel())); + public async postDeploymentComments(tickets: Ticket[], _org: 
string, _pullRequestInfo: CommonPullRequestInfo | null): Promise { + uxLog("warning", this, c.yellow("postDeploymentComments is not implemented on " + this.getLabel())); return tickets; } public async getDeploymentTag(): Promise { - const currentGitBranch = await getCurrentGitBranch(); + const currentGitBranch = await getCurrentGitBranch() || ""; let tag = currentGitBranch.toUpperCase() + "_DEPLOYED"; + + if (GitProvider.isDeployBeforeMerge()) { + const prInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + const targetBranch = prInfo?.targetBranch || process.env.FORCE_TARGET_BRANCH; + if (targetBranch) { + tag = targetBranch.toUpperCase() + "_DEPLOYED"; + } + } + if (process.env?.DEPLOYED_TAG_TEMPLATE && !(process.env?.DEPLOYED_TAG_TEMPLATE || "").includes("$(")) { - tag = process.env?.DEPLOYED_TAG_TEMPLATE.replace("{BRANCH}", currentGitBranch.toUpperCase()); + const branchToUse = tag.replace("_DEPLOYED", ""); + tag = process.env?.DEPLOYED_TAG_TEMPLATE.replace("{BRANCH}", branchToUse); } + return tag; } } diff --git a/src/common/utils/apiUtils.ts b/src/common/utils/apiUtils.ts index 0f8f6f353..e0148016b 100644 --- a/src/common/utils/apiUtils.ts +++ b/src/common/utils/apiUtils.ts @@ -1,155 +1,272 @@ -import { uxLog } from "."; -import * as c from "chalk"; -import { Connection, SfdxError } from "@salesforce/core"; -import { RestApiOptions, RecordResult } from "jsforce"; -import * as ora from "ora"; +import { execSfdxJson, uxLog } from './index.js'; +import c from 'chalk'; +import { Connection } from '@salesforce/core'; +import ora, { Ora } from 'ora'; +import { WebSocketClient } from '../websocketClient.js'; +import { generateCsvFile, generateReportPath } from './filesUtils.js'; + +// Constants for record limits +const MAX_CHUNKS = Number(process.env.SOQL_MAX_BATCHES ?? 50); +const CHUNK_SIZE = Number(process.env.SOQL_CHUNK_SIZE ?? 
200); +const MAX_RECORDS = MAX_CHUNKS * CHUNK_SIZE; // Perform simple SOQL query (max results: 10000) -export function soqlQuery(soqlQuery: string, conn: Connection): Promise { - uxLog(this, c.grey("SOQL REST: " + c.italic(soqlQuery.length > 500 ? soqlQuery.substr(0, 500) + "..." : soqlQuery) + " on " + conn.instanceUrl)); - return conn.query(soqlQuery); +export async function soqlQuery(soqlQuery: string, conn: Connection): Promise { + uxLog( + "log", + this, + c.grey( + '[SOQL Query] ' + + c.italic(soqlQuery.length > 500 ? soqlQuery.substr(0, 500) + '...' : soqlQuery) + ) + ); + // First query + const res = await conn.query(soqlQuery); + let pageRes = Object.assign({}, res); + let batchCount = 1; + + // Get all page results + while (pageRes.done === false && pageRes.nextRecordsUrl && batchCount < MAX_CHUNKS) { + uxLog("log", this, c.grey(`Fetching batch ${batchCount + 1}/${MAX_CHUNKS}...`)); + pageRes = await conn.queryMore(pageRes.nextRecordsUrl); + res.records.push(...pageRes.records); + batchCount++; + } + if (!pageRes.done) { + uxLog("warning", this, c.yellow(`Warning: Query limit of ${MAX_RECORDS} records reached. Some records were not retrieved.`)); + uxLog("warning", this, c.yellow(`Consider using bulkQuery for larger datasets.`)); + } + if (batchCount > 1) { + uxLog("log", this, c.grey(`[SOQL Query] Retrieved ${res.records.length} records in ${batchCount} chunks(s)`)); + } + else { + uxLog("log", this, c.grey(`[SOQL Query] Retrieved ${res.records.length} records`)); + } + return res; } // Perform simple SOQL query with Tooling API -export function soqlQueryTooling(soqlQuery: string, conn: Connection): Promise { +export async function soqlQueryTooling(soqlQuery: string, conn: Connection): Promise { uxLog( + "log", this, - c.grey("SOQL REST Tooling: " + c.italic(soqlQuery.length > 500 ? soqlQuery.substr(0, 500) + "..." : soqlQuery) + " on " + conn.instanceUrl), + c.grey( + '[SOQL Query Tooling] ' + + c.italic(soqlQuery.length > 500 ? 
soqlQuery.substr(0, 500) + '...' : soqlQuery) + ) ); - return conn.tooling.query(soqlQuery); + // First query + const res = await conn.tooling.query(soqlQuery); + let pageRes = Object.assign({}, res); + let batchCount = 1; + // Get all page results + while (pageRes.done === false && pageRes.nextRecordsUrl) { + pageRes = await conn.tooling.queryMore(pageRes.nextRecordsUrl || ""); + res.records.push(...pageRes.records); + batchCount++; + } + if (batchCount > 1) { + uxLog("log", this, c.grey(`[SOQL Query Tooling] Retrieved ${res.records.length} records in ${batchCount} chunks(s)`)); + } else { + uxLog("log", this, c.grey(`[SOQL Query Tooling] Retrieved ${res.records.length} records`)); + } + return res; +} + +export function toolingRequest(endpoint, conn: Connection, info?): Promise { + const url = `${conn.instanceUrl}/services/data/v61.0/tooling/${endpoint}` + return conn.tooling.request({url, ...info}); +} + +// Perform Tooling Global Description API call +export function describeGlobalTooling(conn: Connection, callback?: () => void): Promise { + return conn.tooling.describeGlobal(callback); } let spinnerQ; const maxRetry = Number(process.env.BULK_QUERY_RETRY || 5); // Same than soqlQuery but using bulk. Do not use if there will be too many results for javascript to handle in memory export async function bulkQuery(soqlQuery: string, conn: Connection, retries = 3): Promise { - uxLog(this, c.grey("SOQL BULK: " + c.italic(soqlQuery.length > 500 ? soqlQuery.substr(0, 500) + "..." 
: soqlQuery))); - conn.bulk.pollInterval = 5000; // 5 sec - conn.bulk.pollTimeout = 60000; // 60 sec - const records = []; - return new Promise((resolve, reject) => { - spinnerQ = ora({ text: `Bulk query...`, spinner: "moon" }).start(); - const job = conn.bulk.query(soqlQuery); - job - .on("record", async (record) => { - records.push(record); - }) - .on("error", async (err) => { - spinnerQ.fail(`Bulk query error.`); - uxLog(this, c.yellow("Bulk query error: " + err)); - // In case of timeout, retry if max retry is not reached - if ((err + "").includes("ETIMEDOUT") && retries < maxRetry) { - uxLog(this, c.yellow("Bulk query retry attempt #" + retries + 1)); - bulkQuery(soqlQuery, conn, retries + 1) - .then((resRetry) => { - resolve(resRetry); - }) - .catch((resErr) => { - reject(resErr); - }); - } else { - // If max retry attempts reached, give up - uxLog(this, c.red("Bulk query error: max retry attempts reached, or not timeout error.")); - globalThis.sfdxHardisFatalError = true; - reject(err); - } - }) - .on("end", () => { - spinnerQ.succeed(`Bulk query completed with ${records.length} results.`); - resolve({ records: records, totalSize: records.length }); - }); - }); + const queryLabel = soqlQuery.length > 500 ? soqlQuery.substr(0, 500) + '...' : soqlQuery; + uxLog("log", this, c.grey('[BulkApiV2] ' + c.italic(queryLabel))); + conn.bulk2.pollInterval = process.env.BULKAPIV2_POLL_INTERVAL ? Number(process.env.BULKAPIV2_POLL_INTERVAL) : 5000; // 5 sec + conn.bulk2.pollTimeout = process.env.BULKAPIV2_POLL_TIMEOUT ? 
Number(process.env.BULKAPIV2_POLL_TIMEOUT) : 60000; // 60 sec + // Start query + try { + spinnerQ = ora({ text: `[BulkApiV2] Bulk Query: ${queryLabel}`, spinner: 'moon' }).start(); + const recordStream = await conn.bulk2.query(soqlQuery); + recordStream.on('error', (err) => { + uxLog("warning", this, c.yellow('Bulk Query error: ' + err)); + globalThis.sfdxHardisFatalError = true; + }); + // Wait for all results + const records = await recordStream.toArray(); + spinnerQ.succeed(`[BulkApiV2] Bulk Query completed with ${records.length} results.`); + if (WebSocketClient.isAliveWithLwcUI()) { + uxLog("log", this, c.grey(`[BulkApiV2] Bulk Query completed with ${records.length} results.`)); + } + return { records: records }; + } catch (e: any) { + spinnerQ.fail(`[BulkApiV2] Bulk query error: ${e.message}`); + // Try again if the reason is a timeout and max number of retries is not reached yet + const eStr = e + ''; + if ((eStr.includes('ETIMEDOUT') || eStr.includes('Polling timed out')) && retries < maxRetry) { + uxLog("warning", this, c.yellow('[BulkApiV2] Bulk Query retry attempt #' + retries + 1)); + uxLog("log", this, c.grey(`You can change Bulk API v2 Settings with env variables: +- BULKAPIV2_POLL_TIMEOUT (current: ${conn.bulk2.pollTimeout} ms) +- BULKAPIV2_POLL_INTERVAL (current: ${conn.bulk2.pollInterval} ms) +- BULK_QUERY_RETRY (current: ${maxRetry} max retries)`)); + return await bulkQuery(soqlQuery, conn, retries + 1); + } else { + throw e; + } + } } // When you might have more than 1000 elements in a IN condition, you need to split the request into several requests // Think to use {{IN}} in soqlQuery -export async function bulkQueryChunksIn(soqlQuery: string, conn: Connection, inElements: string[], batchSize = 1000, retries = 3): Promise { - const results = { records: [] }; +export async function bulkQueryChunksIn( + soqlQuery: string, + conn: Connection, + inElements: string[], + batchSize = 1000, + retries = 3 +): Promise { + const results = { records: [] as 
any[] }; for (let i = 0; i < inElements.length; i += batchSize) { const inElementsChunk = inElements.slice(i, i + batchSize); const replacementString = "'" + inElementsChunk.join("','") + "'"; - const soqlQueryWithInConstraint = soqlQuery.replace("{{IN}}", replacementString); + const soqlQueryWithInConstraint = soqlQuery.replace('{{IN}}', replacementString); const chunkResults = await bulkQuery(soqlQueryWithInConstraint, conn, retries); results.records.push(...chunkResults.records); } return results; } -let spinner; +// New method to bulk query records by chunks of 10000 +export async function bulkQueryByChunks( + soqlQuery: string, + conn: Connection, + batchSize = 100000, + retries = 3 +): Promise { + const results = { records: [] as any[] }; + let lastRecordId = null; + let hasMoreRecords = true; + + while (hasMoreRecords) { + let soqlQueryWithLimit = `${soqlQuery} ORDER BY Id LIMIT ${batchSize}`; + if (lastRecordId) { + soqlQueryWithLimit = `${soqlQuery} WHERE Id > '${lastRecordId}' ORDER BY Id LIMIT ${batchSize}`; + } + const chunkResults = await bulkQuery(soqlQueryWithLimit, conn, retries); + results.records.push(...chunkResults.records); + if (chunkResults.records.length > 0) { + lastRecordId = chunkResults.records[chunkResults.records.length - 1].Id; + } + hasMoreRecords = chunkResults.records.length === batchSize; + } + + return results; +} + +let spinner: Ora; // Same than soqlQuery but using bulk. 
Do not use if there will be too many results for javascript to handle in memory -export async function bulkUpdate(objectName: string, action: string, records: Array, conn: Connection): Promise { - uxLog(this, c.grey(`SOQL BULK on object ${c.bold(objectName)} with action ${c.bold(action)} (${c.bold(records.length)} records)`)); - conn.bulk.pollInterval = 5000; // 5 sec - conn.bulk.pollTimeout = 60000; // 60 sec - return new Promise((resolve, reject) => { - const job = conn.bulk.createJob(objectName, action); - const batch = job.createBatch(); - batch.execute(records); - batch.on("queue", async (batchInfo) => { - uxLog(this, c.grey("Bulk API job batch has been queued")); - uxLog(this, c.grey(JSON.stringify(batchInfo, null, 2))); - spinner = ora({ text: `Bulk Load on ${objectName} (${action})`, spinner: "moon" }).start(); - batch.poll(3 * 1000, 120 * 1000); - }); - batch.on("error", (batchInfo) => { - job.close(); - spinner.fail(`Bulk Load on ${objectName} (${action}) failed.`); - uxLog(this, c.red("Bulk query error:" + batchInfo)); - reject(batchInfo); - throw new SfdxError(c.red("Bulk query error:" + batchInfo)); - }); - batch.on("response", (results) => { - job.close(); - spinner.succeed(`Bulk Load on ${objectName} (${action}) completed.`); - resolve({ - results: results, - totalSize: results.length, - successRecordsNb: results.filter((result) => result.success).length, - errorRecordsNb: results.filter((result) => !result.success).length, - }); +export async function bulkUpdate( + objectName: string, + action: string, + records: Array, + conn: Connection +): Promise { + uxLog( + "log", + this, + c.grey( + `[BulkApiV2] Bulk ${c.bold(action.toUpperCase())} on ${c.bold(records.length)} records of object ${c.bold(objectName)}` + ) + ); + conn.bulk2.pollInterval = 5000; // 5 sec + conn.bulk2.pollTimeout = 60000; // 60 sec + // Initialize Job + spinner = ora({ text: `[BulkApiV2] Bulk ${c.bold(action.toUpperCase())} on ${c.bold(records.length)} records of object 
${c.bold(objectName)}`, spinner: 'moon' }).start(); + const job = conn.bulk2.createJob({ + operation: action as any, + object: objectName, + }); + job.on('open', () => { + spinner.text = `[BulkApiV2] Load Job ${job.id} successfully created.`; + }); + // Upload job data + await job.open(); + await job.uploadData(records); + await job.close(); + // Monitor job execution + job.on('inProgress', (jobInfo: any) => { + spinner.text = `[BulkApiV2] Processed: ${jobInfo.numberRecordsProcessed}. Failed: ${jobInfo.numberRecordsFailed}`; + }); + job.on('failed', (e) => { + spinner.fail(`[BulkApiV2] Error: ${e.message}`); + }); + await job.poll(); + const res = await job.getAllResults(); + spinner.succeed(`[BulkApiV2] Bulk ${action.toUpperCase()} on ${objectName} completed.`); + uxLog("log", this, c.grey(`[BulkApiV2] Bulk ${action.toUpperCase()} on ${objectName} completed. +- Success: ${res.successfulResults.length} records +- Failed: ${res.failedResults.length} records +- Unprocessed: ${res.unprocessedRecords.length} records`)); + const outputFile = await generateReportPath('bulk', `${objectName}-${action}`, { withDate: true }); + if (res.failedResults.length > 0) { + uxLog("warning", this, c.yellow(`[BulkApiV2] Some records failed to ${action}. 
Check the results for details.`)); + } + await generateCsvFile(res.successfulResults, outputFile.replace('.csv', '-successful.csv'), + { + fileTitle: `${objectName} - ${action} - Successful`, + noExcel: true }); + await generateCsvFile(res.failedResults, outputFile.replace('.csv', '-failed.csv'), { + fileTitle: `${objectName} - ${action} - Failed`, + noExcel: true }); + // Return results + return res; } -export async function bulkDeleteTooling(objectName: string, recordsFull: { Id: string }[], conn: Connection): Promise { - return new Promise((resolve, reject) => { - const records = recordsFull.map((record) => record.Id); - const options: RestApiOptions = { allOrNone: false }; - const handleCallback = (err: Error, result: RecordResult | RecordResult[]) => { - if (err) { - const resultObject = createResultObject(records, false, `One or more ${objectName} records failed to delete.`); - uxLog(this, c.red(`Error deleting ${objectName} records:` + resultObject)); - reject(err); - throw new SfdxError(c.red(`Error deleting ${objectName} records:` + resultObject)); - } else { - const resultsArray = Array.isArray(result) ? result : [result]; - const anyFailure = resultsArray.some((result) => !result.success); +export async function bulkDelete( + objectName: string, + recordIds: string[], + conn: Connection +): Promise { + const records = recordIds.map(recordId => { return { Id: recordId } }); + return await bulkUpdate(objectName, "delete", records, conn); +} - const resultObject = createResultObject(records, !anyFailure, anyFailure ? 
`One or more ${objectName} records failed to delete.` : ""); - resolve(resultObject); +export async function bulkDeleteTooling( + objectName: string, + recordsIds: string[], + conn: Connection +): Promise { + uxLog("log", this, c.grey(`[ToolingApi] Delete ${recordsIds.length} records on ${objectName}: ${JSON.stringify(recordsIds)}`)); + try { + const deleteJobResults = await conn.tooling.destroy(objectName, recordsIds, { allOrNone: false }); + return deleteJobResults + } catch (e: any) { + uxLog("warning", this, c.yellow(`[ToolingApi] jsforce error while calling Tooling API. Fallback to to unitary delete (longer but should work !)`)); + uxLog("log", this, c.grey(e.message)); + const deleteJobResults: any = []; + for (const record of recordsIds) { + const deleteCommand = + `sf data:delete:record --sobject ${objectName} --record-id ${record} --target-org ${conn.getUsername()} --use-tooling-api`; + const deleteCommandRes = await execSfdxJson(deleteCommand, this, { + fail: false, + output: true + }); + const deleteResult: any = { Id: record, success: true } + if (!(deleteCommandRes.status === 0)) { + deleteResult.success = false; + deleteResult.error = JSON.stringify(deleteCommandRes); } - }; - const createResultObject = (records: string | string[], success: boolean, errorMessage: string) => { - const recordsArray = Array.isArray(records) ? records : [records]; - - return { - results: recordsArray.map((record) => ({ - id: record, - success: success, - errors: success ? [] : [errorMessage], - })), - totalSize: recordsArray.length, - successRecordsNb: success ? recordsArray.length : 0, - errorRecordsNb: success ? 0 : recordsArray.length, - errorDetails: success ? 
[] : [{ error: errorMessage }], - }; - }; - try { - conn.tooling.del(objectName, records, options, handleCallback); - } catch (error) { - const resultObject = createResultObject(records, false, `One or more records failed to delete due to a synchronous error.\n${error.message}`); - reject(resultObject); - throw new SfdxError(c.red("Tooling Error:" + resultObject)); + deleteJobResults.push(deleteResult); } - }); + return { results: deleteJobResults }; + } } diff --git a/src/common/utils/authUtils.ts b/src/common/utils/authUtils.ts new file mode 100644 index 000000000..57d7ad000 --- /dev/null +++ b/src/common/utils/authUtils.ts @@ -0,0 +1,510 @@ +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { + createTempDir, + execCommand, + execSfdxJson, + findJsonInString, + getCurrentGitBranch, + getExecutionContext, + isCI, + promptInstanceUrl, + uxLog, +} from './index.js'; +import { CONSTANTS, getConfig } from '../../config/index.js'; +import { SfError } from '@salesforce/core'; +import { clearCache } from '../cache/index.js'; +import { WebSocketClient } from '../websocketClient.js'; +import { decryptFile } from '../cryptoUtils.js'; +import spawn from 'cross-spawn'; + +// Authorize an org with sfdxAuthUrl, manually or with JWT +export async function authOrg(orgAlias: string, options: any) { + const isDevHub = orgAlias.includes('DevHub'); + + let doConnect = true; + let alias = null; + let setDefaultOrg = false; + if (!options.checkAuth) { + // Check if we are already authenticated + let orgDisplayCommand = 'sf org display'; + if (orgAlias && (isCI || isDevHub) && !orgAlias.includes('force://')) { + orgDisplayCommand += ' --target-org ' + orgAlias; + setDefaultOrg = orgAlias !== 'TECHNICAL_ORG' ? 
true : false; + } else { + if ( + options?.argv.includes('--target-org') || + options?.argv.includes('--targetusername') || + options?.argv.includes('-o') || + options?.argv.includes('-u') + ) { + const posUsername = + options.argv.indexOf('--target-org') > -1 + ? options.argv.indexOf('--target-org') + 1 + : options.argv.indexOf('--targetusername') > -1 + ? options.argv.indexOf('--targetusername') + 1 + : options.argv.indexOf('-o') > -1 + ? options.argv.indexOf('-o') + 1 + : options.argv.indexOf('-u') > -1 + ? options.argv.indexOf('-u') + 1 : null; + if (posUsername === null) { + throw new SfError("Unable to find alias (authUtils.authOrg)") + } + alias = options.argv[posUsername]; + orgDisplayCommand += ' --target-org ' + alias; + } + } + const orgInfoResult = await execSfdxJson(orgDisplayCommand, this, { + fail: false, + output: false, + debug: options.debug, + }); + if ( + orgInfoResult.result && + orgInfoResult.result.connectedStatus !== 'RefreshTokenAuthError' && + ((orgInfoResult.result.connectedStatus && orgInfoResult.result.connectedStatus.includes('Connected')) || + (options.scratch && orgInfoResult.result.connectedStatus.includes('Unknown')) || + (orgInfoResult.result.alias === orgAlias && orgInfoResult.result.id != null) || + (orgInfoResult.result.username === orgAlias && orgInfoResult.result.id != null) || + (isDevHub && orgInfoResult.result.id != null)) + ) { + if (orgInfoResult.result.apiVersion) { + globalThis.currentOrgApiVersion = orgInfoResult.result.apiVersion; + } + // Set as default username or devhubusername + uxLog( + "log", + this, + `[sfdx-hardis] You are already ${c.green('connected')} as ${c.green( + orgInfoResult.result.username + )} on org ${c.green(orgInfoResult.result.instanceUrl)} (apiVersion ${globalThis.currentOrgApiVersion})` + ); + + if (orgInfoResult.result.expirationDate) { + uxLog("action", this, c.cyan(`[sfdx-hardis] Org expiration date: ${c.yellow(orgInfoResult.result.expirationDate)}`)); + } + if (!isCI) { + uxLog( + 
"warning", + this, + c.yellow( + c.italic( + `If this is NOT the org you want to play with, ${c.whiteBright( + c.bold('hit CTRL+C') + )}, then input ${c.whiteBright(c.bold('sf hardis:org:select'))}` + ) + ) + ); + } + if (setDefaultOrg) { + const setDefaultOrgCommand = `sf config set ${alias ? alias : isDevHub ? 'target-dev-hub' : 'target-org'}=${orgInfoResult.result.username + }`; + await execSfdxJson(setDefaultOrgCommand, this, { fail: false }); + } + doConnect = false; + } + } + // Perform authentication + let updateSfCliCommandOrg = false; + if (doConnect) { + let logged = false; + const config = await getConfig('user'); + + // Manage auth with sfdxAuthUrl (CI & scratch org only) + const authUrlVarName = `SFDX_AUTH_URL_${orgAlias}`; + const authUrlVarNameUpper = `SFDX_AUTH_URL_${orgAlias.toUpperCase()}`; + let authUrl = process.env[authUrlVarName] || process.env[authUrlVarNameUpper] || orgAlias || ''; + if (isDevHub) { + authUrl = + process.env[authUrlVarName] || + process.env[authUrlVarNameUpper] || + process.env.SFDX_AUTH_URL_DEV_HUB || + orgAlias || + ''; + } + if (authUrl.includes('force://')) { + const authFile = path.join(await createTempDir(), 'sfdxScratchAuth.txt'); + await fs.writeFile(authFile, authUrl, 'utf8'); + const authCommand = + `sf org login sfdx-url -f ${authFile}` + + (isDevHub ? ` --set-default-dev-hub` : (setDefaultOrg ? ` --set-default` : '')) + + (!orgAlias.includes('force://') ? ` --alias ${orgAlias}` : ''); + await execCommand(authCommand, this, { fail: true, output: false }); + uxLog("action", this, c.cyan('Successfully logged using sfdxAuthUrl')); + await fs.remove(authFile); + return; + } + + // Get auth variables, with priority CLI arguments, environment variables, then .sfdx-hardis.yml config file + let username = + typeof options.Command.flags?.targetusername === 'string' + ? options.Command.flags?.targetusername + : process.env.TARGET_USERNAME || isDevHub + ? 
config.devHubUsername + : config.targetUsername; + if (username == null && isCI) { + const gitBranchFormatted = await getCurrentGitBranch({ formatted: true }); + console.error( + c.yellow( + `[sfdx-hardis][WARNING] You may have to define ${c.bold( + isDevHub + ? 'devHubUsername in .sfdx-hardis.yml' + : options.scratch + ? 'cache between your CI jobs: folder ".cache/sfdx-hardis/.sfdx"' + : `targetUsername in config/branches/.sfdx-hardis.${gitBranchFormatted}.yml` + )} ` + ) + ); + process.exit(1); + } + let instanceUrl = + typeof options.Command?.flags?.instanceurl === 'string' && + (options.Command?.flags?.instanceurl || '').startsWith('https') + ? options.Command.flags.instanceurl + : (process.env.INSTANCE_URL || '').startsWith('https') + ? process.env.INSTANCE_URL + : config.instanceUrl + ? config.instanceUrl + : 'https://login.salesforce.com'; + // Get JWT items clientId and certificate key + const sfdxClientId = await getSfdxClientId(orgAlias, config); + const crtKeyfile = await getCertificateKeyFile(orgAlias, config); + const usernameArg = options.setDefault === false ? '' : isDevHub ? '--set-default-dev-hub' : '--set-default'; + if (crtKeyfile && sfdxClientId && username) { + // Login with JWT + const loginCommand = + 'sf org login jwt' + + ` ${usernameArg}` + + ` --client-id ${sfdxClientId}` + + ` --jwt-key-file ${crtKeyfile}` + + ` --username ${username}` + + ` --instance-url ${instanceUrl}` + + (orgAlias ? 
` --alias ${orgAlias}` : ''); + const jwtAuthRes = await execSfdxJson(loginCommand, this, { + fail: false, + output: false + }); + // await fs.remove(crtKeyfile); // Delete private key file from temp folder TODO: move to postrun hook + logged = jwtAuthRes.status === 0; + if (!logged) { + console.error(c.red(`[sfdx-hardis][ERROR] JWT login error: \n${JSON.stringify(jwtAuthRes)}`)); + process.exit(1); + } + } else if (!isCI) { + // Login with web auth + const orgLabel = `org ${orgAlias}`; + console.warn( + c.yellow( + c.bold( + `[sfdx-hardis] You must be connected to ${orgLabel} to perform this command. Please login in the open web browser` + ) + ) + ); + + if (isCI) { + console.error( + c.red(`See CI authentication doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`) + ); + throw new SfError( + `In CI context, you may define: + - a .sfdx-hardis.yml file with instanceUrl and targetUsername properties (or INSTANCE_URL and TARGET_USERNAME repo variables) + - a repository secret variable SFDX_CLIENT_ID with consumer key of SF CLI connected app + - store server.key file within ssh folder + ` + ); + } + const orgTypes = isDevHub ? ['login'] : ['login', 'test']; + instanceUrl = await promptInstanceUrl(orgTypes, orgAlias); + + const configInfoUsr = await getConfig('user'); + + const executionContext = getExecutionContext(); + + // // Prompt user for Web or Device login + // const loginTypeRes = await prompts({ + // name: 'loginType', + // type: 'select', + // message: "Select a login type", + // description: 'Choose the authentication method that works best for your environment. 
Use Web if unsure.', + // choices: [ + // { + // title: '🌐 Web Login (If VsCode is locally installed on your computer)', + // value: 'web', + // }, + // { + // title: '📟 Device Login (Useful for CodeBuilder / CodeSpaces)', + // value: 'device', + // description: 'Look at the instructions in the console terminal if you select this option', + // }, + // ], + // default: 'web', + // initial: 'web', + // }); + + let loginResult: any = null; + // Manage device login + if (executionContext === "web") { + loginResult = await authenticateUsingDeviceLogin(instanceUrl, orgAlias, configInfoUsr, options, isDevHub, loginResult); + } + // Web Login if device login not used + if (loginResult == null) { + uxLog("action", this, c.cyan("Authenticating using web login...")); + const loginCommand = + 'sf org login web' + + (alias ? ` --alias ${alias}` : options.setDefault === false ? '' : isDevHub ? ' --set-default-dev-hub' : ' --set-default') + + ` --instance-url ${instanceUrl}` + + (orgAlias && orgAlias !== configInfoUsr?.scratchOrgAlias ? ` --alias ${orgAlias}` : ''); + try { + loginResult = await execCommand(loginCommand, this, { output: false, fail: true, spinner: false }); + } catch (e) { + // Give instructions if server is unavailable + if (((e as Error).message || '').includes('Cannot start the OAuth redirect server on port')) { + uxLog( + "warning", + this, + c.yellow( + c.bold( + 'You might have a ghost SF CLI command. Open Task Manager, search for Node.js processes, kill them, then try again' + ) + ) + ); + } + throw e; + } + } + await clearCache('sf org list'); + logged = loginResult.status === 0; + username = loginResult?.username || 'err'; + instanceUrl = loginResult?.instanceUrl || instanceUrl; + updateSfCliCommandOrg = true; + } else { + console.error(c.red(`[sfdx-hardis] Unable to connect to org ${orgAlias} with browser. 
Please try again :)`)); + } + if (logged) { + // Retrieve default username or dev hub username if not returned by command + if (username === 'err') { + // Using alias + if (alias) { + const configGetRes = await execSfdxJson(`sf org display --target-org ${alias}`, this, { + output: false, + fail: false, + }); + username = configGetRes?.result?.username || ''; + } else { + // Using default org + const configGetRes = await execSfdxJson('sf config get ' + (isDevHub ? 'target-dev-hub' : 'target-org'), this, { + output: false, + fail: false, + }); + username = configGetRes?.result[0]?.value || ''; + } + } + uxLog("other", this, `Successfully logged to ${c.green(instanceUrl)} with ${c.green(username)}`); + WebSocketClient.sendRefreshStatusMessage(); + // Assign org to SfCommands + // if (isDevHub) { + // options.Command.flags["target-org"] = username; + // options.Command.assignHubOrg(); // seems to be automatically done by SfCommand under the hook + // } else { + // options.Command.flags["target-dev-hub"] = username; + // options.Command.assignOrg(); // seems to be automatically done by SfCommand under the hook + // } + // Display warning message in case of local usage (not CI), and not login command + if (!(options?.Command?.id || "").startsWith("hardis:auth:login") && updateSfCliCommandOrg === true) { + uxLog("warning", this, c.yellow("*** IF YOU SEE AN AUTH ERROR PLEASE RUN AGAIN THE SAME COMMAND :) ***")); + } + } else { + console.error(c.red('[sfdx-hardis][ERROR] You must be logged to an org to perform this action')); + process.exit(1); // Exit because we should succeed to connect + } + } +} + +export async function authenticateUsingDeviceLogin(instanceUrl: any, orgAlias: string, configInfoUsr: any, options: any, isDevHub: boolean, loginResult: any) { + uxLog("action", this, c.cyan("Authenticating using device login...")); + const loginCommandArgs = ['org login device', '--instance-url', instanceUrl]; + if (orgAlias && orgAlias !== configInfoUsr?.scratchOrgAlias) { 
+ loginCommandArgs.push(...['--alias', orgAlias]); + } + if (options.setDefault === true && isDevHub) { + loginCommandArgs.push('--set-default-dev-hub'); + } + if (options.setDefault === true && !isDevHub) { + loginCommandArgs.push('--set-default'); + } + const loginCommand = 'sf ' + loginCommandArgs.join(' ') + " --json"; + try { + // Spawn and get output in real time to send it to the console + const authProcess = spawn(loginCommand, { shell: true }); + if (!authProcess.stdout || !authProcess.stderr) { + throw new SfError('Error during device login (no output)'); + } + let allOutput = ""; + authProcess.stdout.on('data', (data) => { + allOutput += data.toString(); + const jsonOutput = findJsonInString(allOutput); + if (jsonOutput) { + if (jsonOutput.verification_uri && jsonOutput.user_code) { + uxLog("action", this, `To authenticate, visit ${c.cyan(jsonOutput.verification_uri)} and enter code ${c.green(jsonOutput.user_code)}`); + allOutput = ""; + } + else if (jsonOutput?.status === 0 && jsonOutput?.result?.username) { + loginResult = jsonOutput.result; + loginResult.status = loginResult.status ?? jsonOutput.status; + } + } + }); + authProcess.stderr.on('data', (data) => { + uxLog("warning", this, "Warning: " + c.yellow(data.toString())); + }); + await new Promise((resolve, reject) => { + authProcess.on('close', (data) => { + resolve(data); + }); + authProcess.on('error', (data) => { + reject(data); + }); + }); + } catch (e) { + uxLog("error", this, c.red(`Device login error: \n${(e as Error).message || e}. 
Falling back to web login...`)); + loginResult = null; + } + return loginResult; +} + +// Get clientId for SFDX connected app +async function getSfdxClientId(orgAlias: string, config: any) { + // Try to find in global variables + const sfdxClientIdVarName = `SFDX_CLIENT_ID_${orgAlias}`; + if (process.env[sfdxClientIdVarName]) { + console.log(c.grey(`[sfdx-hardis] Using ${sfdxClientIdVarName.toUpperCase()} env variable`)); + return process.env[sfdxClientIdVarName]; + } + const sfdxClientIdVarNameUpper = sfdxClientIdVarName.toUpperCase(); + if (process.env[sfdxClientIdVarNameUpper]) { + console.log(c.grey(`[sfdx-hardis] Using ${sfdxClientIdVarNameUpper} env variable`)); + return process.env[sfdxClientIdVarNameUpper]; + } + if (process.env.SFDX_CLIENT_ID) { + console.warn( + c.yellow( + `[sfdx-hardis] If you use CI on multiple branches & orgs, you should better define CI variable ${c.bold( + sfdxClientIdVarNameUpper + )} than SFDX_CLIENT_ID` + ) + ); + console.warn( + c.yellow(`See CI authentication doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`) + ); + return process.env.SFDX_CLIENT_ID; + } + // Try to find in config files ONLY IN LOCAL MODE (in CI, it's supposed to be a CI variable) + if (!isCI && config.devHubSfdxClientId) { + console.log(c.grey(`[sfdx-hardis] Using devHubSfdxClientId config variable`)); + return config.devHubSfdxClientId; + } + if (isCI) { + console.error( + c.red( + `[sfdx-hardis] You must set env variable ${c.bold( + sfdxClientIdVarNameUpper + )} with the Consumer Key value defined on SFDX Connected app` + ) + ); + console.error(c.red(`See CI authentication doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`)); + } + return null; +} + +// Get clientId for SFDX connected app +async function getKey(orgAlias: string, config: any) { + // Try to find in global variables + const sfdxClientKeyVarName = `SFDX_CLIENT_KEY_${orgAlias}`; + if (process.env[sfdxClientKeyVarName]) { + console.log(c.grey(`[sfdx-hardis] Using 
${sfdxClientKeyVarName.toUpperCase()} env variable`)); + return process.env[sfdxClientKeyVarName]; + } + const sfdxClientKeyVarNameUpper = sfdxClientKeyVarName.toUpperCase(); + if (process.env[sfdxClientKeyVarNameUpper]) { + console.log(c.grey(`[sfdx-hardis] Using ${sfdxClientKeyVarNameUpper} env variable`)); + return process.env[sfdxClientKeyVarNameUpper]; + } + if (process.env.SFDX_CLIENT_KEY) { + console.warn( + c.yellow( + `[sfdx-hardis] If you use CI on multiple branches & orgs, you should better define CI variable ${c.bold( + sfdxClientKeyVarNameUpper + )} than SFDX_CLIENT_KEY` + ) + ); + console.warn( + c.yellow(`See CI authentication doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`) + ); + return process.env.SFDX_CLIENT_KEY; + } + // Try to find in config files ONLY IN LOCAL MODE (in CI, it's supposed to be a CI variable) + if (!isCI && config.devHubSfdxClientKey) { + console.log(c.grey(`[sfdx-hardis] Using devHubSfdxClientKey config variable`)); + return config.devHubSfdxClientKey; + } + if (isCI) { + console.error( + c.red( + `[sfdx-hardis] You must set env variable ${c.bold( + sfdxClientKeyVarNameUpper + )} with the value of SSH private key encryption key` + ) + ); + console.error(c.red(`See CI authentication doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`)); + } + return null; +} + +// Try to find certificate key file for SF CLI connected app in different locations +async function getCertificateKeyFile(orgAlias: string, config: any) { + const filesToTry = [ + `./config/branches/.jwt/${orgAlias}.key`, + `./config/.jwt/${orgAlias}.key`, + `./ssh/${orgAlias}.key`, + `./.ssh/${orgAlias}.key`, + './ssh/server.key', + ]; + // Check if we find multiple files + const filesFound = filesToTry.filter((file) => fs.existsSync(file)); + if (filesFound.length > 1) { + console.warn( + c.yellow( + `[sfdx-hardis] Multiple certificate key files found: ${filesFound.join( + ', ' + )}. Please keep only one certificate key file. 
If you don't know which one, remove all and re-run the configuration command` + ) + ); + } + + for (const file of filesToTry) { + if (fs.existsSync(file)) { + // Decrypt SSH private key and write a temporary file + const sshKey = await getKey(orgAlias, config); + if (sshKey == null) { + continue; + } + + const tmpSshKeyFile = path.join(await createTempDir(), `${orgAlias}.key`); + console.log(c.grey(`[sfdx-hardis] Decrypting key...`)); + await decryptFile(file, tmpSshKeyFile, sshKey); + return tmpSshKeyFile; + } + } + if (isCI) { + console.error( + c.red( + `[sfdx-hardis] You must put a certificate key to connect via JWT.Possible locations:\n -${filesToTry.join( + '\n -' + )}` + ) + ); + console.error(c.red(`See CI authentication doc at ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`)); + } + return null; +} diff --git a/src/common/utils/branchStrategyMermaidBuilder.ts b/src/common/utils/branchStrategyMermaidBuilder.ts new file mode 100644 index 000000000..e6eae4c0a --- /dev/null +++ b/src/common/utils/branchStrategyMermaidBuilder.ts @@ -0,0 +1,268 @@ +import sortArray from "sort-array"; +import { prettifyFieldName } from "./flowVisualiser/nodeFormatUtils.js"; +import { isIntegration, isPreprod, isProduction } from "./orgConfigUtils.js"; + + +export class BranchStrategyMermaidBuilder { + private branchesAndOrgs: any[]; + private gitBranches: any[]; + private salesforceOrgs: any[] = []; + private salesforceDevOrgsGroup: string[] = []; + private gitLinks: any[] = []; + private deployLinks: any[] = []; + private sbDevLinks: any[] = []; + private retrofitLinks: any[] = []; + private mermaidLines: string[] = []; + private featureBranchNb: number = 0; + + constructor(branchesAndOrgs: any[]) { + this.branchesAndOrgs = branchesAndOrgs; + } + + public build(options: { format: "list" | "string", withMermaidTag: boolean }): string | string[] { + this.listGitBranchesAndLinks(); + this.listSalesforceOrgsAndLinks(); + this.generateMermaidLines(); + if 
(options.withMermaidTag) { + this.mermaidLines.unshift("```mermaid"); + this.mermaidLines.push("```"); + } + return options.format === "list" ? this.mermaidLines : this.mermaidLines.join("\n"); + } + + private listGitBranchesAndLinks(): void { + const branchesWhoAreMergeTargets: string[] = []; + const branchesMergingInPreprod: string[] = []; + this.gitBranches = this.branchesAndOrgs.map((branchAndOrg) => { + const nodeName = branchAndOrg.branchName + "Branch" + for (const mergeTarget of branchAndOrg.mergeTargets || []) { + if (!branchesWhoAreMergeTargets.includes(mergeTarget)) { + branchesWhoAreMergeTargets.push(mergeTarget); + } + if (isPreprod(mergeTarget)) { + branchesMergingInPreprod.push(branchAndOrg.branchName); + } + this.gitLinks.push({ + source: nodeName, + target: mergeTarget + "Branch", + type: "gitMerge", + label: "Merge" + }); + } + return { + name: branchAndOrg.branchName, + nodeName: nodeName, + label: branchAndOrg.branchName, + class: isProduction(branchAndOrg.branchName) ? "gitMain" : "gitMajor", + level: branchAndOrg.level + }; + }); + // Create feature branches for branches that are not merge targets + const noMergeTargetBranchAndOrg = this.branchesAndOrgs.filter((branchAndOrg) => !branchesWhoAreMergeTargets.includes(branchAndOrg.branchName)); + if (branchesMergingInPreprod.length < 2 && !noMergeTargetBranchAndOrg.find((branchAndOrg) => isPreprod(branchAndOrg.branchName))) { + + // We must check if a 'preprod' branch exists before adding it to the array. + // The .find() method returns undefined if no matching element is found. + // Without this check, an 'undefined' value could be pushed to the array, + // causing a null pointer exception later when the code tries to access the 'branchName' property. 
+ const preprodBranch = this.branchesAndOrgs.find((branchAndOrg) => + isPreprod(branchAndOrg.branchName) + ); + if (preprodBranch) { + noMergeTargetBranchAndOrg.push(preprodBranch); + } + } + for (const branchAndOrg of noMergeTargetBranchAndOrg) { + const nameBase = isPreprod(branchAndOrg.branchName) ? "hotfix" : "feature"; + const level = branchAndOrg.level - 1 + this.salesforceDevOrgsGroup.push(branchAndOrg.branchName); + this.addFeatureBranch(nameBase, level, branchAndOrg); + this.addFeatureBranch(nameBase, level, branchAndOrg); + } + // Add retrofit link only if it does not mess with the diagram display :/ + if (branchesMergingInPreprod.length < 2) { + const mainBranch = this.branchesAndOrgs.find((branchAndOrg) => isProduction(branchAndOrg.branchName)); + const preprodBranch = this.branchesAndOrgs.find((branchAndOrg) => isPreprod(branchAndOrg.branchName)); + const integrationBranch = this.branchesAndOrgs.find((branchAndOrg) => isIntegration(branchAndOrg.branchName)); + if (mainBranch && preprodBranch && integrationBranch) { + this.retrofitLinks.push({ + source: mainBranch.branchName + "Branch", + target: integrationBranch.branchName + "Branch", + type: "gitMerge", + label: "Retrofit from RUN to BUILD" + }); + } + } + // Sort branches & links + this.gitBranches = sortArray(this.gitBranches, { by: ['level', 'name'], order: ['asc', 'asc'] }); + this.gitLinks = sortArray(this.gitLinks, { by: ['level', 'source'], order: ['asc', 'asc'] }); + } + + private addFeatureBranch(nameBase: string, level: number, branchAndOrg: any) { + this.featureBranchNb++; + const nameBase1 = nameBase + this.featureBranchNb; + const nodeName1 = nameBase + "Branch" + this.featureBranchNb; + this.gitBranches.push({ + name: nameBase1, + nodeName: nodeName1, + label: nameBase1, + class: "gitFeature", + level: level, + group: branchAndOrg.branchName + }); + this.gitLinks.push({ + source: nodeName1, + target: this.gitBranches.find((gitBranch) => gitBranch.name === 
branchAndOrg.branchName)?.nodeName || "ERROR", + type: "gitMerge", + label: "Merge" + }); + } + + private listSalesforceOrgsAndLinks(): any { + for (const gitBranch of this.gitBranches) { + const branchAndOrg = this.branchesAndOrgs.find((branchAndOrg) => branchAndOrg.branchName === gitBranch.name); + if (branchAndOrg) { + // Major org + const nodeName = branchAndOrg.branchName + "Org"; + this.salesforceOrgs.push({ + name: branchAndOrg.branchName, + nodeName: branchAndOrg.branchName + "Org", + label: isProduction(branchAndOrg.branchName) ? "Production Org" : prettifyFieldName(branchAndOrg.branchName) + " Org", + class: gitBranch.class === "gitMain" ? "salesforceProd" : gitBranch.class === "gitMajor" ? "salesforceMajor" : "salesforceDev", + level: branchAndOrg.level + }); + this.deployLinks.push({ + source: gitBranch.nodeName, + target: nodeName, + type: "sfDeploy", + label: "Deploy to Org", + level: branchAndOrg.level + }); + } + else { + const nodeName = gitBranch.name + "Org"; + this.salesforceOrgs.push({ + name: gitBranch.name, + nodeName: nodeName, + label: "Dev " + prettifyFieldName(gitBranch.name), + class: "salesforceDev", + level: gitBranch.level, + group: gitBranch.group + }); + this.sbDevLinks.push({ + source: nodeName, + target: gitBranch.nodeName, + type: "sfPushPull", + label: "Push / Pull", + level: gitBranch.level, + }); + } + } + // Sort orgs & links + this.salesforceOrgs = sortArray(this.salesforceOrgs, { by: ['level', 'name'], order: ['desc', 'asc'] }); + this.deployLinks = sortArray(this.deployLinks, { by: ['level', 'source'], order: ['desc', 'asc'] }); + this.sbDevLinks = sortArray(this.sbDevLinks, { by: ['level', 'source'], order: ['asc', 'asc'] }); + } + + private generateMermaidLines() { + /* jscpd:ignore-start */ + this.mermaidLines.push("flowchart LR"); + this.mermaidLines.push(""); + + // Git branches + this.mermaidLines.push(this.indent("subgraph GitBranches [Git Branches]", 1)); + this.mermaidLines.push(this.indent("direction TB", 2)); + 
for (const gitBranch of this.gitBranches) { + this.mermaidLines.push(this.indent(`${gitBranch.nodeName}["${gitBranch.label}"]:::${gitBranch.class}`, 2)); + } + this.mermaidLines.push(this.indent("end", 1)); + this.mermaidLines.push(""); + + // Salesforce orgs + this.mermaidLines.push(this.indent("subgraph SalesforceOrgs [Salesforce Major Orgs]", 1)); + this.mermaidLines.push(this.indent("direction TB", 2)); + for (const salesforceOrg of this.salesforceOrgs.filter((salesforceOrg) => ["salesforceProd", "salesforceMajor"].includes(salesforceOrg.class))) { + this.mermaidLines.push(this.indent(`${salesforceOrg.nodeName}(["${salesforceOrg.label}"]):::${salesforceOrg.class}`, 2)); + } + this.mermaidLines.push(this.indent("end", 1)); + this.mermaidLines.push(""); + + // Salesforce dev orgs + for (const devOrgsGroup of this.salesforceDevOrgsGroup) { + this.mermaidLines.push(this.indent(`subgraph SalesforceDevOrgs${devOrgsGroup} [Salesforce Dev Orgs]`, 1)); + this.mermaidLines.push(this.indent("direction TB", 2)); + for (const salesforceOrg of this.salesforceOrgs.filter((salesforceOrg) => salesforceOrg.group === devOrgsGroup && (salesforceOrg.name.startsWith("feature") || salesforceOrg.name.startsWith("hotfix")))) { + this.mermaidLines.push(this.indent(`${salesforceOrg.nodeName}(["${salesforceOrg.label}"]):::${salesforceOrg.class}`, 2)); + } + this.mermaidLines.push(this.indent("end", 1)); + this.mermaidLines.push(""); + } + + // Links + this.addLinks(this.gitLinks); + this.addLinks(this.deployLinks); + this.addLinks(this.sbDevLinks); + this.addLinks(this.retrofitLinks); + + // Classes and styles + this.mermaidLines.push(...this.listClassesAndStyles()); + for (const salesforceDevOrgsGroup of this.salesforceDevOrgsGroup) { + this.mermaidLines.push(`style SalesforceDevOrgs${salesforceDevOrgsGroup} fill:#EBF6FF,color:#000000,stroke:#0077B5,stroke-width:1px;`); + } + /* jscpd:ignore-end */ + const allLinks = [...this.gitLinks, ...this.deployLinks, ...this.sbDevLinks, 
...this.retrofitLinks]; + let pos = 0; + const positions: any = {} + for (const link of allLinks) { + if (!positions[link.type]) { + positions[link.type] = []; + } + positions[link.type].push(pos); + pos++; + } + const linksDef = this.listLinksDef(); + for (const key of Object.keys(positions)) { + const styleDef = linksDef[key]; + this.mermaidLines.push(`linkStyle ${positions[key].join(",")} ${styleDef}`); + } + } + + private addLinks(links) { + for (const link of links) { + if (link.type === "gitMerge") { + this.mermaidLines.push(this.indent(`${link.source} ==>|"${link.label}"| ${link.target}`, 1)); + } else if (link.type === "sfDeploy") { + this.mermaidLines.push(this.indent(`${link.source} -. ${link.label} .-> ${link.target}`, 1)); + } else if (link.type === "sfPushPull") { + this.mermaidLines.push(this.indent(`${link.source} <-. ${link.label} .-> ${link.target}`, 1)); + } + } + this.mermaidLines.push(""); + } + + listClassesAndStyles(): string[] { + const classesAndStyles = ` classDef salesforceDev fill:#9BC3FF,stroke:#2B65D9,stroke-width:2px,color:#000000,font-weight:bold,border-radius:10px; + classDef salesforceMajor fill:#67B7D1,stroke:#004D66,stroke-width:2px,color:#FFFFFF,font-weight:bold,border-radius:10px; + classDef salesforceProd fill:#4C98C3,stroke:#003B5A,stroke-width:2px,color:#FFFFFF,font-weight:bold,border-radius:10px; + classDef gitMajor fill:#FFCA76,stroke:#E65C00,stroke-width:2px,color:#000000,font-weight:bold,border-radius:10px; + classDef gitMain fill:#F97B8B,stroke:#CC2936,stroke-width:2px,color:#000000,font-weight:bold,border-radius:10px; + classDef gitFeature fill:#B0DE87,stroke:#2D6A4F,stroke-width:2px,color:#000000,font-weight:bold,border-radius:10px; + + style GitBranches fill:#F4F5F9,color:#000000,stroke:#8B72B2,stroke-width:1px; + style SalesforceOrgs fill:#F1F7F5,color:#000000,stroke:#468C70,stroke-width:1px; +` + return classesAndStyles.split("\n"); + } + + private listLinksDef(): any { + return { + "gitMerge": 
"stroke:#4B0082,stroke-width:4px,color:#4B0082,background-color:transparent;", + "sfDeploy": "stroke:#4169E1,stroke-width:2px,color:#4169E1,background-color:transparent;", + "sfPushPull": "stroke:#5F9EA0,stroke-width:2px,color:#5F9EA0,background-color:transparent;" + } + } + + private indent(str: string, number: number): string { + return ' '.repeat(number) + str; + } +} diff --git a/src/common/utils/classUtils.ts b/src/common/utils/classUtils.ts index 45cdc0e0f..7a5899b66 100644 --- a/src/common/utils/classUtils.ts +++ b/src/common/utils/classUtils.ts @@ -1,6 +1,6 @@ -import { countRegexMatches, uxLog } from "."; -import * as c from "chalk"; -import * as readFilesRecursive from "fs-readdir-recursive"; +import { countRegexMatches, uxLog } from "./index.js"; +import c from "chalk"; +import readFilesRecursive from "fs-readdir-recursive"; import * as path from "path"; import * as fs from "fs"; @@ -19,12 +19,12 @@ function findSubstringInFile(filePath: string, substring: string): Promise !file.includes("node_modules") && file.includes("classes") && file.endsWith(".cls")) .map((file) => { @@ -38,7 +38,7 @@ export async function getApexTestClasses(classRegexFilter: string | null = null, const className = entry.fileName.substring(0, entry.fileName.length - 4); // Check if need to exclude SeeAllData=true if (excludeSeeAllData === true && (await findSubstringInFile(entry.fullPath, "SeeAllData=true"))) { - uxLog(this, c.grey(`Filtered class ${className} because is contains SeeAllData=true`)); + uxLog("log", this, c.grey(`Filtered class ${className} because is contains SeeAllData=true`)); continue; } // Check if regex filter @@ -48,7 +48,7 @@ export async function getApexTestClasses(classRegexFilter: string | null = null, } } - uxLog(this, c.grey(`Found APEX tests: ${c.bold(testClasses.join())}`)); + uxLog("log", this, c.grey(`Found APEX tests: ${c.bold(testClasses.join())}`)); return testClasses; } @@ -57,7 +57,7 @@ async function matchRegexFilter(classRegexFilter: string, 
className: string) { if ((await countRegexMatches(new RegExp(classRegexFilter), className)) > 0) { return true; } - uxLog(this, c.grey(`Filtered class ${className} because not matching RegExp ${classRegexFilter}`)); + uxLog("log", this, c.grey(`Filtered class ${className} because not matching RegExp ${classRegexFilter}`)); return false; } return true; diff --git a/src/common/utils/dataUtils.ts b/src/common/utils/dataUtils.ts index 0c5aae743..c2f1e3aa0 100644 --- a/src/common/utils/dataUtils.ts +++ b/src/common/utils/dataUtils.ts @@ -1,100 +1,158 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { elapseEnd, elapseStart, execCommand, uxLog } from "."; -import { getConfig } from "../../config"; -import { prompts } from "./prompts"; +import { SfError } from '@salesforce/core'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { elapseEnd, elapseStart, execCommand, uxLog } from './index.js'; +import { getConfig } from '../../config/index.js'; +import { prompts } from './prompts.js'; +import { isProductionOrg } from './orgUtils.js'; -export const dataFolderRoot = path.join(".", "scripts", "data"); +export const DATA_FOLDERS_ROOT = path.join(process.cwd(), 'scripts', 'data'); // Import data from sfdmu folder -export async function importData(sfdmuPath: string, commandThis: any, options: any = {}) { +export async function importData(sfdmuPath: string, commandThis: any, options: any = { cwd: process.cwd() }) { + const cwd = options?.cwd || process.cwd(); const dtl = await getDataWorkspaceDetail(sfdmuPath); - if (dtl.isDelete === true) { - throw new SfdxError("Your export.json contains deletion info, please use appropriate delete command"); + if (dtl?.isDelete === true) { + throw new SfError('Your export.json contains deletion info, please use appropriate delete command'); } - uxLog(commandThis, c.cyan(`Importing data from 
${c.green(dtl.full_label)} ...`)); + const targetUsername = options.targetUsername || commandThis?.org?.getConnection().username; + uxLog("action", commandThis, c.cyan(`Importing data from ${c.green(dtl?.full_label)} into ${targetUsername}...`)); /* jscpd:ignore-start */ - uxLog(commandThis, c.italic(c.grey(dtl.description))); - const targetUsername = options.targetUsername || commandThis.org.getConnection().username; - await fs.ensureDir(path.join(sfdmuPath, "logs")); - const config = await getConfig("branch"); + if (dtl?.description) { + uxLog("log", commandThis, c.italic(c.grey("Data Workspace Description:" + dtl?.description))); + } + await fs.ensureDir(path.join(sfdmuPath, 'logs')); + const config = await getConfig('branch'); const dataImportCommand = - "sfdx sfdmu:run" + + 'sf sfdmu:run' + ` --sourceusername csvfile` + - ` --targetusername ${targetUsername}` + + ` --targetusername ${targetUsername}` + // Keep targetusername until sfdmu switches to target-org ` -p ${sfdmuPath}` + - " --noprompt" + - (config.sfdmuCanModify ? ` --canmodify ${config.sfdmuCanModify}` : ""); + ' --noprompt' + + // Needed for production orgs + (config.sfdmuCanModify || process.env.SFDMU_CAN_MODIFY ? 
` --canmodify ${config.sfdmuCanModify || process.env.SFDMU_CAN_MODIFY}` : ''); /* jscpd:ignore-end */ - elapseStart(`import ${dtl.full_label}`); - await execCommand(dataImportCommand, commandThis, { + elapseStart(`import ${dtl?.full_label}`); + const res = await execCommand(dataImportCommand, commandThis, { fail: true, output: true, + cwd: cwd, }); - elapseEnd(`import ${dtl.full_label}`); + uxLog("success", commandThis, c.green(`Data imported successfully from ${c.green(dtl?.full_label)} into ${targetUsername}`)); + uxLog("log", commandThis, c.italic(c.grey(res.stdout || ''))); + elapseEnd(`import ${dtl?.full_label}`); } // Delete data using sfdmu folder -export async function deleteData(sfdmuPath: string, commandThis: any, options: any = {}) { +export async function deleteData(sfdmuPath: string, commandThis: any, options: any = { cwd: process.cwd() }) { + const config = await getConfig('branch'); + const cwd = options?.cwd || process.cwd(); const dtl = await getDataWorkspaceDetail(sfdmuPath); - if (dtl.isDelete === false) { - throw new SfdxError( - "Your export.json does not contain deletion information. Please check http://help.sfdmu.com/full-documentation/advanced-features/delete-from-source", + if (dtl?.isDelete === false) { + throw new SfError( + 'Your export.json does not contain deletion information. 
Please check http://help.sfdmu.com/full-documentation/advanced-features/delete-from-source' ); } - uxLog(commandThis, c.cyan(`Deleting data from ${c.green(dtl.full_label)} ...`)); - uxLog(commandThis, c.italic(c.grey(dtl.description))); - const targetUsername = options.targetUsername || commandThis.org.getConnection().username; - await fs.ensureDir(path.join(sfdmuPath, "logs")); - const config = await getConfig("branch"); + // If org is production, make sure that "runnableInProduction": true is present in export.json + const isProdOrg = await isProductionOrg(options?.targetUsername || options?.conn?.username || "ERROR", options); + if (isProdOrg === true && (dtl?.runnableInProduction || false) !== true) { + throw new SfError(`To run this delete SFDMU script in production, you need to define "runnableInProduction": true in its export.json file`); + } + if (isProdOrg === true && !config.sfdmuCanModify) { + uxLog("warning", commandThis, c.yellow(`If you see a sfdmu error, you probably need to add a property sfdmuCanModify: YOUR_ORG_INSTANCE_URL in the related config/branches/.sfdx-hardis.YOUR_BRANCH.yml config file.`)); + } + uxLog("action", commandThis, c.cyan(`Deleting data from ${c.green(dtl?.full_label)} ...`)); + if (dtl?.description) { + uxLog("log", commandThis, c.italic(c.grey("Data Workspace Description:" + dtl?.description))); + } + const targetUsername = options.targetUsername || options.conn.username; + await fs.ensureDir(path.join(sfdmuPath, 'logs')); const dataImportCommand = - "sfdx sfdmu:run" + + 'sf sfdmu:run' + ` --sourceusername ${targetUsername}` + ` -p ${sfdmuPath}` + - " --noprompt" + - (config.sfdmuCanModify ? ` --canmodify ${config.sfdmuCanModify}` : ""); - elapseStart(`delete ${dtl.full_label}`); - await execCommand(dataImportCommand, commandThis, { + ' --noprompt' + + (config.sfdmuCanModify ? 
` --canmodify ${config.sfdmuCanModify}` : ''); + elapseStart(`delete ${dtl?.full_label}`); + const res = await execCommand(dataImportCommand, commandThis, { fail: true, output: true, + cwd: cwd, }); - elapseEnd(`delete ${dtl.full_label}`); + uxLog("success", commandThis, c.green(`Data deleted successfully from ${c.green(dtl?.full_label)}`)); + uxLog("log", commandThis, c.italic(c.grey(res.stdout || ''))); + elapseEnd(`delete ${dtl?.full_label}`); } // Export data from sfdmu folder -export async function exportData(sfdmuPath: string, commandThis: any, options: any = {}) { +export async function exportData(sfdmuPath: string, commandThis: any, options: any = { cwd: process.cwd() }) { /* jscpd:ignore-start */ + const cwd = options?.cwd || process.cwd(); const dtl = await getDataWorkspaceDetail(sfdmuPath); - if (dtl.isDelete === true) { - throw new SfdxError("Your export.json contains deletion info, please use appropriate delete command"); + if (dtl?.isDelete === true) { + throw new SfError('Your export.json contains deletion info, please use appropriate delete command'); } /* jscpd:ignore-end */ - uxLog(commandThis, c.cyan(`Exporting data from ${c.green(dtl.full_label)} ...`)); - uxLog(commandThis, c.italic(c.grey(dtl.description))); - const sourceUsername = options.sourceUsername || commandThis.org.getConnection().username; - await fs.ensureDir(path.join(sfdmuPath, "logs")); - const dataImportCommand = `sfdx sfdmu:run --sourceusername ${sourceUsername} --targetusername csvfile -p ${sfdmuPath} --noprompt`; - await execCommand(dataImportCommand, commandThis, { + uxLog("action", commandThis, c.cyan(`Exporting data from ${c.green(dtl?.full_label)} ...`)); + if (dtl?.description) { + uxLog("log", commandThis, c.italic(c.grey("Data Workspace Description:" + dtl?.description))); + } + const sourceUsername = options.sourceUsername || commandThis?.org?.getConnection().username; + await fs.ensureDir(path.join(sfdmuPath, 'logs')); + const dataImportCommand = `sf sfdmu:run 
--sourceusername ${sourceUsername} --targetusername csvfile -p ${sfdmuPath} --noprompt`; + elapseStart(`export ${dtl?.full_label}`); + const res = await execCommand(dataImportCommand, commandThis, { fail: true, output: true, + cwd: cwd, }); + uxLog("success", commandThis, c.green(`Data exported successfully from ${c.green(dtl?.full_label)}`)); + uxLog("log", commandThis, c.italic(c.grey(res.stdout || ''))); + elapseEnd(`export ${dtl?.full_label}`); } -export async function selectDataWorkspace(opts = { selectDataLabel: "Please select a data workspace to export" }) { - if (!fs.existsSync(dataFolderRoot)) { - throw new SfdxError( - "There is no sfdmu root folder 'scripts/data' in your workspace. Create it and define sfdmu exports using sfdmu: https://help.sfdmu.com/", - ); +export async function findDataWorkspaceByName(projectName: string) { + const folderPath = path.join(DATA_FOLDERS_ROOT, projectName); + if (fs.existsSync(folderPath)) { + return folderPath; } + throw new SfError(`There is no sfdmu folder named ${projectName} in your workspace (${DATA_FOLDERS_ROOT})`); +} +export async function hasDataWorkspaces(cwd: string = process.cwd()) { + const dataFolderToSearch = path.join(cwd, 'scripts', 'data'); + if (!fs.existsSync(dataFolderToSearch)) { + return false; + } + const sfdmuFolders = listDataFolders(dataFolderToSearch); + return sfdmuFolders.length > 0; +} + +function listDataFolders(dataFolderToSearch: string) { const sfdmuFolders = fs - .readdirSync(dataFolderRoot, { withFileTypes: true }) + .readdirSync(dataFolderToSearch, { withFileTypes: true }) .filter((dirent) => dirent.isDirectory()) - .map((dirent) => path.join(".", "scripts", "data", dirent.name)); + .map((dirent) => path.join('scripts', 'data', dirent.name)); + return sfdmuFolders +} + +export async function selectDataWorkspace(opts: { selectDataLabel: string, multiple?: boolean, initial?: string | string[], cwd?: string } = { selectDataLabel: 'Please select a data workspace to export', multiple: 
false }): Promise { + let dataFolderToSearch = DATA_FOLDERS_ROOT; + if (opts.cwd) { + dataFolderToSearch = path.join(opts.cwd, 'scripts', 'data'); + } + if (!fs.existsSync(dataFolderToSearch)) { + uxLog("warning", this, + c.yellowBright( + "There is no sfdmu root folder 'scripts/data' in your workspace. Create it and define sfdmu exports using sfdmu: https://help.sfdmu.com/" + )); + return null; + } + + const sfdmuFolders = listDataFolders(dataFolderToSearch); if (sfdmuFolders.length === 0) { - throw new SfdxError("There is no sfdmu folder in your workspace. Create them using sfdmu: https://help.sfdmu.com/"); + throw new SfError('There is no sfdmu folder in your workspace. Create them using sfdmu: https://help.sfdmu.com/'); } const choices: any = []; for (const sfdmuFolder of sfdmuFolders) { @@ -108,30 +166,37 @@ export async function selectDataWorkspace(opts = { selectDataLabel: "Please sele } } const sfdmuDirResult = await prompts({ - type: "select", - name: "value", + type: opts.multiple ? 'multiselect' : 'select', + name: 'value', message: c.cyanBright(opts.selectDataLabel), + description: 'Select the SFDMU data configuration to use for this operation', choices: choices, + initial: opts?.initial === "all" ? sfdmuFolders : opts?.initial ?? 
null }); return sfdmuDirResult.value; } export async function getDataWorkspaceDetail(dataWorkspace: string) { - const exportFile = path.join(dataWorkspace, "export.json"); + const exportFile = path.join(dataWorkspace, 'export.json'); if (!fs.existsSync(exportFile)) { - uxLog(this, c.yellow(`Your SFDMU folder ${c.bold(dataWorkspace)} must contain an ${c.bold("export.json")} configuration file`)); + uxLog( + "warning", + this, + c.yellow(`Your SFDMU folder ${c.bold(dataWorkspace)} must contain an ${c.bold('export.json')} configuration file`) + ); return null; } - const exportFileJson = JSON.parse(await fs.readFile(exportFile, "utf8")); - const folderName = dataWorkspace.replace(/\\/g, "/").match(/([^/]*)\/*$/)[1]; + const exportFileJson = JSON.parse(await fs.readFile(exportFile, 'utf8')); + const folderName = (dataWorkspace.replace(/\\/g, '/').match(/([^/]*)\/*$/) || [])[1]; const hardisLabel = exportFileJson.sfdxHardisLabel || folderName; const hardisDescription = exportFileJson.sfdxHardisDescription || dataWorkspace; return { - full_label: `[${folderName}]${folderName != hardisLabel ? `: ${hardisLabel}` : ""}`, + full_label: `[${folderName}]${folderName != hardisLabel ? 
`: ${hardisLabel}` : ''}`, label: hardisLabel, description: hardisDescription, exportJson: exportFileJson, isDelete: isDeleteDataWorkspace(exportFileJson), + runnableInProduction: exportFileJson.runnableInProduction || false }; } @@ -139,7 +204,7 @@ export async function getDataWorkspaceDetail(dataWorkspace: string) { export function isDeleteDataWorkspace(exportFileJson: any) { let isDelete = false; for (const objectConfig of exportFileJson.objects) { - if (objectConfig?.deleteFromSource === true || objectConfig.operation === "DeleteSource") { + if (objectConfig?.deleteFromSource === true || objectConfig.operation === 'DeleteSource') { isDelete = true; } } diff --git a/src/common/utils/deltaUtils.ts b/src/common/utils/deltaUtils.ts new file mode 100644 index 000000000..518f5a734 --- /dev/null +++ b/src/common/utils/deltaUtils.ts @@ -0,0 +1,172 @@ + +import { parsePackageXmlFile, writePackageXmlFile } from './xmlUtils.js'; + +type PackageType = { + members: string[]; + name: string; +}; + +let fullPackageTypes = null; + +async function getAllTypes(fullPackageFile: string): Promise> { + if (fullPackageTypes) { + return fullPackageTypes; + } + + fullPackageTypes = await parsePackageXmlFile(fullPackageFile); + return fullPackageTypes ?? new Map(); +} + +async function getAllLanguages(fullPackageFile: string): Promise> { + return (await getAllTypes(fullPackageFile))["Translations"] ?? []; +} + +function addTypeIfMissing(types: Map, typeToAdd: PackageType) { + if (typeToAdd === null) { + return; + } + types[typeToAdd.name] = [...new Set([...(types[typeToAdd.name] ?? 
[]), ...typeToAdd.members])]; +} + +// Generic processor factory +/* Config: + - separator: optional, if provided, the member must contain this separator and will be split on it + - skipCondition: optional, if provided, a function that takes the member and returns true if the member should be skipped +*/ +function createProcessor(config: { + separator?: string; + skipCondition?: (member: string) => boolean; + targetType: string; + memberGenerator: (member: string, fullPackageFile: string, allTypesMap: Map) => Promise; +}) { + return async function (member: string, fullPackageFile: string): Promise { + if (config.skipCondition && config.skipCondition(member)) { + return null; + } + + if (config.separator) { + const parts = member.split(config.separator); + if (parts.length !== 2) { + return null; + } + } + const types = await getAllTypes(fullPackageFile); + const members = await config.memberGenerator(member, fullPackageFile, types); + return members.length ? { members, name: config.targetType } : null; + }; +} + +/* Extends a delta package.xml file with dependencies found in a full package.xml file. + For example, if the delta package.xml contains a CustomField, it will add the corresponding CustomObject and RecordTypes. + It also adds translations for all languages found in the full package.xml file. +*/ +export async function extendPackageFileWithDependencies( + deltaXmlFile: string, + fullPackageFile: string, +) { + const languages = await getAllLanguages(fullPackageFile); + + // Generic processors using the factory + const metadataProcessors = listMetadataProcessors(languages); + + const parsedTypes = await parsePackageXmlFile(deltaXmlFile); + const clonedTypes = structuredClone(parsedTypes); + + for (const metadataType in clonedTypes) { + const members = clonedTypes[metadataType]; + if (Object.hasOwn(metadataProcessors, metadataType)) { + for (const member of members) { + const processors = Array.isArray(metadataProcessors[metadataType]) ? 
metadataProcessors[metadataType] : [metadataProcessors[metadataType]]; + for (const processor of processors) { + addTypeIfMissing(parsedTypes, await processor(member, fullPackageFile)); + } + } + } + } + + await writePackageXmlFile(deltaXmlFile, parsedTypes); +} + +function listMetadataProcessors(languages: string[]) { + const allCustomFields = createProcessor({ + targetType: "CustomField", + memberGenerator: async (member, _, allTypesMap) => { + const baseName = member.split('.')[0]; + return allTypesMap["CustomField"]?.filter(field => field.startsWith(baseName)) ?? []; + } + }); + + const allCustomMetadataRecords = createProcessor({ + skipCondition: (member) => !member.includes('__mdt'), + targetType: "CustomMetadata", + memberGenerator: async (member, _, allTypesMap) => { + const baseName = member.split('__mdt')[0]; + return allTypesMap["CustomMetadata"]?.filter(member => member.startsWith(baseName)) ?? []; + } + }); + + const allObjectRecordTypes = createProcessor({ + separator: '.', + skipCondition: (member) => member.includes("__mdt"), + targetType: "RecordType", + memberGenerator: async (member, _, allTypesMap) => { + const sobject = member.split('.')[0]; + return allTypesMap["RecordType"]?.filter(member => member.startsWith(sobject + '.')) ?? 
[]; + } + }); + + const dashSeparatedObjectToObjectTranslation = createProcessor({ + separator: '-', + targetType: "CustomObjectTranslation", + memberGenerator: async (member) => { + const sobject = member.split('-')[0]; + return languages.map(languageSuffix => sobject + "-" + languageSuffix); + } + }); + + const dotSeparatedObjectToObjectTranslation = createProcessor({ + separator: '.', + skipCondition: (member) => member.includes("__mdt"), + targetType: "CustomObjectTranslation", + memberGenerator: async (member) => { + const sobject = member.split('.')[0]; + return languages.map(languageSuffix => sobject + "-" + languageSuffix); + } + }); + + const globalTranslations = createProcessor({ + targetType: "Translations", + memberGenerator: async () => languages + }); + + const leadConvertSettings = createProcessor({ + skipCondition: (member) => !member.startsWith('Opportunity') && !member.includes('Account') && !member.includes('Contact') && !member.includes('Lead'), + targetType: "LeadConvertSettings", + memberGenerator: async (_, __, allTypesMap) => { + return allTypesMap["LeadConvertSettings"] ?? 
[]; + } + }); + + const objectTranslations = createProcessor({ + targetType: "CustomObjectTranslation", + memberGenerator: async (member) => { + return languages.map(suffix => member + "-" + suffix); + } + }); + + // Map of metadata types to their processors + const metadataProcessors = { + "CustomField": [allObjectRecordTypes, allCustomMetadataRecords, dotSeparatedObjectToObjectTranslation, leadConvertSettings], + "CustomLabel": globalTranslations, + "CustomMetadata": allCustomFields, + "CustomObject": objectTranslations, + "CustomPageWebLink": globalTranslations, + "CustomTab": globalTranslations, + "Layout": dashSeparatedObjectToObjectTranslation, + "QuickAction": dotSeparatedObjectToObjectTranslation, + "RecordType": dotSeparatedObjectToObjectTranslation, + "ReportType": globalTranslations, + "ValidationRule": dotSeparatedObjectToObjectTranslation, + }; + return metadataProcessors; +} diff --git a/src/common/utils/deployTipJson.ts b/src/common/utils/deployTipJson.ts new file mode 100644 index 000000000..f2d1ccf26 --- /dev/null +++ b/src/common/utils/deployTipJson.ts @@ -0,0 +1,257 @@ +// Analyze deployment errors to provide tips to user :) +import c from "chalk"; +import format from "string-template"; +import { getAllTips } from "./deployTipsList.js"; +import { stripAnsi, uxLog } from "./index.js"; +import { AiProvider, AiResponse } from "../aiProvider/index.js"; +import { updatePullRequestResult } from "./deployTips.js"; +import { shortenLogLines } from "./deployUtils.js"; + + +export async function analyzeDeployErrorLogsJson(resultJson: any, log: string, includeInLog = true, options: any): Promise { + const allTips = getAllTips(); + const tips: any = []; + + // Filter to keep only errors (we don't care about warnings) and build legacy message to match deploymentTips + const errors = (resultJson?.result?.details?.componentFailures || []) + .filter(error => error.success === false && error.problemType === "Error") + .map(error => { + error.messageInitial = 
`Error ${error.fullName} ${error.problem}`; + error.messageInitialDisplay = `${error.componentType} ${error.fullName}: ${error.problem}`; + error.tips = []; + return error; + }); + + // Collect errors & tips + for (const error of errors) { + for (const tipDefinition of allTips) { + await matchesTip(tipDefinition, error); + if (error.tips.length > 0) { + tips.push(tipDefinition); + } + } + // Add default tip if not found + if (error.tips.length === 0) { + error.message = stripAnsi(error.messageInitial); + const errorBase = Object.assign({}, error); + delete errorBase.tips; + error.tips.push({ + error: errorBase + }); + } + } + + // Enrich with AI if applicable + const alreadyProcessedErrors: string[] = []; + for (const error of errors) { + for (const errorTip of error.tips) { + const aiTip = await findAiTip(errorTip.error, alreadyProcessedErrors); + if (aiTip) { + errorTip.tipFromAi = { + promptResponse: aiTip.promptResponse, + } + } + } + } + + // Gather failing tests + const failedTests = extractFailedTestsInfo(resultJson?.result?.details?.runTestResult?.failures || []); + + // Build output list of errors & tips + const errorsAndTips: any[] = []; + for (const error of errors) { + for (const errorTip of error.tips) + errorsAndTips.push(errorTip); + } + + const detailedErrorLines: string[] = []; + + // Fallback in case we have not been able to identify errors: Check if there are code coverage warnings + if (errorsAndTips.length === 0 && failedTests.length === 0 && resultJson?.result?.details?.runTestResult?.codeCoverageWarnings?.length > 0) { + for (const cvrgWarning of resultJson.result.details.runTestResult.codeCoverageWarnings) { + const coverageErrorMsg = (cvrgWarning.name ? 
`${cvrgWarning.name} - ` : "") + cvrgWarning.message; + errorsAndTips.push(({ + error: { message: coverageErrorMsg }, + tip: { + label: "CodeCoverageWarning", + message: "Please fix code coverage so your deployment can pass", + docUrl: "https://developer.salesforce.com/docs/atlas.en-us.apexcode.meta/apexcode/apex_code_coverage_intro.htm", + }, + })) + detailedErrorLines.push(...["", "⛔ " + c.red(c.bold("Coverage issue: " + coverageErrorMsg)), ""]); + } + } + + // Fallback : declare an error if we have not been able to identify errors + if (errorsAndTips.length === 0 && failedTests.length === 0 && resultJson?.result?.errorMessage) { + errorsAndTips.push(({ + error: { message: resultJson.result.errorMessage }, + tip: { + label: resultJson.result.errorStatusCode || "UNKNOWN", + message: "Please fix unknown errors (", + }, + })) + detailedErrorLines.push(...["", "⛔ " + c.red(c.bold("Unknown issue: " + resultJson.result.errorMessage)), ""]); + } + + // Fallback : declare an error if we have not been able to identify errors + if (errorsAndTips.length === 0 && failedTests.length === 0) { + errorsAndTips.push(({ + error: { message: "There has been an issue parsing errors, please notify sfdx-hardis maintainers" }, + tip: { + label: "SfdxHardisInternalError", + message: "Declare issue on https://github.com/hardisgroupcom/sfdx-hardis/issues", + }, + })) + detailedErrorLines.push(...["", "⛔ " + c.red(c.bold("There has been an issue parsing errors, please notify sfdx-hardis maintainers")), ""]); + } + + // Create output log for errors + for (const error of errors) { + detailedErrorLines.push(...["", "⛔ " + c.red(c.bold(error.messageInitialDisplay)), ""]); + if (error.tips.length > 0 && error.tips.some(err => err.tip || err.tipFromAi)) { + for (const errorTip of error.tips) { + if (errorTip.tip) { + detailedErrorLines.push(...[ + c.yellow(c.italic("✏️ Error " + c.bold(errorTip.tip.label)) + ":"), + c.yellow(errorTip.tip.messageConsole), + c.yellow(`Documentation: 
${errorTip.tip.docUrl}`) + ]) + } + if (errorTip.tipFromAi) { + detailedErrorLines.push(...[ + c.yellow(c.italic("🤖 AI response:")), + c.yellow(errorTip.tipFromAi.promptResponse) + ]) + } + } + } + else { + detailedErrorLines.push(...[c.yellow("No tip found for error. Try asking ChatGPT, Google or a Release Manager :)")]) + } + } + detailedErrorLines.push(""); + + // Create output log for test failures + if (failedTests.length > 0) { + detailedErrorLines.push(...["", c.red(c.bold("Test failures:"))], ""); + for (const failedTest of failedTests) { + detailedErrorLines.push(...[ + c.red(`💥 ${c.bold(failedTest.class)}.${c.bold(failedTest.method)}: ${failedTest.error}`), + c.grey(`Stack: ${failedTest.stack || "none"}`), + "" + ]); + } + } + + // Update data that will be used for Pull Request comment + await updatePullRequestResult(errorsAndTips, failedTests, options); + // Return results + const newLog = includeInLog ? shortenLogLines(log) + "\n\n" + detailedErrorLines.join("\n") : shortenLogLines(log); + return { tips, errorsAndTips, failedTests, errLog: newLog }; +} + +async function matchesTip(tipDefinition: any, error: any) { + matchStringBasedTip(tipDefinition, error); + matchRegExpBasedTip(tipDefinition, error); +} + +function matchStringBasedTip(tipDefinition: any, error: any) { + if (tipDefinition.expressionString && + tipDefinition.expressionString.filter((expressionString: any) => error.messageInitial.includes(expressionString)).length > 0) { + error.message = stripAnsi(error.messageInitial); + const errorBase = Object.assign({}, error); + delete errorBase.tips; + error.tips.push({ + error: errorBase, + tip: { + label: tipDefinition.label, + docUrl: tipDefinition.docUrl, + message: tipDefinition.tip, + messageConsole: tipDefinition.tip, + }, + }); + } +} + +function matchRegExpBasedTip(tipDefinition: any, error: any) { + if ( + tipDefinition.expressionRegex && + tipDefinition.expressionRegex.filter((expressionRegex: any) => { + expressionRegex.lastIndex = 0; 
// reset regex last index to be able to reuse it + return expressionRegex.test(error.messageInitial); + }).length > 0 + ) { + const regex = tipDefinition.expressionRegex.filter((expressionRegex: any) => { + expressionRegex.lastIndex = 0; // reset regex last index to be able to reuse it + return expressionRegex.test(error.messageInitial); + })[0]; + regex.lastIndex = 0; // reset regex last index to be able to reuse it + const matches = [...error.messageInitial.matchAll(regex)]; + for (const m of matches) { + const replacements = m.map((str: string) => c.bold(str.trim().replace(/'/gm, ""))); + const replacementsMarkdown = m.map((str: string) => `**${str.trim().replace(/'/gm, "")}**`); + error.message = stripAnsi(format(error.messageInitial, replacementsMarkdown)).replace(/\*\*.\*\*/gm, ".") + const errorBase = Object.assign({}, error); + delete errorBase.tips; + error.tips.push({ + error: errorBase, + tip: { + label: tipDefinition.label, + docUrl: tipDefinition.docUrl, + message: stripAnsi(format(tipDefinition.tip, replacementsMarkdown).replace(/\*\*.\*\*/gm, ".")), + messageConsole: tipDefinition.tip.split(/\r?\n/).map((str: string) => format(str, replacements)).join("\n") + }, + }); + } + } +} + +function extractFailedTestsInfo(failedTestsIn: any[]) { + const failedTests: any[] = []; + for (const failedTestIn of failedTestsIn || []) { + const failedTestRes: any = { + class: (failedTestIn.namespace ? 
failedTestIn.namespace + "__" : '') + failedTestIn.name, + method: failedTestIn.methodName, + error: failedTestIn.message, + }; + if (failedTestIn?.stackTrace) { + failedTestRes.stack = failedTestIn.stackTrace; + } + failedTests.push(failedTestRes); + } + return failedTests; +} + + +async function findAiTip(error: any, alreadyProcessedErrors: string[]): Promise { + if (alreadyProcessedErrors.includes(error.message)) { + return null; + } + alreadyProcessedErrors.push(error.message); + if (AiProvider.isAiAvailable()) { + if (alreadyProcessedErrors.length > parseInt(process.env.MAX_DEPLOYMENT_TIPS_AI_CALLS || "20")) { + uxLog("warning", this, c.yellow(`[AI] Maximum number of AI calls for deployment tips reached. Increase with env var MAX_DEPLOYMENT_TIPS_AI_CALLS`)); + return null; + } + const prompt = buildPrompt(error); + try { + const aiResponse = await AiProvider.promptAi(prompt, "PROMPT_SOLVE_DEPLOYMENT_ERROR"); + return aiResponse; + } catch (e) { + uxLog("warning", this, c.yellow("[AI] Error while calling AI Provider: " + (e as Error).message)); + } + } + return null; +} + +function buildPrompt(error: any) { + const prompt = + `You are a Salesforce release manager using Salesforce CLI commands to perform deployments \n` + + `How to solve the following Salesforce deployment error ?\n` + + "- Please answer using sfdx source format, not metadata format. \n" + + "- Please provide XML example if applicable. 
\n" + + "- Please skip the part of the response about how to retrieve or deploy the changes with Salesforce CLI.\n" + + `The error is: \n${JSON.stringify(error, null, 2)}`; + return prompt; +} \ No newline at end of file diff --git a/src/common/utils/deployTips.ts b/src/common/utils/deployTips.ts index dea93ded2..2a5e87cba 100644 --- a/src/common/utils/deployTips.ts +++ b/src/common/utils/deployTips.ts @@ -1,20 +1,30 @@ // Analyze deployment errors to provide tips to user :) -import * as c from "chalk"; -import * as format from "string-template"; +import c from "chalk"; +import format from "string-template"; -import { getAllTips } from "./deployTipsList"; -import { deployErrorsToMarkdown, testFailuresToMarkdown } from "../gitProvider/utilsMarkdown"; -import { stripAnsi, uxLog } from "."; -import { AiProvider, AiResponse } from "../aiProvider"; +import { getAllTips } from "./deployTipsList.js"; +import { deployErrorsToMarkdown, testFailuresToMarkdown } from "../gitProvider/utilsMarkdown.js"; +import { findJsonInString, stripAnsi, uxLog } from "./index.js"; +import { AiProvider, AiResponse } from "../aiProvider/index.js"; +import { analyzeDeployErrorLogsJson } from "./deployTipJson.js"; +import { PullRequestData } from "../gitProvider/index.js"; -let logRes = null; -let errorsAndTips = []; -let alreadyProcessedErrors = []; +let logRes: string | null = null; +let errorsAndTips: any[] = []; +let alreadyProcessedErrors: any[] = []; const firstYellowChar = c.yellow("*")[0]; // Checks for deploy tips in a log string // returns formatted and completed error log export async function analyzeDeployErrorLogs(log: string, includeInLog = true, options: any): Promise { + // New way using json: should be always be used + const jsonResult = findJsonInString(log); + if (jsonResult) { + const resultsFromJson = await analyzeDeployErrorLogsJson(jsonResult, log, includeInLog, options); + if (resultsFromJson && (resultsFromJson?.errorsAndTips.length > 0 || 
resultsFromJson?.failedTests?.length > 0)) { + return resultsFromJson; + } + } errorsAndTips = []; // reset alreadyProcessedErrors = []; // reset logRes = returnErrorLines(log).join("\n"); // reset @@ -25,7 +35,7 @@ export async function analyzeDeployErrorLogs(log: string, includeInLog = true, o } } // Add default error messages for errors without tips - const logResLines = []; + const logResLines: any[] = []; const updatedLogLines = returnErrorLines(logRes); let index = 0; for (const logLine of updatedLogLines) { @@ -34,7 +44,9 @@ export async function analyzeDeployErrorLogs(log: string, includeInLog = true, o index++; continue; } - if (logLine.trim().startsWith("Error") && !(updatedLogLines[index + 1] && !updatedLogLines[index + 1].trim().startsWith("Error"))) { + if (isErrorLine(logLine) && + (updatedLogLines[index + 1] && isErrorLine(updatedLogLines[index + 1]) || !updatedLogLines[index + 1]) + ) { const aiTip = await findAiTip(logLine.trim()); // Complete with AI if possible if (aiTip && aiTip.success) { @@ -48,7 +60,7 @@ export async function analyzeDeployErrorLogs(log: string, includeInLog = true, o }, }); } else { - const promptText = buildPrompt(logLine.trim()); + const promptText = AiProvider.buildPrompt("PROMPT_SOLVE_DEPLOYMENT_ERROR", { "ERROR": logLine.trim() }); // No tip found, give the user an AI prompt logResLines.push(c.yellow("No sfdx-hardis tip to solve this error. 
You can try the following prompt:")); logResLines.push(c.yellow(promptText)); @@ -65,15 +77,45 @@ export async function analyzeDeployErrorLogs(log: string, includeInLog = true, o } // Extract failed test classes - const failedTests = []; const logRaw = stripAnsi(log); + const failedTests: any[] = []; + // sf project deploy output + extractFailedTestsInfoForSfCommand(logRaw, failedTests); + if (failedTests.length === 0) { + // Legacy sfdx force:source:deploy output + extractFailedTestsInfoForSfdxCommand(logRaw, failedTests); + } + // Fallback in case we have not been able to identify errors + if (errorsAndTips.length === 0 && failedTests.length === 0) { + errorsAndTips.push(({ + error: { message: "There has been an issue parsing errors, probably because of a SF CLI output format update. Please check console logs." }, + tip: { + label: "SfdxHardisParseError", + message: "If you are in CI/CD, please check at the bottom of deployment check job logs. The issue will be fixed ASAP.", + }, + })) + } + + await updatePullRequestResult(errorsAndTips, failedTests, options); + return { tips, errorsAndTips, failedTests, errLog: logResLines.join("\n") }; +} + +function isErrorLine(str: string) { + const strTrim = str.trim(); + if (strTrim.startsWith("Error") || strTrim.startsWith("| Error")) { + return true; + } + return false; +} + +function extractFailedTestsInfoForSfdxCommand(logRaw: string, failedTests: any[]) { const regexFailedTests = /Test Failures([\S\s]*?)Test Success/gm; if (logRaw.match(regexFailedTests)) { - const failedTestsLines = regexFailedTests - .exec(logRaw)[1] + const failedTestsLines = (regexFailedTests + .exec(logRaw) || [])[1] .split("\n") .map((s) => s.trim()); - let failedTest = null; + let failedTest: any = null; // Parse strings to extract main error line then stack for (const line of failedTestsLines) { const regex = /^(\w+[\d_]*)\s+(\w+[\d_]*)\s*(.*)$/; @@ -87,7 +129,7 @@ export async function analyzeDeployErrorLogs(log: string, includeInLog = true, o 
failedTest = { class: match[1], method: match[2], - error: errSplit.shift().trim(), + error: (errSplit.shift() || "").trim(), }; if (errSplit.length > 0) { failedTest.stack = "Class." + errSplit.join("\nClass."); @@ -96,19 +138,39 @@ export async function analyzeDeployErrorLogs(log: string, includeInLog = true, o } } } - updatePullRequestResult(errorsAndTips, failedTests, options); - return { tips, errorsAndTips, failedTests, errLog: logResLines.join("\n") }; +} + +function extractFailedTestsInfoForSfCommand(logRaw: string, failedTests: any[]) { + const regexFailedTests = /Test Failures([\S\s]*?)Test Success/gm; + if (logRaw.match(regexFailedTests)) { + const failedTestsString = (regexFailedTests.exec(logRaw) || [])[1].split(/\r?\n/).join("\n") + "\n•"; + // Parse strings to extract main error line then stack + // eslint-disable-next-line no-regex-spaces, no-useless-escape + const regex = /^• (.*)\n message: (.*)\n stacktrace: ([\s\S]*?)(?=\n•|\z)/gm; + const matches = [...failedTestsString.matchAll(regex)]; + for (const match of matches || []) { + const failedTest: any = { + class: match[1].split(".")[0], + method: match[1].split(".")[1], + error: match[2].trim(), + }; + if (match[3]) { + failedTest.stack = match[3]; + } + failedTests.push(failedTest); + } + } } // Checks if the error string or regex is found in the log // Adds the fix tip under the line if includeInLog is true async function matchesTip(tipDefinition: any, includeInLog = true): Promise { - const newLogLines = []; + const newLogLines: any[] = []; // string matching if ( tipDefinition.expressionString && tipDefinition.expressionString.filter((expressionString: any) => { - return logRes.includes(expressionString); + return (logRes || "").includes(expressionString); }).length > 0 ) { if (includeInLog) { @@ -128,6 +190,7 @@ async function matchesTip(tipDefinition: any, includeInLog = true): Promise 0 ) { if (includeInLog) { - const newLogLines = []; + const newLogLines: any[] = []; const logLines = 
returnErrorLines(logRes); for (const line of logLines) { newLogLines.push(line); @@ -183,6 +246,7 @@ async function matchesTip(tipDefinition: any, includeInLog = true): Promise str.startsWith("Error") || str.startsWith(" Error") || str.startsWith(firstYellowChar)); + return strIn.split(/\r?\n/).filter((str) => isErrorLine(str) || str.startsWith(firstYellowChar)); } // This data will be caught later to build a pull request message -async function updatePullRequestResult(errorsAndTips: Array, failedTests: Array, options: any) { - const prData: any = { +export async function updatePullRequestResult(errorsAndTips: Array, failedTests: Array, options: any) { + const prData: Partial = { messageKey: "deployment", title: options.check ? "✅ Deployment check success" : "✅ Deployment success", deployErrorsMarkdownBody: "No error has been found during the deployment", @@ -243,22 +307,18 @@ async function findAiTip(errorLine: any): Promise { } alreadyProcessedErrors.push(errorLine); if (AiProvider.isAiAvailable()) { - const prompt = buildPrompt(errorLine); + if (alreadyProcessedErrors.length > parseInt(process.env.MAX_DEPLOYMENT_TIPS_AI_CALLS || "20")) { + uxLog("warning", this, c.yellow(`[AI] Maximum number of AI calls for deployment tips reached. Increase with env var MAX_DEPLOYMENT_TIPS_AI_CALLS`)); + return null; + } + const prompt = AiProvider.buildPrompt("PROMPT_SOLVE_DEPLOYMENT_ERROR", { "ERROR": errorLine }); try { - const aiResponse = await AiProvider.promptAi(prompt); + const aiResponse = await AiProvider.promptAi(prompt, "PROMPT_SOLVE_DEPLOYMENT_ERROR"); return aiResponse; } catch (e) { - uxLog(this, c.yellow("[AI] Error while calling OpenAI: " + e.message)); + uxLog("warning", this, c.yellow("[AI] Error while calling OpenAI: " + (e as Error).message)); } } return null; } -function buildPrompt(errorLine: string) { - const prompt = - `How to solve Salesforce deployment error "${errorLine}" ? \n` + - "- Please answer using sfdx source format, not metadata format. 
\n" + - "- Please provide XML example if applicable. \n" + - "- Please skip the part of the response about retrieving or deploying the changes with Salesforce CLI."; - return prompt; -} diff --git a/src/common/utils/deployTipsList.ts b/src/common/utils/deployTipsList.ts index 07b2742a5..4660fb8bb 100644 --- a/src/common/utils/deployTipsList.ts +++ b/src/common/utils/deployTipsList.ts @@ -1,4 +1,15 @@ +import { CONSTANTS } from "../../config/index.js"; + export function getAllTips() { + const allTips = listAllTips().map((tip: any) => { + tip.docUrl = `${CONSTANTS.DOC_URL_ROOT}/sf-deployment-assistant/${tip.label.replace(/[^a-zA-Z0-9 -]|\s/g, '-')}/` + return tip; + }); + + return allTips; +} + +function listAllTips() { return [ { name: "api-version-error", @@ -6,7 +17,7 @@ export function getAllTips() { expressionRegex: [/Error (.*) The (.*) apiVersion can't be "([0-9]+)"/gm], tip: `{1} metadata has probably been created/updated in a sandbox already upgraded to next platform version (ex: Sandbox in Summer'23 and Production in Spring'23) - First, try to update the api version in the XML of {1} metadata file (decrement the number in {3}.0) -- If it still doesn't work because the metadata structure has changed between version, you may try a force:source:retrieve of the metadata by forcing --apiversion at the end of the command. +- If it still doesn't work because the metadata structure has changed between version, you may try a sf project:retrieve:start of the metadata by forcing --api-version at the end of the command. `, }, { @@ -67,7 +78,7 @@ THIS MAY BE A FALSE POSITIVE if you are just testing the deployment, as destruct expressionRegex: [/Error (.*) Cannot find folder:(.*)/gm], tip: `Folder {2} is missing. 
- If folder {2} is existing in sources, add it in related package.xml -- If folder {2} is not existing in DX sources, please use sfdx hardis:project:clean:retrievefolders to retrieve it +- If folder {2} is not existing in DX sources, please use sf hardis:project:clean:retrievefolders to retrieve it - If both previous solutions did not work, go create manually folder {2} in target org `, }, @@ -106,7 +117,12 @@ Example of XML you have to remove in {1}: - Delete field {1} in target org: it will be recreated after deployment (but you will loose data on existing records, so be careful if your target is a production org) - Create another field with desired type and manage data recovery if the target is a production org`, }, - + { + name: "change-matching-rule", + label: "Change Matching Rule", + expressionRegex: [/Error (.*) Before you change a matching rule, you must deactivate it/gm], + tip: `To be able to deploy, you must go in target org setup to manually deactivate matching rule {1}`, + }, { name: "condition-missing-reference", label: "Condition missing reference", @@ -114,6 +130,15 @@ Example of XML you have to remove in {1}: tip: `There is a reference to {2} in {1}, and {2} is not found. You can either: - Add {2} in your deployment sources and make sure it is named in package.xml - Remove the reference to {2} in {1} +`, + }, + { + name: "could-not-retrieve-field-info", + label: "Couldn't retrieve or load information on the field", + expressionRegex: [/Error (.*) Something went wrong. We couldn't retrieve or load the information on the field: (.*)\./gm], + tip: `There is a reference to {2} in {1}, and {2} is not found. 
You can either: +- Commit {2} in your deployment sources and make sure it is named in package.xml +- Remove the reference to {2} in {1} `, }, { @@ -124,7 +149,7 @@ Example of XML you have to remove in {1}: - If you renamed the custom object, do a search/replace in sources with previous object name and new object name - If you deleted the custom object, or if you don't want to deploy it, do a search on the custom object name, and remove XML elements referencing it - If the object should exist, make sure it is in force-app/main/default/objects and that the object name is in manifest/package.xml in CustomObject section -You may also have a look to command sfdx hardis:project:clean:references +You may also have a look to command sf hardis:project:clean:references `, }, { @@ -136,9 +161,12 @@ You may also have a look to command sfdx hardis:project:clean:references - If you deleted {3}.{4}, or if you don't want to deploy it, do a search on {4} in all sources, and remove all XML elements referring to {3}.{4} (except in destructiveChanges.xml) - If {3}.{4} should exist, make sure it is in force-app/main/default/objects/{3}/fields and that {3}.{4} is in manifest/package.xml in CustomField section - If {3}.{4} is standard, the error is because {3}.{4} is not available in the org you are trying to deploy to. You can: - - Remove the reference to {4} in the XML of {1} ( maybe sfdx hardis:project:clean:references can clean automatically for you ! ) + - Remove the reference to {4} in the XML of {1} ( maybe sf hardis:project:clean:references can clean automatically for you ! ) - Activate the required features/license in the target org `, + examples: [ + "Error PS_Admin In field: field - no CustomField named User.expcloud__Portal_Username__c found" + ] }, { name: "custom-field-rights-mandatory", @@ -163,6 +191,12 @@ Example of element to delete: - If you use a package.xml, is {3} present within type CustomMetadata ? 
`, }, + { + name: "expired-access-token", + label: "Expired Access / Refresh Token", + expressionString: ["expired access/refresh token"], + tip: `Run command "Select another org" from Status panel (or sf hardis:org:select) to authenticate again to your org`, + }, { name: "missingDataCategoryGroup", label: "Missing Data Category Group", @@ -196,7 +230,7 @@ Example of element to delete: label: "Missing e-mail template", expressionRegex: [/In field: template - no EmailTemplate named (.*) found/gm], tip: `An email template should be present in the sources. To retrieve it, you can run: -sfdx force:source:retrieve -m EmailTemplate:{1} -u YOUR_ORG_USERNAME`, +sf project retrieve start -m EmailTemplate:{1} -o YOUR_ORG_USERNAME`, }, { name: "empty-item", @@ -219,7 +253,7 @@ You probably also need to add CRM Analytics Admin Permission Set assignment to t { name: "error-parsing-file", label: "Error parsing file", - expressionRegex: [/Error (.*) Error parsing file: (.*) /gm], + expressionRegex: [/Error (.*) Error parsing file: (.*)/gm], tip: `There has been an error parsing the XML file of {1}: {2} - Open file {1} and look where the error can be ! (merge issue, typo, XML tag not closed...)`, }, @@ -278,7 +312,7 @@ More details at https://help.salesforce.com/articleView?id=sf.tips_on_building_f expressionString: ["Invalid scope:Mine, not allowed"], tip: `Replace Mine by Everything in the list view SFDX source XML. Have a look at this command to manage that automatically :) -https://sfdx-hardis.cloudity.com/hardis/org/fix/listviewmine/ +${CONSTANTS.DOC_URL_ROOT}/hardis/org/fix/listviewmine/ `, }, { @@ -418,7 +452,7 @@ If it is already done, you may manually check "MarketingUser" field on the scrat expressionString: ["ProductRequest"], tip: `ProductRequest object is not available in the target org. Maybe you would like to clean its references within Profiles / PS using the following command ? 
-sfdx hardis:project:clean:references , then select "ProductRequest references"`, +sf hardis:project:clean:references , then select "ProductRequest references"`, }, { name: "missing-feature-social-customer-service", @@ -490,7 +524,7 @@ sfdx hardis:project:clean:references , then select "ProductRequest references"`, label: "Missing report", expressionRegex: [/Error (.*) The (.*) report chart has a problem with the "reportName" field/gm], tip: `{1} is referring to unknown report {2}. To retrieve it, you can run: -- sfdx force:source:retrieve -m Report:{2} -u YOUR_ORG_USERNAME +- sf project retrieve start -m Report:{2} -o YOUR_ORG_USERNAME - If it fails, looks for the report folder and add it before report name to the retrieve command (ex: MYFOLDER/MYREPORTNAME) `, }, @@ -512,7 +546,7 @@ sfdx hardis:project:clean:references , then select "ProductRequest references"`, expressionString: ["sharing operation already in progress"], tip: `You can not deploy multiple SharingRules at the same time. 
You can either: - Remove SharingOwnerRules and SharingRule from package.xml (so it becomes a manual operation) -- Use sfdx hardis:work:save to generate a deploymentPlan in .sfdx-hardis.json, +- Use sf hardis:work:save to generate a deploymentPlan in .sfdx-hardis.json, - If you are trying to create a scratch org, add DeferSharingCalc in features in project-scratch-def.json `, }, @@ -553,7 +587,7 @@ Go manually make the change in the target org, so the deployment will pass label: "Picklist value not found", expressionRegex: [/Picklist value: (.*) in picklist: (.*) not found/gm], tip: `Sources have references to value {1} of picklist {2} -- If picklist {2} is standard, add the picklist to sfdx sources by using "sfdx force:source:retrieve -m StandardValueSet:{2}", then save again +- If picklist {2} is standard, add the picklist to sfdx sources by using "sf project retrieve start -m StandardValueSet:{2}", then save again - Else, perform a search in all code of {1}, then remove XML tags referring to {1} (for example in record types metadatas) `, }, @@ -585,8 +619,8 @@ Go manually make the change in the target org, so the deployment will pass label: "CRM Analytics: A Recipe must specify a DataFlow", expressionRegex: [/Error (.*) A Recipe must specify a Dataflow/gm], tip: `You must include related WaveDataFlow {1} in sources (and probably in package.xml too). 
-To retrieve it, run: sfdx force:source:retrieve -m WaveDataFlow:{1} -u SOURCE_ORG_USERNAME -You can also retrieve all analytics sources in one shot using sfdx hardis:org:retrieve:source:analytics -u SOURCE_ORG_USERNAME +To retrieve it, run: sf project retrieve start -m WaveDataFlow:{1} -u SOURCE_ORG_USERNAME +You can also retrieve all analytics sources in one shot using sf hardis:org:retrieve:source:analytics -u SOURCE_ORG_USERNAME - https://salesforce.stackexchange.com/a/365453/33522 - https://help.salesforce.com/s/articleView?id=000319274&type=1`, }, @@ -714,3 +748,4 @@ If you see two {2} XML blocks with {3}, please decide which one you keep and rem }, ]; } + diff --git a/src/common/utils/deployUtils.ts b/src/common/utils/deployUtils.ts index c9274e368..c81258bdf 100644 --- a/src/common/utils/deployUtils.ts +++ b/src/common/utils/deployUtils.ts @@ -1,37 +1,56 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as path from "path"; -import * as sortArray from "sort-array"; -import { createTempDir, elapseEnd, elapseStart, execCommand, execSfdxJson, getCurrentGitBranch, git, gitHasLocalUpdates, isCI, uxLog } from "."; -import { CONSTANTS, getConfig, setConfig } from "../../config"; -import { GitProvider } from "../gitProvider"; -import { deployCodeCoverageToMarkdown } from "../gitProvider/utilsMarkdown"; -import { MetadataUtils } from "../metadata-utils"; -import { importData } from "./dataUtils"; -import { analyzeDeployErrorLogs } from "./deployTips"; -import { callSfdxGitDelta } from "./gitUtils"; -import { createBlankSfdxProject, isSfdxProject } from "./projectUtils"; -import { prompts } from "./prompts"; -import { arrangeFilesBefore, restoreArrangedFiles } from "./workaroundUtils"; -import { isPackageXmlEmpty, parseXmlFile, removePackageXmlFilesContent, writeXmlFile } from "./xmlUtils"; -import { ResetMode } from "simple-git"; -import { isSandbox } from 
"./orgUtils"; +import { Connection, SfError } from '@salesforce/core'; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import * as path from 'path'; +import sortArray from 'sort-array'; +import { + createTempDir, + elapseEnd, + elapseStart, + execCommand, + execSfdxJson, + findJsonInString, + getCurrentGitBranch, + git, + gitHasLocalUpdates, + isCI, + killBoringExitHandlers, + replaceJsonInString, + sortCrossPlatform, + uxLog, + uxLogTable, +} from './index.js'; +import { getApiVersion, getConfig, getReportDirectory, setConfig } from '../../config/index.js'; +import { GitProvider } from '../gitProvider/index.js'; +import { deployCodeCoverageToMarkdown } from '../gitProvider/utilsMarkdown.js'; +import { MetadataUtils } from '../metadata-utils/index.js'; +import { importData } from './dataUtils.js'; +import { analyzeDeployErrorLogs } from './deployTips.js'; +import { callSfdxGitDelta } from './gitUtils.js'; +import { createBlankSfdxProject, GLOB_IGNORE_PATTERNS, isSfdxProject } from './projectUtils.js'; +import { prompts } from './prompts.js'; +import { arrangeFilesBefore, restoreArrangedFiles } from './workaroundUtils.js'; +import { countPackageXmlItems, isPackageXmlEmpty, parseXmlFile, removePackageXmlFilesContent, writeXmlFile } from './xmlUtils.js'; +import { ResetMode } from 'simple-git'; +import { isProductionOrg } from './orgUtils.js'; +import { soqlQuery } from './apiUtils.js'; +import { checkSfdxHardisTraceAvailable } from './orgConfigUtils.js'; +import { PullRequestData } from '../gitProvider/index.js'; +import { WebSocketClient } from '../websocketClient.js'; // Push sources to org // For some cases, push must be performed in 2 times: the first with all passing sources, and the second with updated sources requiring the first push export async function forceSourcePush(scratchOrgAlias: string, commandThis: any, debug = false, options: any = {}) { - elapseStart("force:source:push"); - const config = await getConfig("user"); 
+ elapseStart('project:deploy:start'); + const config = await getConfig('user'); const currentBranch = await getCurrentGitBranch(); - let arrangedFiles = []; + let arrangedFiles: any[] = []; if (!(config[`tmp_${currentBranch}_pushed`] === true)) { arrangedFiles = await arrangeFilesBefore(commandThis, options); } try { - const sfdxPushCommand = options.sfdxPushCommand || "force:source:push"; - const pushCommand = `sfdx ${sfdxPushCommand} -g -w 60 --forceoverwrite -u ${scratchOrgAlias}`; + const pushCommand = `sf project deploy start --ignore-warnings --ignore-conflicts -o ${scratchOrgAlias} --wait 60 --json`; await execCommand(pushCommand, commandThis, { fail: true, output: !isCI, @@ -46,74 +65,76 @@ export async function forceSourcePush(scratchOrgAlias: string, commandThis: any, }); const configToSet = {}; configToSet[`tmp_${currentBranch}_pushed`] = true; - await setConfig("user", configToSet); + await setConfig('user', configToSet); } - elapseEnd("force:source:push"); + elapseEnd('project:deploy:start'); } catch (e) { await restoreArrangedFiles(arrangedFiles, commandThis); // Manage beta/legacy boza - const stdOut = e.stdout + e.stderr; - if (stdOut.includes(`'force:source:legacy:push' with your existing tracking files`)) { - options.sfdxPushCommand = "force:source:legacy:push"; - uxLog(this, c.yellow("Salesforce internal mess... trying with force:source:legacy:push")); - const pullRes = await forceSourcePush(scratchOrgAlias, commandThis, debug, options); - return pullRes; - } else if (stdOut.includes(`'force:source:beta:push' with your existing tracking files`)) { - options.sfdxPushCommand = "force:source:beta:push"; - uxLog(this, c.yellow("Salesforce internal mess... trying with force:source:beta:push")); - const pullRes = await forceSourcePush(scratchOrgAlias, commandThis, debug, options); - return pullRes; - } else if (stdOut.includes(`getaddrinfo EAI_AGAIN`)) { - uxLog(this, c.red(c.bold("The error has been caused by your unstable internet connection. 
Please Try again !"))); + const stdOut = (e as any).stdout + (e as any).stderr; + if (stdOut.includes(`getaddrinfo EAI_AGAIN`)) { + uxLog("error", this, c.red(c.bold('The error has been caused by your unstable internet connection. Please Try again !'))); } // Analyze errors - const { tips, errLog } = await analyzeDeployErrorLogs(stdOut, true, {}); - uxLog(commandThis, c.red("Sadly there has been push error(s)")); - uxLog(this, c.red("\n" + errLog)); - uxLog( - commandThis, - c.yellow(c.bold(`You may${tips.length > 0 ? " also" : ""} copy-paste errors on google to find how to solve the push issues :)`)), - ); - elapseEnd("force:source:push"); - throw new SfdxError("Deployment failure. Check messages above"); + const { errLog } = await analyzeDeployErrorLogs(stdOut, true, {}); + uxLog("error", commandThis, c.red('Sadly there has been push error(s)')); + uxLog("error", this, c.red('\n' + errLog)); + elapseEnd('project:deploy:start'); + killBoringExitHandlers(); + throw new SfError('Deployment failure. 
Check messages above'); } } -export async function forceSourcePull(scratchOrgAlias: string, debug = false, options: any = {}) { - const sfdxPullCommand = options.sfdxPullCommand || "force:source:pull"; +export async function forceSourcePull(scratchOrgAlias: string, debug = false) { + let pullCommandResult: any; try { - const pullCommand = `sfdx ${sfdxPullCommand} -w 60 --forceoverwrite -u ${scratchOrgAlias}`; - await execCommand(pullCommand, this, { + const pullCommand = `sf project retrieve start --ignore-conflicts -o ${scratchOrgAlias} --wait 60 --json`; + pullCommandResult = await execCommand(pullCommand, this, { fail: true, output: true, debug: debug, }); + // Parse json in stdout and if json.result.status and json.result.files, create a list of files with "type" + "file name", then order it, then display it in logs + if ((pullCommandResult?.result?.status === 'Succeeded' || pullCommandResult?.status === 0) && pullCommandResult?.result?.files) { + // Build an array of objects for table display + const files = pullCommandResult.result.files + .filter((file: any) => file?.state !== "Failed") + .map((file: any) => ({ + Type: file.type, + Name: file.fullName, + State: file.state, + Path: file.filePath || '' + })); + // Sort files by Type then Name + sortArray(files, { by: ['Type', 'Name'], order: ['asc', 'asc'] }); + uxLog("action", this, c.green('Successfully pulled sources from scratch org / source-tracked sandbox')); + // Display as a table + if (files.length > 0) { + // Use the uxLogTable utility for consistent table output + uxLogTable(this, files, ['Type', 'Name', 'State']); + } else { + uxLog("log", this, c.grey('No files pulled.')); + } + } else { + uxLog("error", this, c.red(`Pull command did not return expected results\n${JSON.stringify(pullCommandResult, null, 2)}`)); + } } catch (e) { // Manage beta/legacy boza - const stdOut = e.stdout + e.stderr; - if (stdOut.includes(`'force:source:legacy:pull' with your existing tracking files`)) { - 
options.sfdxPullCommand = "force:source:legacy:pull"; - uxLog(this, c.yellow("Salesforce internal mess... trying with force:source:legacy:pull")); - const pullRes = await forceSourcePull(scratchOrgAlias, debug, options); - return pullRes; - } else if (stdOut.includes(`'force:source:beta:pull' with your existing tracking files`)) { - options.sfdxPullCommand = "force:source:beta:pull"; - uxLog(this, c.yellow("Salesforce internal mess... trying with force:source:beta:pull")); - const pullRes = await forceSourcePull(scratchOrgAlias, debug, options); - return pullRes; - } + const stdOut = (e as any).stdout + (e as any).stderr; // Analyze errors - const { tips, errLog } = await analyzeDeployErrorLogs(stdOut, true, {}); - uxLog(this, c.red("Sadly there has been pull error(s)")); - uxLog(this, c.red("\n" + errLog)); + const { errLog } = await analyzeDeployErrorLogs(stdOut, true, {}); + uxLog("error", this, c.red('Sadly there has been pull error(s)')); + uxLog("error", this, c.red('\n' + errLog)); // List unknown elements from output const forceIgnoreElements = [...stdOut.matchAll(/Entity of type '(.*)' named '(.*)' cannot be found/gm)]; if (forceIgnoreElements.length > 0 && !isCI) { // Propose user to ignore elements const forceIgnoreRes = await prompts({ - type: "multiselect", - message: "If you want to try again with updated .forceignore file, please select elements you want to add, else escape", - name: "value", + type: 'multiselect', + message: + 'If you want to try again with updated .forceignore file, please select elements you want to add, else escape', + description: 'Select metadata elements to add to .forceignore to resolve deployment conflicts', + name: 'value', choices: forceIgnoreElements.map((forceIgnoreElt) => { return { title: `${forceIgnoreElt[1]}: ${forceIgnoreElt[2]}`, @@ -121,171 +142,333 @@ export async function forceSourcePull(scratchOrgAlias: string, debug = false, op }; }), }); - if (forceIgnoreRes.value.length > 0 && forceIgnoreRes.value[0] !== 
"exitNow") { - const forceIgnoreFile = "./.forceignore"; - const forceIgnore = await fs.readFile(forceIgnoreFile, "utf-8"); - const forceIgnoreLines = forceIgnore.replace("\r\n", "\n").split("\n"); + if (forceIgnoreRes.value.length > 0 && forceIgnoreRes.value[0] !== 'exitNow') { + const forceIgnoreFile = './.forceignore'; + const forceIgnore = await fs.readFile(forceIgnoreFile, 'utf-8'); + const forceIgnoreLines = forceIgnore.replace('\r\n', '\n').split('\n'); forceIgnoreLines.push(...forceIgnoreRes.value); - await fs.writeFile(forceIgnoreFile, forceIgnoreLines.join("\n") + "\n"); - uxLog(this, "Updated .forceignore file"); + await fs.writeFile(forceIgnoreFile, forceIgnoreLines.join('\n') + '\n'); + uxLog("log", this, 'Updated .forceignore file'); return await forceSourcePull(scratchOrgAlias, debug); } } - uxLog(this, c.yellow(c.bold(`You may${tips.length > 0 ? " also" : ""} copy-paste errors on google to find how to solve the pull issues :)`))); - throw new SfdxError("Pull failure. Check messages above"); + killBoringExitHandlers(); + throw new SfError('Pull failure. 
Check messages above'); } - // Check if some items has to be forced-retrieved because sfdx does not detect updates - const config = await getConfig("project"); + // Check if some items has to be forced-retrieved because SF CLI does not detect updates + const config = await getConfig('project'); if (config.autoRetrieveWhenPull) { - uxLog(this, c.cyan("Retrieving additional sources that are usually forgotten by force:source:pull ...")); - const metadataConstraint = config.autoRetrieveWhenPull.join(", "); - const retrieveCommand = `sfdx force:source:retrieve -w 60 -m "${metadataConstraint}" -u ${scratchOrgAlias}`; + uxLog("action", this, c.cyan('Retrieving additional sources that are usually forgotten by sf project:retrieve:start ...')); + const metadataConstraint = config.autoRetrieveWhenPull.join(', '); + const retrieveCommand = `sf project retrieve start -m "${metadataConstraint}" -o ${scratchOrgAlias} --wait 60`; await execCommand(retrieveCommand, this, { fail: true, output: true, debug: debug, }); } + + // If there are SharingRules, retrieve all of them to avoid the previous one are deleted (SF Cli strange/buggy behavior) + if (pullCommandResult?.stdout?.includes("SharingRules")) { + uxLog("action", this, c.cyan('Detected Sharing Rules in the pull: retrieving the whole of them to avoid silly overrides !')); + const sharingRulesNamesMatches = [...pullCommandResult.stdout.matchAll(/([^ \\/]+)\.sharingRules-meta\.xml/gm)]; + for (const match of sharingRulesNamesMatches) { + uxLog("log", this, c.grey(`Retrieve the whole ${match[1]} SharingRules...`)); + const retrieveCommand = `sf project retrieve start -m "SharingRules:${match[1]}" -o ${scratchOrgAlias} --wait 60`; + await execCommand(retrieveCommand, this, { + fail: true, + output: true, + debug: debug, + }); + } + } } -export async function forceSourceDeploy( +export async function smartDeploy( packageXmlFile: string, check = false, - testlevel = "RunLocalTests", + testlevel = 'RunLocalTests', debugMode = false, 
commandThis: any = this, - options: any = {}, + options: { + targetUsername: string; + conn: any; // Connection from Salesforce + testClasses: string; + postDestructiveChanges?: string; + preDestructiveChanges?: string; + delta?: boolean; + destructiveChangesAfterDeployment?: boolean; + extraCommands?: any[] + } ): Promise { - elapseStart("all deployments"); + elapseStart('all deployments'); let quickDeploy = false; + + // Check package.xml emptiness + const packageXmlIsEmpty = !fs.existsSync(packageXmlFile) || await isPackageXmlEmpty(packageXmlFile); + + // Check if destructive changes files exist and have content + const hasDestructiveChanges = ( + (!!options.preDestructiveChanges && fs.existsSync(options.preDestructiveChanges) && + !(await isPackageXmlEmpty(options.preDestructiveChanges))) || + (!!options.postDestructiveChanges && fs.existsSync(options.postDestructiveChanges) && + !(await isPackageXmlEmpty(options.postDestructiveChanges))) + ); + + // Check if files exist but are empty + const hasEmptyDestructiveChanges = ( + (!!options.preDestructiveChanges && fs.existsSync(options.preDestructiveChanges) && + await isPackageXmlEmpty(options.preDestructiveChanges)) || + (!!options.postDestructiveChanges && fs.existsSync(options.postDestructiveChanges) && + await isPackageXmlEmpty(options.postDestructiveChanges)) + ); + + // Special case: both package.xml and destructive changes files exist but are empty + if (packageXmlIsEmpty && hasEmptyDestructiveChanges && !hasDestructiveChanges) { + uxLog("action", this, c.cyan('Both package.xml and destructive changes files exist but are empty. 
Nothing to deploy.')); + return { messages: [], quickDeploy, deployXmlCount: 0 }; + } + + // If we have empty package.xml and no destructive changes, there's nothing to do + if (packageXmlIsEmpty && !hasDestructiveChanges) { + uxLog("other", this, 'No deployment or destructive changes to perform'); + return { messages: [], quickDeploy, deployXmlCount: 0 }; + } + + // If we have empty package.xml but destructive changes, log it + if (packageXmlIsEmpty && hasDestructiveChanges) { + uxLog("action", this, c.cyan('Package.xml is empty, but destructive changes are present. Will proceed with deployment of destructive changes.')); + } + const splitDeployments = await buildDeploymentPackageXmls(packageXmlFile, check, debugMode, options); - const messages = []; + const messages: any[] = []; let deployXmlCount = splitDeployments.length; - if (deployXmlCount === 0) { - uxLog(this, "No deployment to perform"); + // If no deployments are planned but we have destructive changes, add a deployment with the existing package.xml + if (deployXmlCount === 0 && hasDestructiveChanges) { + uxLog("action", this, c.cyan('Creating deployment for destructive changes...')); + splitDeployments.push({ + label: 'package-for-destructive-changes', + packageXmlFile: packageXmlFile, + order: options.destructiveChangesAfterDeployment ? 
999 : 0, + }); + deployXmlCount = 1; + } else if (deployXmlCount === 0) { + uxLog("other", this, 'No deployment to perform'); return { messages, quickDeploy, deployXmlCount }; } // Replace quick actions with dummy content in case we have dependencies between Flows & QuickActions await replaceQuickActionsWithDummy(); // Run deployment pre-commands - await executePrePostCommands("commandsPreDeploy", true); + await executePrePostCommands('commandsPreDeploy', { success: true, checkOnly: check, conn: options.conn, extraCommands: options.extraCommands }); // Process items of deployment plan - uxLog(this, c.cyan("Processing split deployments build from deployment plan...")); - uxLog(this, c.whiteBright(JSON.stringify(splitDeployments, null, 2))); + uxLog("action", this, c.cyan('Processing split deployments build from deployment plan...')); + uxLog("other", this, c.whiteBright(JSON.stringify(splitDeployments, null, 2))); for (const deployment of splitDeployments) { elapseStart(`deploy ${deployment.label}`); - // Skip this deployment items if there is nothing to deploy in package.xml - if (deployment.packageXmlFile && (await isPackageXmlEmpty(deployment.packageXmlFile, { ignoreStandaloneParentItems: true }))) { + + // Skip this deployment if package.xml is empty AND it's not a special destructive changes deployment + // AND there are no destructive changes + const isDestructiveChangesDeployment = deployment.label === 'package-for-destructive-changes'; + const packageXmlEmpty = await isPackageXmlEmpty(deployment.packageXmlFile, { ignoreStandaloneParentItems: true }); + + if (packageXmlEmpty && !isDestructiveChangesDeployment && !hasDestructiveChanges) { uxLog( + "log", commandThis, - c.cyan( - `Skipped ${c.bold(deployment.label)} deployment because package.xml is empty or contains only standalone parent items.\n${c.grey( - c.italic("This may be related to filtering using package-no-overwrite.xml or packageDeployOnChange.xml"), - )}`, - ), + c.grey( + `Skipped ${c.bold( + 
deployment.label + )} deployment because package.xml is empty or contains only standalone parent items.\n${c.grey( + c.italic('This may be related to filtering using package-no-overwrite.xml or packageDeployOnChange.xml') + )}` + ) ); deployXmlCount--; elapseEnd(`deploy ${deployment.label}`); continue; } - let message = ""; + let message = ''; // Wait before deployment item process if necessary if (deployment.waitBefore) { - uxLog(commandThis, `Waiting ${deployment.waitBefore} seconds before deployment according to deployment plan`); + uxLog("log", commandThis, `Waiting ${deployment.waitBefore} seconds before deployment according to deployment plan`); await new Promise((resolve) => setTimeout(resolve, deployment.waitBefore * 1000)); } // Deployment of type package.xml file if (deployment.packageXmlFile) { + const nbDeployedItems = await countPackageXmlItems(deployment.packageXmlFile); + + if (nbDeployedItems === 0 && !hasDestructiveChanges) { + uxLog( + "warning", + commandThis, + c.yellow( + `Skipping deployment of ${c.bold(deployment.label)} because package.xml is empty and there are no destructive changes.` + ) + ); + elapseEnd(`deploy ${deployment.label}`); + continue; + } + uxLog( + "action", commandThis, - c.cyan(`${check ? "Simulating deployment of" : "Deploying"} ${c.bold(deployment.label)} package: ${deployment.packageXmlFile} ...`), + c.cyan( + `${check ? 'Simulating deployment of' : 'Deploying'} ${c.bold(deployment.label)} package: ${deployment.packageXmlFile + } (${nbDeployedItems} items)${hasDestructiveChanges ? 
' with destructive changes' : ''}...` + ) ); // Try QuickDeploy - if (check === false && (process.env?.SFDX_HARDIS_QUICK_DEPLOY || "") !== "false") { + if (check === false && (process.env?.SFDX_HARDIS_QUICK_DEPLOY || '') !== 'false') { const deploymentCheckId = await GitProvider.getDeploymentCheckId(); if (deploymentCheckId) { const quickDeployCommand = - `sfdx force:source:deploy` + - ` --validateddeployrequestid ${deploymentCheckId} ` + - (options.targetUsername ? ` --targetusername ${options.targetUsername}` : "") + - ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || "60"}` + - ` --verbose` + - (process.env.SFDX_DEPLOY_DEV_DEBUG ? " --dev-debug" : ""); + `sf project deploy quick` + + ` --job-id ${deploymentCheckId} ` + + (options.targetUsername ? ` -o ${options.targetUsername}` : '') + + ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || '120'}` + + (debugMode ? ' --verbose' : '') + + (process.env.SFDX_DEPLOY_DEV_DEBUG ? ' --dev-debug' : ''); const quickDeployRes = await execSfdxJson(quickDeployCommand, commandThis, { output: true, debug: debugMode, fail: false, }); if (quickDeployRes.status === 0) { - uxLog(commandThis, c.green(`Successfully processed QuickDeploy for deploymentId ${deploymentCheckId}`)); - uxLog(commandThis, c.yellow("If you do not want to use QuickDeploy feature, define env variable SFDX_HARDIS_QUICK_DEPLOY=false")); + uxLog("success", commandThis, c.green(`Successfully processed QuickDeploy for deploymentId ${deploymentCheckId}`)); + uxLog( + "warning", + commandThis, + c.yellow( + 'If you do not want to use QuickDeploy feature, define env variable SFDX_HARDIS_QUICK_DEPLOY=false' + ) + ); quickDeploy = true; continue; } else { - uxLog(commandThis, c.yellow(`Unable to perform QuickDeploy for deploymentId ${deploymentCheckId}.\n${quickDeployRes.errorMessage}.`)); - uxLog(commandThis, c.green("Switching back to effective deployment not using QuickDeploy: that's ok :)")); - const isSandboxOrg = await isSandbox(options); - if (isSandboxOrg) { - 
testlevel = "NoTestRun"; - uxLog(commandThis, c.green("Note: run with NoTestRun to improve perfs as we had previously succeeded to simulate the deployment")); + uxLog( + "warning", + commandThis, + c.yellow( + `Unable to perform QuickDeploy for deploymentId ${deploymentCheckId}.\n${quickDeployRes.errorMessage}.` + ) + ); + uxLog("success", commandThis, c.green("Switching back to effective deployment not using QuickDeploy: that's ok :)")); + const isProdOrg = await isProductionOrg(options.targetUsername || "", options); + if (!isProdOrg) { + testlevel = 'NoTestRun'; + uxLog( + "success", + commandThis, + c.green( + 'Note: run with NoTestRun to improve perfs as we had previously succeeded to simulate the deployment' + ) + ); } } } } // No QuickDeploy Available, or QuickDeploy failing : try full deploy - const branchConfig = await getConfig("branch"); + const branchConfig = await getConfig('branch'); + const reportDir = await getReportDirectory(); const deployCommand = - `sfdx force:source:deploy -x "${deployment.packageXmlFile}"` + - ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || "60"}` + - " --ignorewarnings" + // So it does not fail in for objectTranslations stuff - ` --testlevel ${testlevel}` + - (options.testClasses && testlevel !== "NoTestRun" ? ` --runtests ${options.testClasses}` : "") + - (options.preDestructiveChanges ? ` --predestructivechanges ${options.preDestructiveChanges}` : "") + - (options.postDestructiveChanges ? ` --postdestructivechanges ${options.postDestructiveChanges}` : "") + - (options.targetUsername ? ` --targetusername ${options.targetUsername}` : "") + - (check ? " --checkonly" : "") + - " --verbose" + - (branchConfig?.skipCodeCoverage === true ? "" : " --coverageformatters json-summary") + - (process.env.SFDX_DEPLOY_DEV_DEBUG ? " --dev-debug" : ""); + `sf project deploy` + + // (check && testlevel !== 'NoTestRun' ? 
' validate' : ' start') + // Not until validate command is correct and accepts ignore-warnings + ' start' + + // (check && testlevel === 'NoTestRun' ? ' --dry-run' : '') + // validate with NoTestRun does not work, so use --dry-run + (check ? ' --dry-run' : '') + + ` --manifest "${deployment.packageXmlFile}"` + + ' --ignore-warnings' + // So it does not fail in for objectTranslations stuff for example + ' --ignore-conflicts' + // With CICD we are supposed to ignore them + ` --results-dir ${reportDir}` + + ` --test-level ${testlevel}` + + (options.testClasses && testlevel !== 'NoTestRun' ? ` --tests ${options.testClasses}` : '') + + (options.preDestructiveChanges ? ` --pre-destructive-changes ${options.preDestructiveChanges}` : '') + + (options.postDestructiveChanges && !(options.destructiveChangesAfterDeployment === true) ? ` --post-destructive-changes ${options.postDestructiveChanges}` : '') + + (options.targetUsername ? ` -o ${options.targetUsername}` : '') + + (testlevel === 'NoTestRun' || branchConfig?.skipCodeCoverage === true ? '' : ' --coverage-formatters json-summary') + + ((testlevel === 'NoTestRun' || branchConfig?.skipCodeCoverage === true) && process.env?.COVERAGE_FORMATTER_JSON === "true" ? '' : ' --coverage-formatters json') + + (debugMode ? ' --verbose' : '') + + ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || '120'}` + + (process.env.SFDX_DEPLOY_DEV_DEBUG ? 
' --dev-debug' : '') + + ` --json`; let deployRes; try { deployRes = await execCommand(deployCommand, commandThis, { - output: true, + output: false, debug: debugMode, fail: true, retry: deployment.retry || null, }); - } catch (e) { - deployRes = await handleDeployError(e, check, branchConfig, commandThis, options, deployment); + if (deployRes.status === 0) { + uxLog("log", commandThis, c.grey(shortenLogLines(JSON.stringify(deployRes)))); + } + } catch (e: any) { + await generateApexCoverageOutputFile(); + + // Special handling for "nothing to deploy" error with destructive changes + if ((e.stdout + e.stderr).includes("No local changes to deploy") && hasDestructiveChanges) { + + uxLog("warning", commandThis, c.yellow(c.bold( + 'Received "Nothing to Deploy" error, but destructive changes are present. ' + + 'This can happen when only destructive changes are being deployed.' + ))); + + // Create a minimal response to avoid terminal freeze + deployRes = { + status: 0, // Treat as success + stdout: JSON.stringify({ + status: 0, + result: { + success: true, + id: "destructiveChangesOnly", + details: { + componentSuccesses: [], + runTestResult: null + } + } + }), + stderr: "" + }; + } else { + deployRes = await handleDeployError(e, check, branchConfig, commandThis, options, deployment); + } + } + + if (typeof deployRes === 'object') { + deployRes.stdout = JSON.stringify(deployRes); } + await generateApexCoverageOutputFile(); // Set deployment id - await getDeploymentId(deployRes.stdout + deployRes.stderr || ""); + await getDeploymentId(deployRes.stdout + deployRes.stderr || ''); // Check org coverage if found in logs - const orgCoveragePercent = await extractOrgCoverageFromLog(deployRes.stdout + deployRes.stderr || ""); + const orgCoveragePercent = await extractOrgCoverageFromLog(deployRes.stdout + deployRes.stderr || ''); if (orgCoveragePercent) { try { - await checkDeploymentOrgCoverage(orgCoveragePercent, { check: check, testlevel: testlevel }); + await 
checkDeploymentOrgCoverage(Number(orgCoveragePercent), { check: check, testlevel: testlevel }); } catch (errCoverage) { if (check) { await GitProvider.managePostPullRequestComment(); } + killBoringExitHandlers(); throw errCoverage; } } else { // Handle notif message when there is no apex const existingPrData = globalThis.pullRequestData || {}; - const prDataCodeCoverage: any = { - messageKey: existingPrData.messageKey ?? "deployment", - title: (existingPrData.title ?? check) ? "✅ Deployment check success" : "✅ Deployment success", + const prDataCodeCoverage: PullRequestData = { + messageKey: existingPrData.messageKey ?? 'deployment', + title: existingPrData.title ?? check ? '✅ Deployment check success' : '✅ Deployment success', codeCoverageMarkdownBody: - branchConfig?.skipCodeCoverage === true - ? "✅⚠️ Code coverage has been skipped for this level" - : "✅ No code coverage: It seems there is not Apex in this project", - deployStatus: "valid", + testlevel === 'NoTestRun' + ? '⚠️ Apex Tests has not been run thanks to useSmartDeploymentTests' : + branchConfig?.skipCodeCoverage === true + ? '✅⚠️ Code coverage has been skipped for this level' + : '✅ No code coverage: It seems there is not Apex in this project', + deployStatus: 'valid', }; globalThis.pullRequestData = Object.assign(globalThis.pullRequestData || {}, prDataCodeCoverage); } @@ -294,32 +477,34 @@ export async function forceSourceDeploy( await GitProvider.managePostPullRequestComment(); } - let extraInfo = options?.delta === true ? "DELTA Deployment" : "FULL Deployment"; + let extraInfo = options?.delta === true ? 'DELTA Deployment' : 'FULL Deployment'; if (quickDeploy === true) { - extraInfo += " (using Quick Deploy)"; + extraInfo += ' (using Quick Deploy)'; } // Display deployment status if (deployRes.status === 0) { message = - `[sfdx-hardis] Successfully ${check ? 
"checked deployment of" : "deployed"} ${c.bold(deployment.label)} to target Salesforce org - ` + - extraInfo; - uxLog(commandThis, c.green(message)); + `[sfdx-hardis] Successfully ${check ? 'checked deployment of' : 'deployed'} ${c.bold( + deployment.label + )} to target Salesforce org - ` + extraInfo; + uxLog("success", commandThis, c.green(message)); if (deployRes?.testCoverageNotBlockingActivated === true) { uxLog( + "warning", commandThis, c.yellow( - "There is a code coverage issue, but the check is passing by design because you configured testCoverageNotBlocking: true in your branch .sfdx-hardis.yml", - ), + 'There is a code coverage issue, but the check is passing by design because you configured testCoverageNotBlocking: true in your branch .sfdx-hardis.yml' + ) ); } } else { message = `[sfdx-hardis] Unable to deploy ${c.bold(deployment.label)} to target Salesforce org - ` + extraInfo; - uxLog(commandThis, c.red(c.bold(deployRes.errorMessage))); + uxLog("error", commandThis, c.red(c.bold(deployRes.errorMessage))); await displayDeploymentLink(deployRes.errorMessage, options); } // Restore quickActions after deployment of main package - if (deployment.packageXmlFile.includes("mainPackage.xml")) { + if (deployment.packageXmlFile.includes('mainPackage.xml')) { await restoreQuickActions(); } elapseEnd(`deploy ${deployment.label}`); @@ -331,124 +516,177 @@ export async function forceSourceDeploy( } // Wait after deployment item process if necessary if (deployment.waitAfter) { - uxLog(commandThis, `Waiting ${deployment.waitAfter} seconds after deployment according to deployment plan`); + uxLog("log", commandThis, `Waiting ${deployment.waitAfter} seconds after deployment according to deployment plan`); await new Promise((resolve) => setTimeout(resolve, deployment.waitAfter * 1000)); } messages.push(message); } // Run deployment post commands - await executePrePostCommands("commandsPostDeploy", true); - elapseEnd("all deployments"); + await 
executePrePostCommands('commandsPostDeploy', { success: true, checkOnly: check, conn: options.conn, extraCommands: options.extraCommands }); + elapseEnd('all deployments'); return { messages, quickDeploy, deployXmlCount }; } -async function handleDeployError(e: any, check: boolean, branchConfig: any, commandThis: any, options: any, deployment: any) { - const output: string = e.stdout + e.stderr; +async function handleDeployError( + e: any, + check: boolean, + branchConfig: any, + commandThis: any, + options: any, + deployment: any +) { + const output: string = (e as any).stdout + (e as any).stderr; // Handle coverage error if ignored if ( check === true && branchConfig?.testCoverageNotBlocking === true && - output.includes("=== Test Success") && - !output.includes("Test Failures") && - output.includes("=== Apex Code Coverage") + (output.includes('=== Test Success') || output.includes('Test Success [')) && + !output.includes('Test Failures') && + (output.includes('=== Apex Code Coverage') || output.includes("Failing: 0")) ) { - uxLog(commandThis, c.yellow(c.bold("Deployment status: Deploy check success & Ignored test coverage error"))); - return { status: 0, stdout: e.stdout, stderr: e.stderr, testCoverageNotBlockingActivated: true }; + uxLog("warning", commandThis, c.yellow(c.bold('Deployment status: Deploy check success & Ignored test coverage error'))); + return { status: 0, stdout: (e as any).stdout, stderr: (e as any).stderr, testCoverageNotBlockingActivated: true }; } // Handle Effective error - const { tips, errLog } = await analyzeDeployErrorLogs(output, true, { check: check }); - uxLog(commandThis, c.red(c.bold("Sadly there has been Deployment error(s)"))); - if (process.env?.SFDX_HARDIS_DEPLOY_ERR_COLORS === "false") { - uxLog(this, "\n" + errLog); + const { errLog } = await analyzeDeployErrorLogs(output, true, { check: check }); + uxLog("error", commandThis, c.red(c.bold('Sadly there has been Deployment error(s)'))); + if 
(process.env?.SFDX_HARDIS_DEPLOY_ERR_COLORS === 'false') { + uxLog("other", this, '\n' + errLog); } else { - uxLog(this, c.red("\n" + errLog)); + uxLog("error", this, c.red('\n' + errLog)); } - uxLog( - commandThis, - c.yellow(c.bold(`You may${tips.length > 0 ? " also" : ""} copy-paste errors on google to find how to solve the deployment issues :)`)), - ); await displayDeploymentLink(output, options); elapseEnd(`deploy ${deployment.label}`); if (check) { await GitProvider.managePostPullRequestComment(); } - await executePrePostCommands("commandsPostDeploy", false); - throw new SfdxError("Deployment failure. Check messages above"); + await executePrePostCommands('commandsPostDeploy', { success: false, checkOnly: check, conn: options.conn }); + killBoringExitHandlers(); + throw new SfError('Deployment failure. Check messages above'); } -export function truncateProgressLogLines(rawLog: string) { - const rawLogCleaned = rawLog - .replace(/(SOURCE PROGRESS \|.*\n)/gm, "") - .replace(/(MDAPI PROGRESS \|.*\n)/gm, "") - .replace(/(DEPLOY PROGRESS \|.*\n)/gm, ""); +export function shortenLogLines(rawLog: string) { + let rawLogCleaned = rawLog + .replace(/(SOURCE PROGRESS \|.*\n)/gm, '') + .replace(/(MDAPI PROGRESS \|.*\n)/gm, '') + .replace(/(DEPLOY PROGRESS \|.*\n)/gm, '') + .replace(/(Status: In Progress \|.*\n)/gm, ''); + // Truncate JSON if huge log + if (rawLogCleaned.split("\n").length > 1000 && !(process.env?.NO_TRUNCATE_LOGS === "true")) { + const msg = "Result truncated by sfdx-hardis. 
Define NO_TRUNCATE_LOGS=true tu have full JSON logs"; + const jsonLog = findJsonInString(rawLogCleaned); + if (jsonLog) { + if (jsonLog?.result?.details?.componentSuccesses) { + jsonLog.result.details.componentSuccesses = jsonLog.result.details.componentSuccesses.filter(item => item.changed === true); + jsonLog.truncatedBySfdxHardis = msg; + } + if (jsonLog?.result?.details?.runTestResult) { + delete jsonLog.result.details.runTestResult; + jsonLog.truncatedBySfdxHardis = msg; + } + if (jsonLog?.result?.files) { + jsonLog.result.files = jsonLog.result.files.filter(item => item.state === 'Changed'); + jsonLog.truncatedBySfdxHardis = msg; + } + rawLogCleaned = replaceJsonInString(rawLogCleaned, jsonLog); + } + } return rawLogCleaned; } async function getDeploymentId(rawLog: string) { + // JSON Mode + const jsonLog = findJsonInString(rawLog); + if (jsonLog) { + const deploymentId = jsonLog?.result?.id || null; + if (deploymentId) { + globalThis.pullRequestDeploymentId = deploymentId; + return deploymentId; + } + } + // Text mode const regex = /Deploy ID: (.*)/gm; if (rawLog && rawLog.match(regex)) { - const deploymentId = regex.exec(rawLog)[1]; + const deploymentId = (regex.exec(rawLog) || [])[1]; globalThis.pullRequestDeploymentId = deploymentId; return deploymentId; } + uxLog("warning", this, c.yellow(`Unable to find deploymentId in logs \n${c.grey(rawLog)}`)); return null; } // Display deployment link in target org async function displayDeploymentLink(rawLog: string, options: any) { - if (process?.env?.SFDX_HARDIS_DISPLAY_DEPLOYMENT_LINK === "true") { - let deploymentUrl = "lightning/setup/DeployStatus/home"; + if (process?.env?.SFDX_HARDIS_DISPLAY_DEPLOYMENT_LINK === 'true') { + let deploymentUrl = 'lightning/setup/DeployStatus/home'; const deploymentId = await getDeploymentId(rawLog); if (deploymentId) { const detailedDeploymentUrl = - "/changemgmt/monitorDeploymentsDetails.apexp?" 
+ encodeURIComponent(`retURL=/changemgmt/monitorDeployment.apexp&asyncId=${deploymentId}`); - deploymentUrl = "lightning/setup/DeployStatus/page?address=" + encodeURIComponent(detailedDeploymentUrl); + '/changemgmt/monitorDeploymentsDetails.apexp?' + + encodeURIComponent(`retURL=/changemgmt/monitorDeployment.apexp&asyncId=${deploymentId}`); + deploymentUrl = 'lightning/setup/DeployStatus/page?address=' + encodeURIComponent(detailedDeploymentUrl); } const openRes = await execSfdxJson( - `sf org open -p ${deploymentUrl} --url-only` + (options.targetUsername ? ` --target-org ${options.targetUsername}` : ""), + `sf org open -p ${deploymentUrl} --url-only` + + (options.targetUsername ? ` --target-org ${options.targetUsername}` : ''), this, { fail: true, output: false, - }, + } + ); + uxLog( + "warning", + this, + c.yellowBright(`Open deployment status page in org with url: ${c.bold(c.greenBright(openRes?.result?.url))}`) ); - uxLog(this, c.yellowBright(`Open deployment status page in org with url: ${c.bold(c.greenBright(openRes?.result?.url))}`)); } } // In some case we can not deploy the whole package.xml, so let's split it before :) -async function buildDeploymentPackageXmls(packageXmlFile: string, check: boolean, debugMode: boolean, options: any = {}): Promise { +async function buildDeploymentPackageXmls( + packageXmlFile: string, + check: boolean, + debugMode: boolean, + options: any = {} +): Promise { // Check for empty package.xml if (await isPackageXmlEmpty(packageXmlFile)) { - uxLog(this, "Empty package.xml: nothing to deploy"); + uxLog("other", this, 'Empty package.xml: nothing to deploy'); return []; } const deployOncePackageXml = await buildDeployOncePackageXml(debugMode, options); const deployOnChangePackageXml = await buildDeployOnChangePackageXml(debugMode, options); // Copy main package.xml so it can be dynamically updated before deployment - const tmpDeployDir = await createTempDir(); - const mainPackageXmlCopyFileName = path.join(tmpDeployDir, 
"calculated-package.xml"); + const tmpDir = await createTempDir(); + const mainPackageXmlCopyFileName = path.join(tmpDir, 'calculated-package.xml'); await fs.copy(packageXmlFile, mainPackageXmlCopyFileName); const mainPackageXmlItem = { - label: "calculated-package-xml", + label: 'calculated-package-xml', packageXmlFile: mainPackageXmlCopyFileName, order: 0, }; - const config = await getConfig("user"); + const config = await getConfig('user'); // Build list of package.xml according to plan if (config.deploymentPlan && !check) { const deploymentItems = [mainPackageXmlItem]; // Work on deploymentPlan packages before deploying them - const skipSplitPackages = (process.env.SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES || "true") !== "false"; + const skipSplitPackages = (process.env.SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES || 'true') !== 'false'; if (skipSplitPackages === true) { - uxLog(this, c.yellow("Do not split package.xml, as SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES=false has not been found in ENV vars")); + uxLog( + "warning", + this, + c.yellow( + 'Do not split package.xml, as SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES=false has not been found in ENV vars' + ) + ); } else { for (const deploymentItem of config.deploymentPlan.packages) { if (deploymentItem.packageXmlFile) { // Copy deployment in temp packageXml file so it can be updated using package-no-overwrite and packageDeployOnChange deploymentItem.packageXmlFile = path.resolve(deploymentItem.packageXmlFile); - const splitPackageXmlCopyFileName = path.join(tmpDeployDir, path.basename(deploymentItem.packageXmlFile)); + const splitPackageXmlCopyFileName = path.join(tmpDir, path.basename(deploymentItem.packageXmlFile)); await fs.copy(deploymentItem.packageXmlFile, splitPackageXmlCopyFileName); deploymentItem.packageXmlFile = splitPackageXmlCopyFileName; // Remove split of packageXml content from main package.xml @@ -456,26 +694,41 @@ async function buildDeploymentPackageXmls(packageXmlFile: string, check: boolean 
debugMode: debugMode, keepEmptyTypes: true, }); - await applyPackageXmlFiltering(deploymentItem.packageXmlFile, deployOncePackageXml, deployOnChangePackageXml, debugMode); + await applyPackageXmlFiltering( + deploymentItem.packageXmlFile, + deployOncePackageXml, + deployOnChangePackageXml, + debugMode + ); } deploymentItems.push(deploymentItem); } } - await applyPackageXmlFiltering(mainPackageXmlCopyFileName, deployOncePackageXml, deployOnChangePackageXml, debugMode); + await applyPackageXmlFiltering( + mainPackageXmlCopyFileName, + deployOncePackageXml, + deployOnChangePackageXml, + debugMode + ); // Sort in requested order const deploymentItemsSorted = sortArray(deploymentItems, { - by: ["order", "label"], - order: ["asc", "asc"], + by: ['order', 'label'], + order: ['asc', 'asc'], }); return deploymentItemsSorted; } // Return initial package.xml file minus deployOnce and deployOnChange items else { - await applyPackageXmlFiltering(mainPackageXmlCopyFileName, deployOncePackageXml, deployOnChangePackageXml, debugMode); + await applyPackageXmlFiltering( + mainPackageXmlCopyFileName, + deployOncePackageXml, + deployOnChangePackageXml, + debugMode + ); return [ { - label: "calculated-package-xml", + label: 'calculated-package-xml', packageXmlFile: mainPackageXmlCopyFileName, }, ]; @@ -486,41 +739,75 @@ async function buildDeploymentPackageXmls(packageXmlFile: string, check: boolean async function applyPackageXmlFiltering(packageXml, deployOncePackageXml, deployOnChangePackageXml, debugMode) { // Main packageXml: Remove package-no-overwrite.xml items that are already present in target org if (deployOncePackageXml) { - await removePackageXmlContent(packageXml, deployOncePackageXml, false, { debugMode: debugMode, keepEmptyTypes: true }); + await removePackageXmlContent(packageXml, deployOncePackageXml, false, { + debugMode: debugMode, + keepEmptyTypes: true, + }); } //Main packageXml: Remove packageDeployOnChange.xml items that are not different in target org if 
(deployOnChangePackageXml) { - await removePackageXmlContent(packageXml, deployOnChangePackageXml, false, { debugMode: debugMode, keepEmptyTypes: true }); + await removePackageXmlContent(packageXml, deployOnChangePackageXml, false, { + debugMode: debugMode, + keepEmptyTypes: true, + }); } } // package-no-overwrite.xml items are deployed only if they are not in the target org async function buildDeployOncePackageXml(debugMode = false, options: any = {}) { - if (process.env.SKIP_PACKAGE_DEPLOY_ONCE === "true") { - uxLog(this, c.yellow("Skipped package-no-overwrite.xml management because of env variable SKIP_PACKAGE_DEPLOY_ONCE='true'")); + if (process.env.SKIP_PACKAGE_DEPLOY_ONCE === 'true') { + uxLog( + "warning", + this, + c.yellow("Skipped package-no-overwrite.xml management because of env variable SKIP_PACKAGE_DEPLOY_ONCE='true'") + ); return null; } - let packageNoOverwrite = path.resolve("./manifest/package-no-overwrite.xml"); + // Get default package-no-overwrite + let packageNoOverwrite = path.resolve('./manifest/package-no-overwrite.xml'); if (!fs.existsSync(packageNoOverwrite)) { - packageNoOverwrite = path.resolve("./manifest/packageDeployOnce.xml"); + packageNoOverwrite = path.resolve('./manifest/packageDeployOnce.xml'); + } + const config = await getConfig("branch"); + if (process.env?.PACKAGE_NO_OVERWRITE_PATH || config?.packageNoOverwritePath) { + packageNoOverwrite = process.env.PACKAGE_NO_OVERWRITE_PATH || config?.packageNoOverwritePath; + if (!fs.existsSync(packageNoOverwrite)) { + throw new SfError(`packageNoOverwritePath property or PACKAGE_NO_OVERWRITE_PATH leads not existing file ${packageNoOverwrite}`); + } + uxLog("log", this, c.grey(`Using custom package-no-overwrite file defined at ${packageNoOverwrite}`)); } if (fs.existsSync(packageNoOverwrite)) { - uxLog(this, c.cyan("Handling package-no-overwrite.xml...")); + uxLog("action", this, c.cyan('Handling package-no-overwrite.xml...')); // If package-no-overwrite.xml is not empty, build target 
org package.xml and remove its content from packageOnce.xml if (!(await isPackageXmlEmpty(packageNoOverwrite))) { const tmpDir = await createTempDir(); // Build target org package.xml - uxLog(this, c.cyan(`Generating full package.xml from target org to identify its items matching with package-no-overwrite.xml ...`)); - const targetOrgPackageXml = path.join(tmpDir, "packageTargetOrg.xml"); + uxLog( + "action", + this, + c.cyan( + `Generating full package.xml from target org to identify its items matching with package-no-overwrite.xml ...` + ) + ); + const targetOrgPackageXml = path.join(tmpDir, 'packageTargetOrg.xml'); await buildOrgManifest(options.targetUsername, targetOrgPackageXml, options.conn); - let calculatedPackageNoOverwrite = path.join(tmpDir, "package-no-overwrite.xml"); + let calculatedPackageNoOverwrite = path.join(tmpDir, 'package-no-overwrite.xml'); await fs.copy(packageNoOverwrite, calculatedPackageNoOverwrite); // Keep in deployOnce.xml only what is necessary to deploy - await removePackageXmlContent(calculatedPackageNoOverwrite, targetOrgPackageXml, true, { debugMode: debugMode, keepEmptyTypes: false }); - await fs.copy(calculatedPackageNoOverwrite, path.join(tmpDir, "calculated-package-no-overwrite.xml")); - calculatedPackageNoOverwrite = path.join(tmpDir, "calculated-package-no-overwrite.xml"); - uxLog(this, c.grey(`calculated-package-no-overwrite.xml with only items that already exist in target org: ${calculatedPackageNoOverwrite}`)); + await removePackageXmlContent(calculatedPackageNoOverwrite, targetOrgPackageXml, true, { + debugMode: debugMode, + keepEmptyTypes: false, + }); + await fs.copy(calculatedPackageNoOverwrite, path.join(tmpDir, 'calculated-package-no-overwrite.xml')); + calculatedPackageNoOverwrite = path.join(tmpDir, 'calculated-package-no-overwrite.xml'); + uxLog( + "log", + this, + c.grey( + `calculated-package-no-overwrite.xml with only items that already exist in target org: ${calculatedPackageNoOverwrite}` + ) + ); // Check 
if there is still something in calculated-package-no-overwrite.xml if (!(await isPackageXmlEmpty(calculatedPackageNoOverwrite))) { return calculatedPackageNoOverwrite; @@ -532,58 +819,74 @@ async function buildDeployOncePackageXml(debugMode = false, options: any = {}) { // packageDeployOnChange.xml items are deployed only if they have changed in target org export async function buildDeployOnChangePackageXml(debugMode: boolean, options: any = {}) { - if (process.env.SKIP_PACKAGE_DEPLOY_ON_CHANGE === "true") { - uxLog(this, c.yellow("Skipped packageDeployOnChange.xml management because of env variable SKIP_PACKAGE_DEPLOY_ON_CHANGE='true'")); + if (process.env.SKIP_PACKAGE_DEPLOY_ON_CHANGE === 'true') { + uxLog( + "warning", + this, + c.yellow( + "Skipped packageDeployOnChange.xml management because of env variable SKIP_PACKAGE_DEPLOY_ON_CHANGE='true'" + ) + ); return null; } // Check if packageDeployOnChange.xml is defined - const packageDeployOnChangePath = "./manifest/packageDeployOnChange.xml"; + const packageDeployOnChangePath = './manifest/packageDeployOnChange.xml'; if (!fs.existsSync(packageDeployOnChangePath)) { return null; } // Retrieve sfdx sources in local git repo await execCommand( - `sfdx force:source:retrieve -x ${packageDeployOnChangePath}` + (options.targetUsername ? ` -u ${options.targetUsername}` : ""), + `sf project retrieve start --manifest ${packageDeployOnChangePath}` + + (options.targetUsername ? 
` --target-org ${options.targetUsername}` : ''), this, { fail: true, output: true, debug: debugMode, - }, + } ); // Do not call delta if no updated file has been retrieved const hasGitLocalUpdates = await gitHasLocalUpdates(); if (hasGitLocalUpdates === false) { - uxLog(this, c.grey("No diff retrieved from packageDeployOnChange.xml")); + uxLog("log", this, c.grey('No diff retrieved from packageDeployOnChange.xml')); return null; } - // "Temporarily" commit updates so sfdx git delta can build diff package.xml - await git().addConfig("user.email", "bot@hardis.com", false, "global"); - await git().addConfig("user.name", "Hardis", false, "global"); - await git().add("--all"); - await git().commit('"temp"', ["--no-verify"]); + // "Temporarily" commit updates so sfdx-git-delta can build diff package.xml + await git().addConfig('user.email', 'bot@hardis.com', false, 'global'); + await git().addConfig('user.name', 'Hardis', false, 'global'); + await git().add('--all'); + await git().commit('"temp"', ['--no-verify']); // Generate package.xml git delta const tmpDir = await createTempDir(); - const gitDeltaCommandRes = await callSfdxGitDelta("HEAD~1", "HEAD", tmpDir, { debug: debugMode }); + const gitDeltaCommandRes = await callSfdxGitDelta('HEAD~1', 'HEAD', tmpDir, { debug: debugMode }); // Now that the diff is computed, we can dump the temporary commit - await git().reset(ResetMode.HARD, ["HEAD~1"]); + await git().reset(ResetMode.HARD, ['HEAD~1']); // Check git delta is ok - const diffPackageXml = path.join(tmpDir, "package", "package.xml"); + const diffPackageXml = path.join(tmpDir, 'package', 'package.xml'); if (gitDeltaCommandRes?.status !== 0 || !fs.existsSync(diffPackageXml)) { - throw new SfdxError("Error while running sfdx git delta:\n" + JSON.stringify(gitDeltaCommandRes)); + throw new SfError('Error while running sfdx-git-delta:\n' + JSON.stringify(gitDeltaCommandRes)); } // Remove from original packageDeployOnChange the items that has not been updated - const 
packageXmlDeployOnChangeToUse = path.join(tmpDir, "packageDeployOnChange.xml"); + const packageXmlDeployOnChangeToUse = path.join(tmpDir, 'packageDeployOnChange.xml'); await fs.copy(packageDeployOnChangePath, packageXmlDeployOnChangeToUse); - await removePackageXmlContent(packageXmlDeployOnChangeToUse, diffPackageXml, false, { debugMode: debugMode, keepEmptyTypes: false }); - uxLog(this, c.grey(`packageDeployOnChange.xml filtered to keep only metadatas that have changed: ${packageXmlDeployOnChangeToUse}`)); + await removePackageXmlContent(packageXmlDeployOnChangeToUse, diffPackageXml, false, { + debugMode: debugMode, + keepEmptyTypes: false, + }); + uxLog( + "log", + this, + c.grey( + `packageDeployOnChange.xml filtered to keep only metadatas that have changed: ${packageXmlDeployOnChangeToUse}` + ) + ); // Return result return packageXmlDeployOnChangeToUse; } @@ -593,18 +896,27 @@ export async function removePackageXmlContent( packageXmlFile: string, packageXmlFileToRemove: string, removedOnly = false, - options = { debugMode: false, keepEmptyTypes: false }, + options = { debugMode: false, keepEmptyTypes: false } ) { if (removedOnly === false) { - uxLog(this, c.cyan(`Removing ${c.green(path.basename(packageXmlFileToRemove))} items from ${c.green(path.basename(packageXmlFile))}...`)); + uxLog( + "action", + this, + c.cyan( + `Removing ${c.green(path.basename(packageXmlFileToRemove))} items from ${c.green( + path.basename(packageXmlFile) + )}...` + ) + ); } else { uxLog( + "action", this, c.cyan( `Keeping ${c.green(path.basename(packageXmlFileToRemove))} items matching with ${c.green( - path.basename(packageXmlFile), - )} (and remove the rest)...`, - ), + path.basename(packageXmlFile) + )} (and remove the rest)...` + ) ); } await removePackageXmlFilesContent(packageXmlFile, packageXmlFileToRemove, { @@ -616,31 +928,35 @@ export async function removePackageXmlContent( } // Deploy destructive changes -export async function 
deployDestructiveChanges(packageDeletedXmlFile: string, options: any = { debug: false, check: false }, commandThis: any) { - // Create empty deployment file because of sfdx limitation +export async function deployDestructiveChanges( + packageDeletedXmlFile: string, + options: any = { debug: false, check: false }, + commandThis: any +) { + // Create empty deployment file because of SF CLI limitation // cf https://gist.github.com/benahm/b590ecf575ff3c42265425233a2d727e - uxLog(commandThis, c.cyan(`Deploying destructive changes from file ${path.resolve(packageDeletedXmlFile)}`)); + uxLog("action", commandThis, c.cyan(`Deploying destructive changes from file ${path.resolve(packageDeletedXmlFile)}`)); const tmpDir = await createTempDir(); - const emptyPackageXmlFile = path.join(tmpDir, "package.xml"); + const emptyPackageXmlFile = path.join(tmpDir, 'package.xml'); await fs.writeFile( emptyPackageXmlFile, ` - ${CONSTANTS.API_VERSION} + ${getApiVersion()} `, - "utf8", + 'utf8' ); - await fs.copy(packageDeletedXmlFile, path.join(tmpDir, "destructiveChanges.xml")); + await fs.copy(packageDeletedXmlFile, path.join(tmpDir, 'destructiveChanges.xml')); const deployDelete = - `sfdx force:mdapi:deploy -d ${tmpDir}` + - ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || "60"}` + - ` --testlevel ${options.testLevel || "NoTestRun"}` + - " --ignorewarnings" + // So it does not fail in case metadata is already deleted - (options.targetUsername ? ` --targetusername ${options.targetUsername}` : "") + - (options.check ? " --checkonly" : "") + - (options.debug ? " --verbose" : ""); + `sf project deploy ${options.check ? 'validate' : 'start'} --metadata-dir ${tmpDir}` + + ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || '120'}` + + ` --test-level ${options.testLevel || 'NoTestRun'}` + + ' --ignore-warnings' + // So it does not fail in case metadata is already deleted + (options.targetUsername ? ` --target-org ${options.targetUsername}` : '') + + (options.debug ? 
' --verbose' : '') + + ' --json'; // Deploy destructive changes - let deployDeleteRes = null; + let deployDeleteRes: any = {}; try { deployDeleteRes = await execCommand(deployDelete, commandThis, { output: true, @@ -648,52 +964,57 @@ export async function deployDestructiveChanges(packageDeletedXmlFile: string, op fail: true, }); } catch (e) { - const { errLog } = await analyzeDeployErrorLogs(e.stdout + e.stderr, true, {}); - uxLog(this, c.red("Sadly there has been destruction error(s)")); - uxLog(this, c.red("\n" + errLog)); + const { errLog } = await analyzeDeployErrorLogs((e as any).stdout + (e as any).stderr, true, {}); + uxLog("error", this, c.red('Sadly there has been destruction error(s)')); + uxLog("error", this, c.red('\n' + errLog)); uxLog( + "warning", this, c.yellow( c.bold( - "That could be a false positive, as in real deployment, the package.xml deployment will be committed before the use of destructiveChanges.xml", - ), - ), + 'That could be a false positive, as in real deployment, the package.xml deployment will be committed before the use of destructiveChanges.xml' + ) + ) ); - throw new SfdxError("Error while deploying destructive changes"); + killBoringExitHandlers(); + throw new SfError('Error while deploying destructive changes'); } await fs.remove(tmpDir); - let deleteMsg = ""; + let deleteMsg = ''; if (deployDeleteRes.status === 0) { - deleteMsg = `[sfdx-hardis] Successfully ${options.check ? "checked deployment of" : "deployed"} destructive changes to Salesforce org`; - uxLog(commandThis, c.green(deleteMsg)); + deleteMsg = `[sfdx-hardis] Successfully ${options.check ? 
'checked deployment of' : 'deployed' + } destructive changes to Salesforce org`; + uxLog("success", commandThis, c.green(deleteMsg)); } else { - deleteMsg = "[sfdx-hardis] Unable to deploy destructive changes to Salesforce org"; - uxLog(commandThis, c.red(deployDeleteRes.errorMessage)); + deleteMsg = '[sfdx-hardis] Unable to deploy destructive changes to Salesforce org'; + uxLog("error", commandThis, c.red(deployDeleteRes.errorMessage)); } } export async function deployMetadatas( options: any = { - deployDir: ".", - testlevel: "RunLocalTests", + deployDir: '.', + testlevel: 'RunLocalTests', check: false, debug: false, - soap: false, targetUsername: null, tryOnce: false, - }, + runTests: null, + } ) { // Perform deployment - const deployCommand = - "sfdx force:mdapi:deploy" + - ` --deploydir ${options.deployDir || "."}` + - ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || "60"}` + - ` --testlevel ${options.testlevel || "RunLocalTests"}` + - ` --apiversion ${options.apiVersion || CONSTANTS.API_VERSION}` + - (options.soap ? " --soapdeploy" : "") + - (options.check ? " --checkonly" : "") + - (options.targetUsername ? ` --targetusername ${options.targetUsername}` : "") + - " --verbose"; + let deployCommand = + `sf project deploy ${options.check ? 'validate' : 'start'}` + + ` --metadata-dir ${options.deployDir || '.'}` + + ` --wait ${process.env.SFDX_DEPLOY_WAIT_MINUTES || '120'}` + + ` --test-level ${options.testlevel || 'RunLocalTests'}` + + ` --api-version ${options.apiVersion || getApiVersion()}` + + (options.targetUsername ? ` --target-org ${options.targetUsername}` : '') + + (options.debug ? 
' --verbose' : '') + + ' --json'; + if (options.runTests && options.testlevel == 'RunSpecifiedTests') { + deployCommand += ` --tests ${options.runTests.join(',')}`; + } let deployRes; try { deployRes = await execCommand(deployCommand, this, { @@ -703,24 +1024,29 @@ export async function deployMetadatas( }); } catch (e) { // workaround if --soapdeploy is not available - if (JSON.stringify(e).includes("--soapdeploy") && !options.tryOnce === true) { - uxLog(this, c.yellow("This may be a error with a workaround... let's try it :)")); + if (JSON.stringify(e).includes('--soapdeploy') && !options.tryOnce === true) { + uxLog("warning", this, c.yellow("This may be a error with a workaround... let's try it :)")); try { - deployRes = await execCommand(deployCommand.replace(" --soapdeploy", ""), this, { + deployRes = await execCommand(deployCommand.replace(' --soapdeploy', ''), this, { output: true, debug: options.debug, fail: true, }); } catch (e2) { - if (JSON.stringify(e2).includes("NoTestRun")) { + if (JSON.stringify(e2).includes('NoTestRun')) { // Another workaround: try running tests - uxLog(this, c.yellow("This may be again an error with a workaround... let's make a last attempt :)")); - deployRes = await execCommand(deployCommand.replace(" --soapdeploy", "").replace("NoTestRun", "RunLocalTests"), this, { - output: true, - debug: options.debug, - fail: true, - }); + uxLog("warning", this, c.yellow("This may be again an error with a workaround... 
let's make a last attempt :)")); + deployRes = await execCommand( + deployCommand.replace(' --soapdeploy', '').replace('NoTestRun', 'RunLocalTests'), + this, + { + output: true, + debug: options.debug, + fail: true, + } + ); } else { + killBoringExitHandlers(); throw e2; } } @@ -735,13 +1061,16 @@ let quickActionsBackUpFolder: string; // Replace QuickAction content with Dummy content that will always pass async function replaceQuickActionsWithDummy() { - if (process.env.CI_DEPLOY_QUICK_ACTIONS_DUMMY === "true") { - uxLog(this, c.cyan("Replacing QuickActions content with Dummy content that will always pass...")); + if (process.env.CI_DEPLOY_QUICK_ACTIONS_DUMMY === 'true') { + uxLog("action", this, c.cyan('Replacing QuickActions content with Dummy content that will always pass...')); quickActionsBackUpFolder = await createTempDir(); - const patternQuickActions = process.cwd() + "/force-app/" + `**/quickActions/*__c.*.quickAction-meta.xml`; - const matchQuickActions = await glob(patternQuickActions, { cwd: process.cwd() }); + const patternQuickActions = process.cwd() + '/force-app/' + `**/quickActions/*__c.*.quickAction-meta.xml`; + const matchQuickActions = await glob(patternQuickActions, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const quickActionFile of matchQuickActions) { - const tmpBackupFile = path.join(quickActionsBackUpFolder, path.resolve(quickActionFile).replace(path.resolve(process.cwd()), "")); + const tmpBackupFile = path.join( + quickActionsBackUpFolder, + path.resolve(quickActionFile).replace(path.resolve(process.cwd()), '') + ); await fs.ensureDir(path.dirname(tmpBackupFile)); await fs.copy(quickActionFile, tmpBackupFile); await fs.writeFile( @@ -754,91 +1083,101 @@ async function replaceQuickActionsWithDummy() { false LightningComponent 100 -`, +` ); - uxLog(this, c.grey("Backuped and replaced " + quickActionFile)); + uxLog("log", this, c.grey('Backuped and replaced ' + quickActionFile)); } } } // Restore original QuickActions async 
function restoreQuickActions() { - if (process.env.CI_DEPLOY_QUICK_ACTIONS_DUMMY === "true") { - const patternQuickActionsBackup = quickActionsBackUpFolder + "/force-app/" + `**/quickActions/*.quickAction-meta.xml`; + if (process.env.CI_DEPLOY_QUICK_ACTIONS_DUMMY === 'true') { + const patternQuickActionsBackup = + quickActionsBackUpFolder + '/force-app/' + `**/quickActions/*.quickAction-meta.xml`; const matchQuickActions = await glob(patternQuickActionsBackup, { cwd: process.cwd(), + ignore: GLOB_IGNORE_PATTERNS }); for (const quickActionFile of matchQuickActions) { - const prevFileName = path.resolve(quickActionFile).replace(path.resolve(quickActionsBackUpFolder), path.resolve(process.cwd())); + const prevFileName = path + .resolve(quickActionFile) + .replace(path.resolve(quickActionsBackUpFolder), path.resolve(process.cwd())); await fs.copy(quickActionFile, prevFileName); - uxLog(this, c.grey("Restored " + quickActionFile)); + uxLog("log", this, c.grey('Restored ' + quickActionFile)); } } } // Build target org package.xml manifest -export async function buildOrgManifest(targetOrgUsernameAlias, packageXmlOutputFile = null, conn = null) { +export async function buildOrgManifest( + targetOrgUsernameAlias, + packageXmlOutputFile: string | null = null, + conn: any | null = null +) { // Manage file name if (packageXmlOutputFile === null) { const tmpDir = await createTempDir(); - uxLog(this, c.cyan(`Generating full package.xml from target org ${targetOrgUsernameAlias}...`)); - packageXmlOutputFile = path.join(tmpDir, "packageTargetOrg.xml"); + uxLog("action", this, c.cyan(`Generating full package.xml from target org ${targetOrgUsernameAlias}...`)); + packageXmlOutputFile = path.join(tmpDir, 'packageTargetOrg.xml'); } const manifestName = path.basename(packageXmlOutputFile); const manifestDir = path.dirname(packageXmlOutputFile); // Get default org if not sent as argument (should not happen but better safe than sorry) - if (targetOrgUsernameAlias == null || 
targetOrgUsernameAlias == "") { + if (targetOrgUsernameAlias == null || targetOrgUsernameAlias == '') { const currentOrg = await MetadataUtils.getCurrentOrg(); if (currentOrg == null) { - throw new SfdxError("You should call buildOrgManifest while having a default org set !"); + throw new SfError('You should call buildOrgManifest while having a default org set !'); } targetOrgUsernameAlias = currentOrg.username; } if (isSfdxProject()) { // Use sfdx manifest build in current project await execCommand( - `sfdx force:source:manifest:create` + - ` --manifestname ${manifestName}` + - ` --outputdir ${path.resolve(manifestDir)}` + - ` --includepackages managed,unlocked` + - ` --fromorg ${targetOrgUsernameAlias}`, + `sf project generate manifest` + + ` --name ${manifestName}` + + ` --output-dir ${path.resolve(manifestDir)}` + + ` --include-packages managed,unlocked` + + ` --from-org ${targetOrgUsernameAlias}`, this, { fail: true, debug: process.env.DEBUG, output: true, - }, + } ); } else { const tmpDirSfdxProject = await createTempDir(); await createBlankSfdxProject(tmpDirSfdxProject); // Use sfdx manifest build in dummy project await execCommand( - `sfdx force:source:manifest:create` + - ` --manifestname ${manifestName}` + - ` --outputdir ${path.resolve(manifestDir)}` + - ` --includepackages managed,unlocked` + - ` --fromorg ${targetOrgUsernameAlias}`, + `sf project generate manifest` + + ` --name ${manifestName}` + + ` --output-dir ${path.resolve(manifestDir)}` + + ` --include-packages managed,unlocked` + + ` --from-org ${targetOrgUsernameAlias}`, this, { fail: true, - cwd: path.join(tmpDirSfdxProject, "sfdx-hardis-blank-project"), + cwd: path.join(tmpDirSfdxProject, 'sfdx-hardis-blank-project'), debug: process.env.DEBUG, output: true, - }, + } ); } const packageXmlFull = packageXmlOutputFile; if (!fs.existsSync(packageXmlFull)) { - throw new SfdxError( - c.red("[sfdx-hardis] Unable to generate package.xml. 
This is probably an auth issue or a Salesforce technical issue, please try again later"), + throw new SfError( + c.red( + '[sfdx-hardis] Unable to generate package.xml. This is probably an auth issue or a Salesforce technical issue, please try again later' + ) ); } - // Add Elements that are not returned by sfdx command + // Add Elements that are not returned by SF CLI command if (conn) { - uxLog(this, c.grey("Looking for package.xml elements that are not returned by manifest create command...")); - const mdTypes = [{ type: "ListView" }, { type: "CustomLabel" }]; - const mdList = await conn.metadata.list(mdTypes, CONSTANTS.API_VERSION); + uxLog("log", this, c.grey('Looking for package.xml elements that are not returned by manifest create command...')); + const mdTypes = [{ type: 'ListView' }, { type: 'CustomLabel' }]; + const mdList = await conn.metadata.list(mdTypes, getApiVersion()); const parsedPackageXml = await parseXmlFile(packageXmlFull); for (const element of mdList) { const matchTypes = parsedPackageXml.Package.types.filter((type) => type.name[0] === element.type); @@ -846,8 +1185,10 @@ export async function buildOrgManifest(targetOrgUsernameAlias, packageXmlOutputF // Add member in existing types const members = matchTypes[0].members || []; members.push(element.fullName); - matchTypes[0].members = members.sort(); - parsedPackageXml.Package.types = parsedPackageXml.Package.types.map((type) => (type.name[0] === matchTypes[0].name ? matchTypes[0] : type)); + matchTypes[0].members = members; + parsedPackageXml.Package.types = parsedPackageXml.Package.types.map((type) => + type.name[0] === matchTypes[0].name ? 
matchTypes[0] : type + ); } else { // Create new type const newType = { @@ -857,27 +1198,37 @@ export async function buildOrgManifest(targetOrgUsernameAlias, packageXmlOutputF parsedPackageXml.Package.types.push(newType); } } + // Sort members only for the types that were potentially modified + for (const mdType of mdTypes) { // mdTypes is [{ type: 'ListView' }, { type: 'CustomLabel' }] + const typeName = mdType.type; + const matchedType = parsedPackageXml.Package.types.find(t => t.name[0] === typeName); + if (matchedType && matchedType.members && Array.isArray(matchedType.members)) { + matchedType.members = sortCrossPlatform(matchedType.members); + } + } // Complete with missing WaveDataflow Ids build from WaveRecipe Ids - const waveRecipeTypeList = parsedPackageXml.Package.types.filter((type) => type.name[0] === "WaveRecipe"); + const waveRecipeTypeList = parsedPackageXml.Package.types.filter((type) => type.name[0] === 'WaveRecipe'); if (waveRecipeTypeList.length === 1) { const waveRecipeType = waveRecipeTypeList[0]; const waveRecipeTypeMembers = waveRecipeType.members || []; - const waveDataFlowTypeList = parsedPackageXml.Package.types.filter((type) => type.name[0] === "WaveDataflow"); - let waveDataFlowType = { name: ["WaveDataflow"], members: [] }; + const waveDataFlowTypeList = parsedPackageXml.Package.types.filter((type) => type.name[0] === 'WaveDataflow'); + let waveDataFlowType: any = { name: ['WaveDataflow'], members: [] }; if (waveDataFlowTypeList.length === 1) { waveDataFlowType = waveDataFlowTypeList[0]; } for (const recipeId of waveRecipeTypeMembers) { if (!waveDataFlowType.members.includes(recipeId)) { waveDataFlowType.members.push(recipeId); - uxLog(this, c.grey(`- Added WaveDataflow ${recipeId} to match WaveRecipe ${recipeId}`)); + uxLog("log", this, c.grey(`- Added WaveDataflow ${recipeId} to match WaveRecipe ${recipeId}`)); } } - waveDataFlowType.members.sort(); + sortCrossPlatform(waveDataFlowType.members); // Update type if 
(waveDataFlowTypeList.length === 1) { - parsedPackageXml.Package.types = parsedPackageXml.Package.types.map((type) => (type.name[0] === "WaveDataflow" ? waveDataFlowType : type)); + parsedPackageXml.Package.types = parsedPackageXml.Package.types.map((type) => + type.name[0] === 'WaveDataflow' ? waveDataFlowType : type + ); } // Add type else { @@ -886,36 +1237,112 @@ export async function buildOrgManifest(targetOrgUsernameAlias, packageXmlOutputF } // Delete stuff we don't want - parsedPackageXml.Package.types = parsedPackageXml.Package.types.filter((type) => !["CustomLabels"].includes(type.name[0])); + const filteredTypes = [ + 'CustomLabels', + 'WorkflowFlowAutomation' // Added as a workaround for https://github.com/forcedotcom/cli/issues/3324 + ]; + const typesToRemove = parsedPackageXml.Package.types.filter(type => filteredTypes.includes(type.name[0])); + + if (typesToRemove.length > 0) { + uxLog("log", this, c.grey(`Force filtering out metadata types from org-generated package.xml: ${typesToRemove.map(type => type.name[0]).join(', ')}`)); + parsedPackageXml.Package.types = parsedPackageXml.Package.types.filter( + (type) => !filteredTypes.includes(type.name[0]) + ); + } await writeXmlFile(packageXmlFull, parsedPackageXml); } + const nbRetrievedItems = await countPackageXmlItems(packageXmlFull); + uxLog("action", this, c.cyan(`Full org package.xml contains ${c.bold(nbRetrievedItems)} items`)) return packageXmlFull; } -export async function executePrePostCommands(property: "commandsPreDeploy" | "commandsPostDeploy", success = true) { - const branchConfig = await getConfig("branch"); - const commands = branchConfig[property] || []; +/** + * Creates an empty package.xml file in a temporary directory and returns its path + * Useful for deployment scenarios requiring an empty package.xml (like destructive changes) + * @returns {Promise} Path to the created empty package.xml file + */ +export async function createEmptyPackageXml(): Promise { + // Create temporary 
directory for the empty package.xml + const tmpDir = await createTempDir(); + const emptyPackageXmlPath = path.join(tmpDir, 'empty-package.xml'); + + // Write empty package.xml with API version from constants + await fs.writeFile( + emptyPackageXmlPath, + ` + + ${getApiVersion()} +`, + 'utf8' + ); + + uxLog("log", this, c.grey(`Created empty package.xml at ${emptyPackageXmlPath}`)); + return emptyPackageXmlPath; +} + +export async function executePrePostCommands(property: 'commandsPreDeploy' | 'commandsPostDeploy', options: { success: boolean, checkOnly: boolean, conn: Connection, extraCommands?: any[] }) { + const branchConfig = await getConfig('branch'); + const commands = [...(branchConfig[property] || []), ...(options.extraCommands || [])]; + if (commands.length === 0) { - uxLog(this, c.grey(`No ${property} found to run`)); + uxLog("log", this, c.grey(`No ${property} found to run`)); return; } - uxLog(this, c.cyan(`Running ${property} found in .sfdx-hardis.yml configuration...`)); + uxLog("action", this, c.cyan(`Processing ${property} found in .sfdx-hardis.yml configuration...`)); for (const cmd of commands) { - if (success === false && cmd.skipIfError === true) { - uxLog(this, c.yellow(`Skipping skipIfError=true command [${cmd.id}]: ${cmd.label}`)); + // If if skipIfError is true and deployment failed + if (options.success === false && cmd.skipIfError === true) { + uxLog("warning", this, c.yellow(`Skipping skipIfError=true command [${cmd.id}]: ${cmd.label}`)); + continue; + } + // Skip if we are in another context than the requested one + const cmdContext = cmd.context || "all"; + if (cmdContext === "check-deployment-only" && options.checkOnly === false) { + uxLog("log", this, c.grey(`Skipping check-deployment-only command as we are in process deployment mode [${cmd.id}]: ${cmd.label}`)); + continue; + } + if (cmdContext === "process-deployment-only" && options.checkOnly === true) { + uxLog("log", this, c.grey(`Skipping process-deployment-only command as we 
are in check deployment mode [${cmd.id}]: ${cmd.label}`)); + continue; + } + const runOnlyOnceByOrg = cmd.runOnlyOnceByOrg || false; + if (runOnlyOnceByOrg) { + await checkSfdxHardisTraceAvailable(options.conn); + const commandTraceQuery = `SELECT Id,CreatedDate FROM SfdxHardisTrace__c WHERE Type__c='${property}' AND Key__c='${cmd.id}' LIMIT 1`; + const commandTraceRes = await soqlQuery(commandTraceQuery, options.conn); + if (commandTraceRes?.records?.length > 0) { + uxLog("log", this, c.grey(`Skipping command [${cmd.id}]: ${cmd.label} because it has been defined with runOnlyOnceByOrg and has already been run on ${commandTraceRes.records[0].CreatedDate}`)); + continue; + } + } + // Run command + uxLog("action", this, c.cyan(`Running [${cmd.id}]: ${cmd.label}`)); + const commandRes = await execCommand(cmd.command, this, { fail: false, output: true }); + if (commandRes.status === 0 && runOnlyOnceByOrg) { + const hardisTraceRecord = { + Name: property + "--" + cmd.id, + Type__c: property, + Key__c: cmd.id + } + const insertRes = await options.conn.insert("SfdxHardisTrace__c", [hardisTraceRecord]); + if (insertRes[0].success) { + uxLog("success", this, c.green(`Stored SfdxHardisTrace__c entry ${insertRes[0].id} with command [${cmd.id}] so it is not run again in the future (runOnlyOnceByOrg: true)`)); + } + else { + uxLog("error", this, c.red(`Error storing SfdxHardisTrace__c entry :` + JSON.stringify(insertRes, null, 2))); + } } - uxLog(this, c.cyan(`Running [${cmd.id}]: ${cmd.label}`)); - await execCommand(cmd.command, this, { fail: false, output: true }); } } export async function extractOrgCoverageFromLog(stdout) { - let orgCoverage = null; + let orgCoverage: number | null = null; + // Get from output text const fromTest = /Org Wide Coverage *(.*)/.exec(stdout); if (fromTest && fromTest[1]) { - orgCoverage = parseFloat(fromTest[1].replace("%", "")); + orgCoverage = parseFloat(fromTest[1].replace('%', '')); } /* jscpd:ignore-start */ try { @@ -923,44 +1350,87 @@ 
export async function extractOrgCoverageFromLog(stdout) { return orgCoverage.toFixed(2); } } catch (e) { - uxLog(this, c.yellow(`Warning: unable to convert ${orgCoverage} into string`)); - uxLog(this, c.gray(e.message)); + uxLog("warning", this, c.yellow(`Warning: unable to convert ${orgCoverage} into string`)); + uxLog("error", this, c.grey((e as Error).message)); } /* jscpd:ignore-end */ - // Get from output file + // Get from output file whose name has been found in text output const writtenToPath = /written to (.*coverage)/.exec(stdout); if (writtenToPath && writtenToPath[1]) { const jsonFile = path - .resolve(process.cwd() + path.sep + writtenToPath[1].replace(/\\/g, "/") + path.sep + "coverage-summary.json") - .replace(/\\/g, "/"); - if (fs.existsSync(jsonFile)) { - const coverageInfo = JSON.parse(fs.readFileSync(jsonFile, "utf-8")); - orgCoverage = coverageInfo?.total?.lines?.pct ?? null; - try { - if (orgCoverage && orgCoverage.toFixed(2) > 0.0) { - return orgCoverage.toFixed(2); - } - } catch (e) { - uxLog(this, c.yellow(`Warning: unable to convert ${orgCoverage} into string`)); - uxLog(this, c.gray(e.message)); + .resolve(process.cwd() + path.sep + writtenToPath[1].replace(/\\/g, '/') + path.sep + 'coverage-summary.json') + .replace(/\\/g, '/'); + const result = getCoverageFromJsonFile(jsonFile); + if (result) { + return result; + } + } + + const reportDir = await getReportDirectory(); + const coverageFilesToTest = [ + path.join(reportDir, "coverage", "coverage-summary.json"), + path.join(reportDir, "coverage", "coverage", "coverage-summary.json"), + path.join(process.cwd(), "coverage", "coverage", "coverage-summary.json") + ] + + // Get from output file (1) + for (const coverageFile of coverageFilesToTest) { + const resultFromDefaultFile = getCoverageFromJsonFile(coverageFile); + if (resultFromDefaultFile) { + return resultFromDefaultFile; + } + } + + // Get from JSON Mode (might be best to use output file) + const jsonLog = findJsonInString(stdout); + 
if (jsonLog && jsonLog?.result?.details?.runTestResult?.codeCoverage?.length > 0) { + let numLocationsNb = 0; + let coveredLocationsNb = 0; + for (const coverageRes of jsonLog.result.details.runTestResult.codeCoverage) { + numLocationsNb = numLocationsNb + coverageRes.numLocations; + if (coverageRes?.numLocationsNotCovered > 0) { + coveredLocationsNb = coveredLocationsNb + (coverageRes.numLocations - coverageRes.numLocationsNotCovered); } } + orgCoverage = (coveredLocationsNb / numLocationsNb) * 100; + uxLog("warning", this, c.yellow("Code coverage has been calculated manually, if the number seems strange to you, you better use option \"--coverage-formatters json-summary\"")); + return orgCoverage.toFixed(2); } uxLog( + "warning", this, c.italic( - c.grey("Unable to get org coverage from results. Maybe try to add --coverageformatters json-summary to your call to force:source:deploy ?"), - ), + c.grey( + 'Unable to get org coverage from results. Maybe try to add --coverage-formatters json-summary to your call to sf project deploy start ?' + ) + ) ); return null; } +function getCoverageFromJsonFile(jsonFile) { + if (fs.existsSync(jsonFile)) { + const coverageInfo = JSON.parse(fs.readFileSync(jsonFile, 'utf-8')); + const orgCoverage = coverageInfo?.total?.lines?.pct ?? null; + try { + if (orgCoverage && Number(orgCoverage.toFixed(2)) > 0.0) { + return orgCoverage.toFixed(2); + } + } catch (e) { + uxLog("warning", this, c.yellow(`Warning: unable to convert ${orgCoverage} into string`)); + uxLog("error", this, c.grey((e as Error).message)); + } + } + return null; +} + // Check if min org coverage is reached export async function checkDeploymentOrgCoverage(orgCoverage: number, options: any) { // RunSpecifiedTests will not return org wide coverage, using dynamic text - const codeCoverageText = !options.testlevel || options.testlevel !== "RunSpecifiedTests" ? 
"code coverage (org wide)" : "code coverage"; + const codeCoverageText = + !options.testlevel || options.testlevel !== 'RunSpecifiedTests' ? 'code coverage (org wide)' : 'code coverage'; - const config = await getConfig("branch"); + const config = await getConfig('branch'); // Parse and validate minimum coverage setting, defaults to 75% const minCoverageConf = @@ -968,67 +1438,112 @@ export async function checkDeploymentOrgCoverage(orgCoverage: number, options: a process.env.APEX_TESTS_MIN_COVERAGE || config.apexTestsMinCoverageOrgWide || config.apexTestsMinCoverage || - "75.00"; + '75.00'; const minCoverage = parseFloat(minCoverageConf); if (isNaN(minCoverage)) { - throw new SfdxError(`[sfdx-hardis] Invalid minimum coverage configuration: ${minCoverageConf}`); + killBoringExitHandlers(); + throw new SfError(`[sfdx-hardis] Invalid minimum coverage configuration: ${minCoverageConf}`); } if (minCoverage < 75.0) { - throw new SfdxError(`[sfdx-hardis] Good try, hacker, but minimum ${codeCoverageText} can't be less than 75% :)`); + killBoringExitHandlers(); + throw new SfError(`[sfdx-hardis] Good try, hacker, but minimum ${codeCoverageText} can't be less than 75% :)`); } if (orgCoverage < minCoverage) { if (config?.testCoverageNotBlocking === true) { - await updatePullRequestResultCoverage("invalid_ignored", orgCoverage, minCoverage, options); + await updatePullRequestResultCoverage('invalid_ignored', orgCoverage, minCoverage, options); } else { - await updatePullRequestResultCoverage("invalid", orgCoverage, minCoverage, options); - throw new SfdxError(`[sfdx-hardis][apextest] Test run ${codeCoverageText} ${orgCoverage}% should be greater than ${minCoverage}%`); + await updatePullRequestResultCoverage('invalid', orgCoverage, minCoverage, options); + killBoringExitHandlers(); + throw new SfError( + `[sfdx-hardis][apextest] Test run ${codeCoverageText} ${orgCoverage}% should be greater than ${minCoverage}%` + ); } } else { - await updatePullRequestResultCoverage("valid", 
orgCoverage, minCoverage, options); - uxLog(this, c.cyan(`[apextest] Test run ${codeCoverageText} ${c.bold(c.green(orgCoverage))}% is greater than ${c.bold(minCoverage)}%`)); + await updatePullRequestResultCoverage('valid', orgCoverage, minCoverage, options); + uxLog( + "action", + this, + c.cyan( + `[apextest] Test run ${codeCoverageText} ${c.bold(c.green(orgCoverage))}% is greater than ${c.bold( + minCoverage + )}%` + ) + ); } } async function checkDeploymentErrors(e, options, commandThis = null) { - const { tips, errLog } = await analyzeDeployErrorLogs(e.stdout + e.stderr, true, options); - uxLog(commandThis, c.red(c.bold("Sadly there has been Metadata deployment error(s)..."))); - uxLog(this, c.red("\n" + errLog)); - uxLog( - commandThis, - c.yellow(c.bold(`You may${tips.length > 0 ? " also" : ""} copy-paste errors on google to find how to solve the metadata deployment issues :)`)), - ); - await displayDeploymentLink(e.stdout + e.stderr, options); + const { errLog } = await analyzeDeployErrorLogs((e as any).stdout + (e as any).stderr, true, options); + uxLog("error", commandThis, c.red(c.bold('Sadly there has been Metadata deployment error(s)...'))); + uxLog("error", this, c.red('\n' + errLog)); + await displayDeploymentLink((e as any).stdout + (e as any).stderr, options); // Post pull requests comments if necessary if (options.check) { await GitProvider.managePostPullRequestComment(); } - throw new SfdxError("Metadata deployment failure. Check messages above"); + killBoringExitHandlers(); + throw new SfError('Metadata deployment failure. 
Check messages above'); } // This data will be caught later to build a pull request message -async function updatePullRequestResultCoverage(coverageStatus: string, orgCoverage: number, orgCoverageTarget: number, options: any) { +async function updatePullRequestResultCoverage( + coverageStatus: string, + orgCoverage: number, + orgCoverageTarget: number, + options: any +) { const existingPrData = globalThis.pullRequestData || {}; - const prDataCodeCoverage: any = { - messageKey: existingPrData.messageKey ?? "deployment", - title: (existingPrData.title ?? options.check) ? "✅ Deployment check success" : "✅ Deployment success", - codeCoverageMarkdownBody: "Code coverage is valid", - deployStatus: existingPrData ?? coverageStatus, + const prDataCodeCoverage: Partial = { + messageKey: existingPrData.messageKey ?? 'deployment', + title: existingPrData.title ?? options.check ? '✅ Deployment check success' : '✅ Deployment success', + codeCoverageMarkdownBody: 'Code coverage is valid', + deployStatus: (coverageStatus === 'valid' || coverageStatus === 'invalid' || coverageStatus === 'unknown') + ? coverageStatus + : existingPrData.deployStatus ?? 'unknown', }; // Code coverage failure - if (coverageStatus === "invalid") { - prDataCodeCoverage.title = existingPrData.deployStatus === "valid" ? "❌ Deployment failed: Code coverage error" : prDataCodeCoverage.title; + if (coverageStatus === 'invalid') { + prDataCodeCoverage.title = + existingPrData.deployStatus === 'valid' ? '❌ Deployment failed: Code coverage error' : prDataCodeCoverage.title; prDataCodeCoverage.codeCoverageMarkdownBody = deployCodeCoverageToMarkdown(orgCoverage, orgCoverageTarget); - prDataCodeCoverage.status = "invalid"; + prDataCodeCoverage.status = 'invalid'; } // Code coverage failure but ignored thanks to config testCoverageNotBlocking - else if (coverageStatus === "invalid_ignored") { + else if (coverageStatus === 'invalid_ignored') { prDataCodeCoverage.title = - existingPrData.deployStatus === "valid" ? 
"✅⚠️ Deployment success with ignored Code coverage error" : prDataCodeCoverage.title; + existingPrData.deployStatus === 'valid' + ? '✅⚠️ Deployment success with ignored Code coverage error' + : prDataCodeCoverage.title; prDataCodeCoverage.codeCoverageMarkdownBody = deployCodeCoverageToMarkdown(orgCoverage, orgCoverageTarget); } else { prDataCodeCoverage.codeCoverageMarkdownBody = deployCodeCoverageToMarkdown(orgCoverage, orgCoverageTarget); } globalThis.pullRequestData = Object.assign(globalThis.pullRequestData || {}, prDataCodeCoverage); } + +export async function generateApexCoverageOutputFile(): Promise { + try { + const reportDir = await getReportDirectory(); + const coverageFileName = path.join(reportDir, "apex-coverage-results.json"); + let coverageObject: any = null; + // Output from sf project deploy start or similar: get locally generated file + if (fs.existsSync(path.join(reportDir, "coverage", "coverage.json"))) { + coverageObject = JSON.parse(fs.readFileSync(path.join(reportDir, "coverage", "coverage.json"), 'utf8')); + } + // Output from apex run tests: get locally generated file + else if (fs.existsSync(path.join(reportDir, "test-result-codecoverage.json"))) { + coverageObject = JSON.parse(fs.readFileSync(path.join(reportDir, "test-result-codecoverage.json"), 'utf8')); + } + if (coverageObject !== null) { + await fs.writeFile(coverageFileName, JSON.stringify(coverageObject, null, 2), 'utf8'); + uxLog("log", this, c.grey(`Written Apex coverage results in file ${coverageFileName}`)); + if (WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.sendReportFileMessage(coverageFileName, "Coverage Results JSON", "report") + } + } + } catch (e: any) { + uxLog("error", this, c.red(`Error while generating Apex coverage output file: ${e.message}`)); + } +} \ No newline at end of file diff --git a/src/common/utils/emailUtils.ts b/src/common/utils/emailUtils.ts index 4b5a3d2be..23ec9c0c4 100644 --- a/src/common/utils/emailUtils.ts +++ 
b/src/common/utils/emailUtils.ts @@ -1,13 +1,13 @@ -import { Connection } from "jsforce"; -import { getNested, uxLog } from "."; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; +import { Connection } from '@salesforce/core'; +import { getNested, uxLog } from './index.js'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; export async function sendEmail(emailMessage: EmailMessage) { const conn: Connection = globalThis.jsForceConn || null; if (!conn) { - uxLog(this, c.grey("globalThis.jsForceConn is not set, can not send email")); + uxLog("log", this, c.grey('globalThis.jsForceConn is not set, can not send email')); return; } // Init message @@ -22,28 +22,29 @@ export async function sendEmail(emailMessage: EmailMessage) { utf8 - ${emailMessage.senderDisplayName || "SFDX-HARDIS Notifications"} + ${emailMessage.senderDisplayName || 'SFDX-HARDIS Notifications' + } ${emailMessage.subject} `; // Plain text Body if (emailMessage.body_text) { - soapBody += ` ${sanitizeForXml(emailMessage.body_text || "")}\n`; + soapBody += ` ${sanitizeForXml(emailMessage.body_text || '')}\n`; } else if (emailMessage.body_html) { - soapBody += ` ${sanitizeForXml(emailMessage.body_html || "")}\n`; + soapBody += ` ${sanitizeForXml(emailMessage.body_html || '')}\n`; } // Addresses - if (emailMessage?.to?.length > 0) { - soapBody += buildArrayOfStrings(emailMessage.to, " ", ""); + if (emailMessage?.to?.length && emailMessage?.to?.length > 0) { + soapBody += buildArrayOfStrings(emailMessage.to, ' ', ''); } - if (emailMessage?.cc?.length > 0) { - soapBody += buildArrayOfStrings(emailMessage.cc, " ", ""); + if (emailMessage?.cc?.length && emailMessage?.cc?.length > 0) { + soapBody += buildArrayOfStrings(emailMessage.cc, ' ', ''); } - if (emailMessage?.cci?.length > 0) { - soapBody += buildArrayOfStrings(emailMessage.cci, " ", ""); + if (emailMessage?.cci?.length && emailMessage?.cci?.length > 0) { + soapBody += 
buildArrayOfStrings(emailMessage.cci, ' ', ''); } // Attachments - if (emailMessage?.attachments?.length > 0) { + if (emailMessage?.attachments?.length && emailMessage?.attachments?.length > 0) { let totalSize = 0; for (const attachment of emailMessage?.attachments || []) { if (fs.existsSync(attachment)) { @@ -51,17 +52,17 @@ export async function sendEmail(emailMessage: EmailMessage) { totalSize += fileSize; if (totalSize > 8e7) { // 10MB - uxLog(this, `[EmailUtils] Skipped attachment ${attachment} to avoid the reach size limit`); + uxLog("other", this, `[EmailUtils] Skipped attachment ${attachment} to avoid the reach size limit`); continue; } const fileName = path.basename(attachment); - const fileBody = fs.readFileSync(attachment).toString("base64"); + const fileBody = fs.readFileSync(attachment).toString('base64'); soapBody += ` \n`; soapBody += ` ${fileName}\n`; soapBody += ` ${fileBody}\n`; soapBody += ` \n`; } else { - uxLog(this, `[EmailUtils] Skipped not found attachment ${attachment}`); + uxLog("other", this, `[EmailUtils] Skipped not found attachment ${attachment}`); } } } @@ -73,26 +74,32 @@ export async function sendEmail(emailMessage: EmailMessage) { `; const soapResponse = await conn.request( { - method: "POST", + method: 'POST', url: `${conn.instanceUrl}/services/Soap/c/${conn.version}`, body: soapBody, headers: { - "Content-Type": "text/xml;charset=utf-8", - Accept: "text/xml;charset=utf-8", + 'Content-Type': 'text/xml;charset=utf-8', + Accept: 'text/xml;charset=utf-8', SOAPAction: '""', }, }, - { responseType: "text/xml" }, + { responseType: 'text/xml' } ); - const resultTag = getNested(soapResponse, ["soapenv:Envelope", "soapenv:Body", "sendEmailResponse", "result", "success"]); - if (resultTag === "true") { + const resultTag = getNested(soapResponse, [ + 'soapenv:Envelope', + 'soapenv:Body', + 'sendEmailResponse', + 'result', + 'success', + ]); + if (resultTag === 'true') { return { success: true, detail: soapResponse }; } return { success: 
false, detail: soapResponse }; } function buildArrayOfStrings(elements: string[], openingTag: string, closingTag: string): string { - let result = ""; + let result = ''; for (const element of elements) { result += `${openingTag}${element}${closingTag}\n`; } @@ -100,7 +107,12 @@ function buildArrayOfStrings(elements: string[], openingTag: string, closingTag: } function sanitizeForXml(value) { - return String(value).replace(/&/g, "&").replace(//g, ">").replace(/"/g, """).replace(/'/g, "'"); + return String(value) + .replace(/&/g, '&') + .replace(//g, '>') + .replace(/"/g, '"') + .replace(/'/g, '''); } export interface EmailMessage { diff --git a/src/common/utils/fileDownloader.ts b/src/common/utils/fileDownloader.ts new file mode 100644 index 000000000..87bb3244a --- /dev/null +++ b/src/common/utils/fileDownloader.ts @@ -0,0 +1,105 @@ +import { Connection, SfError } from "@salesforce/core"; +import fs from 'fs-extra'; +import ora from "ora"; +import * as path from "path"; +import { createTempDir } from "./index.js"; +import makeFetchHappen, { FetchOptions } from 'make-fetch-happen'; + +export class FileDownloader { + + conn: Connection; + downloadUrl: string; + outputFile: string | null = null; + fetchOptions: any = {}; + label: string; + + constructor(downloadUrl: string, options: { + conn?: any, + outputFile?: string, + fetchOptions?: FetchOptions + label?: 'url' | 'file' | 'both' + }) { + this.conn = options.conn || null; + this.downloadUrl = downloadUrl; + this.outputFile = options.outputFile || null; + this.label = options?.label === 'file' + ? (path.relative(process.cwd(), this.outputFile || "")) || this.downloadUrl + : options?.label === 'both' + ? 
`${this.outputFile} from ${this.downloadUrl}` + : this.downloadUrl; + // Build fetch options for HTTP calls to retrieve document files + this.fetchOptions = options.fetchOptions || { + method: 'GET', + headers: { + Authorization: 'Bearer ' + this.conn.accessToken, + 'Content-Type': 'blob', + // "X-PrettyPrint": '1' + }, + retry: { + retries: 20, + factor: 3, + randomize: true, + }, + }; + } + + public async download(): Promise<{ success: boolean, outputFile: string, error?: any }> { + const spinnerCustom = ora({ + text: `Downloading ${this.label}...`, + spinner: 'moon', + }).start(); + + if (this.outputFile == null) { + const tempDir = await createTempDir(); + this.outputFile = path.join(tempDir, Math.random().toString(36).substring(7)); + } + + try { + this.fetchOptions.onRetry = (cause: unknown) => { + spinnerCustom.text = `Retrying ${this.label} (${cause})...`; + }; + + const fetchRes = await makeFetchHappen(this.downloadUrl, this.fetchOptions); + if (!fetchRes.ok) { + throw new SfError(`Fetch error: ${JSON.stringify(fetchRes.body)}`); + } + + const stream = fs.createWriteStream(this.outputFile); + const totalSize = Number(fetchRes.headers.get('content-length')); + + let downloadedSize = 0; + + // Set up piping first + fetchRes.body.pipe(stream); + + fetchRes.body.on('data', (chunk) => { + downloadedSize += chunk.length; + const percentComplete = totalSize ? (downloadedSize / totalSize * 100).toFixed(2) : null; + spinnerCustom.text = totalSize + ? 
`Downloaded ${downloadedSize} bytes of ${totalSize} bytes (${percentComplete}%) of ${this.label}` + : `Downloaded ${downloadedSize} bytes of ${this.label}`; + }); + + // Handle end of download, or error + await new Promise((resolve, reject) => { + fetchRes.body.on("error", () => reject()); + stream.on("error", () => reject()); + stream.on("finish", () => resolve()); + }); + + const fileExists = await fs.exists(this.outputFile); + if (!fileExists) { + throw new SfError(`Download error: Download stream ok but no created file at ${this.outputFile}`); + } + + spinnerCustom.succeed(`Downloaded ${this.label}`); + stream.destroy(); + + } catch (err: any) { + spinnerCustom.fail(`Error while downloading ${this.downloadUrl}: ${err.message}`); + return { success: false, outputFile: this.outputFile, error: err }; + } + + return { success: true, outputFile: this.outputFile }; + } +} \ No newline at end of file diff --git a/src/common/utils/filesUtils.ts b/src/common/utils/filesUtils.ts index cc10ec291..4fb190e44 100644 --- a/src/common/utils/filesUtils.ts +++ b/src/common/utils/filesUtils.ts @@ -1,24 +1,27 @@ // External Libraries and Node.js Modules -import * as fs from "fs-extra"; -import * as path from "path"; -import * as c from "chalk"; -import * as fetch from "@adobe/node-fetch-retry"; -import * as split from "split"; -import { PromisePool } from "@supercharge/promise-pool"; +import fs from 'fs-extra'; +import * as path from 'path'; +import c from 'chalk'; +import open from 'open'; +import * as split from 'split'; +import { PromisePool } from '@supercharge/promise-pool'; +import crypto from 'crypto'; // Salesforce Specific and Other Specific Libraries -import { Connection, SfdxError } from "@salesforce/core"; -import * as Papa from "papaparse"; -import * as ExcelJS from "exceljs"; +import { Connection, SfError } from '@salesforce/core'; +import Papa from 'papaparse'; +import ExcelJS from 'exceljs'; // Project Specific Utilities -import { getCurrentGitBranch, isCI, uxLog } 
from "."; -import { bulkQuery, soqlQuery } from "./apiUtils"; -import { prompts } from "./prompts"; -import { CONSTANTS, getReportDirectory } from "../../config"; -import { WebSocketClient } from "../websocketClient"; +import { getCurrentGitBranch, isCI, isGitRepo, uxLog } from './index.js'; +import { bulkQuery, soqlQuery, bulkQueryByChunks } from './apiUtils.js'; +import { prompts } from './prompts.js'; +import { getApiVersion, getReportDirectory } from '../../config/index.js'; +import { WebSocketClient } from '../websocketClient.js'; +import { FileDownloader } from './fileDownloader.js'; +import { ApiLimitsManager } from './limitUtils.js'; -export const filesFolderRoot = path.join(".", "scripts", "files"); +export const filesFolderRoot = path.join('.', 'scripts', 'files'); export class FilesExporter { private filesPath: string; @@ -26,11 +29,11 @@ export class FilesExporter { private pollTimeout: number; private recordsChunkSize: number; private startChunkNumber: number; + private parentRecordsChunkSize: number; private commandThis: any; - private fetchOptions: any; private dtl: any = null; // export config - private exportedFilesFolder: string; + private exportedFilesFolder: string = ''; private recordsChunk: any[] = []; private chunksNumber = 1; @@ -40,8 +43,12 @@ export class FilesExporter { private bulkApiRecordsEnded = false; private recordChunksNumber = 0; + private logFile: string; + private hasExistingFiles: boolean; + private resumeExport: boolean; - private totalSoqlRequests = 0; + private totalRestApiCalls = 0; + private totalBulkApiCalls = 0; private totalParentRecords = 0; private parentRecordsWithFiles = 0; private recordsIgnored = 0; @@ -49,32 +56,34 @@ export class FilesExporter { private filesErrors = 0; private filesIgnoredType = 0; private filesIgnoredExisting = 0; - private apiUsedBefore = null; - private apiLimit = null; + private filesIgnoredSize = 0; + private filesValidationErrors = 0; + private filesValidated = 0; // Count of files that 
went through validation (downloaded or existing) + + // Optimized API Limits Management System + private apiLimitsManager: ApiLimitsManager; constructor( filesPath: string, conn: Connection, - options: { pollTimeout?: number; recordsChunkSize?: number; exportConfig?: any; startChunkNumber?: number }, - commandThis: any, + options: { pollTimeout?: number; recordsChunkSize?: number; exportConfig?: any; startChunkNumber?: number; resumeExport?: boolean }, + commandThis: any ) { this.filesPath = filesPath; this.conn = conn; - this.pollTimeout = options?.pollTimeout || 300000; + this.pollTimeout = options?.pollTimeout || 600000; this.recordsChunkSize = options?.recordsChunkSize || 1000; + this.parentRecordsChunkSize = 100000; this.startChunkNumber = options?.startChunkNumber || 0; + this.resumeExport = options?.resumeExport || false; + this.hasExistingFiles = fs.existsSync(path.join(this.filesPath, 'export')); this.commandThis = commandThis; if (options.exportConfig) { this.dtl = options.exportConfig; } - // Build fetch options for HTTP calls to retrieve document files - this.fetchOptions = { - method: "GET", - headers: { - Authorization: "Bearer " + this.conn.accessToken, - "Content-Type": "blob", - }, - }; + + // Initialize the optimized API limits manager + this.apiLimitsManager = new ApiLimitsManager(conn, commandThis); } async processExport() { @@ -82,68 +91,254 @@ export class FilesExporter { if (this.dtl === null) { this.dtl = await getFilesWorkspaceDetail(this.filesPath); } - uxLog(this.commandThis, c.cyan(`Exporting files from ${c.green(this.dtl.full_label)} ...`)); - uxLog(this.commandThis, c.italic(c.grey(this.dtl.description))); + uxLog("action", this.commandThis, c.cyan(`Initializing files export using workspace ${c.green(this.dtl.full_label)} ...`)); + uxLog("log", this.commandThis, c.italic(c.grey(this.dtl.description))); + // Make sure export folder for files is existing - this.exportedFilesFolder = path.join(this.filesPath, "export"); + 
this.exportedFilesFolder = path.join(this.filesPath, 'export'); await fs.ensureDir(this.exportedFilesFolder); + // Handle resume/restart mode + if (!this.resumeExport) { + if (this.hasExistingFiles) { + // Restart mode: clear the output folder + uxLog("action", this.commandThis, c.yellow(`Restart mode: clearing output folder ${this.exportedFilesFolder}`)); + await fs.emptyDir(this.exportedFilesFolder); + } + } else { + uxLog("action", this.commandThis, c.cyan(`Resume mode: existing files will be validated and skipped if valid`)); + } + await this.calculateApiConsumption(); - this.startQueue(); - await this.processParentRecords(); + + const reportDir = await getReportDirectory(); + const reportExportDir = path.join(reportDir, 'files-export-log'); + const now = new Date(); + const dateStr = now.toISOString().replace(/T/, '_').replace(/:/g, '-').replace(/\..+/, ''); + this.logFile = path.join(reportExportDir, `files-export-log-${this.dtl.name}-${dateStr}.csv`); + + // Initialize CSV log file with headers + await this.initializeCsvLog(); + + // Phase 1: Calculate total files count for accurate progress tracking + uxLog("action", this.commandThis, c.cyan("Estimating total files to download...")); + const totalFilesCount = await this.calculateTotalFilesCount(); + uxLog("log", this.commandThis, c.grey(`Estimated ${totalFilesCount} files to download`)); + + // Phase 2: Process downloads with accurate progress tracking + await this.processDownloadsWithProgress(totalFilesCount); + + const result = await this.buildResult(); + return result; + } + + // Phase 1: Calculate total files count using efficient COUNT() queries + private async calculateTotalFilesCount(): Promise { + let totalFiles = 0; + + // Get parent records count to estimate batching + const countSoqlQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT COUNT() FROM'); + await this.waitIfApiLimitApproached('REST'); + this.totalRestApiCalls++; + const countSoqlQueryRes = await soqlQuery(countSoqlQuery, 
this.conn); + const totalParentRecords = countSoqlQueryRes.totalSize; + + // Count Attachments - use COUNT() query with IN clause batching for memory efficiency + const attachmentBatchSize = 200; + + // Estimate Attachments count by sampling + const sampleSize = Math.min(attachmentBatchSize, totalParentRecords); + if (sampleSize > 0) { + // Get sample of parent IDs + const sampleQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT Id FROM') + ` LIMIT ${sampleSize}`; + await this.waitIfApiLimitApproached('REST'); + this.totalRestApiCalls++; + const sampleParents = await soqlQuery(sampleQuery, this.conn); + + if (sampleParents.records.length > 0) { + const sampleParentIds = sampleParents.records.map((record: any) => `'${record.Id}'`).join(','); + const attachmentCountQuery = `SELECT COUNT() FROM Attachment WHERE ParentId IN (${sampleParentIds})`; + await this.waitIfApiLimitApproached('REST'); + this.totalRestApiCalls++; + const attachmentCountRes = await soqlQuery(attachmentCountQuery, this.conn); + + // Extrapolate from sample + const avgAttachmentsPerRecord = attachmentCountRes.totalSize / sampleParents.records.length; + totalFiles += Math.round(avgAttachmentsPerRecord * totalParentRecords); + } + } + + // Count ContentVersions - use COUNT() query with sampling for memory efficiency + if (sampleSize > 0) { + const sampleQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT Id FROM') + ` LIMIT ${sampleSize}`; + const sampleParents = await soqlQuery(sampleQuery, this.conn); + + if (sampleParents.records.length > 0) { + const sampleParentIds = sampleParents.records.map((record: any) => `'${record.Id}'`).join(','); + + // Count ContentDocumentLinks for sample + const linkCountQuery = `SELECT COUNT() FROM ContentDocumentLink WHERE LinkedEntityId IN (${sampleParentIds})`; + this.totalRestApiCalls++; + const linkCountRes = await soqlQuery(linkCountQuery, this.conn); + + // Extrapolate from sample (ContentVersions ≈ ContentDocumentLinks for latest 
versions) + const avgContentVersionsPerRecord = linkCountRes.totalSize / sampleParents.records.length; + totalFiles += Math.round(avgContentVersionsPerRecord * totalParentRecords); + } + } + + return Math.max(totalFiles, 1); // Ensure at least 1 for progress tracking + } + + // Phase 2: Process downloads with accurate file-based progress tracking + private async processDownloadsWithProgress(estimatedFilesCount: number) { + let filesProcessed = 0; + let totalFilesDiscovered = 0; // Track actual files discovered + let actualTotalFiles = estimatedFilesCount; // Start with estimation, will be adjusted as we discover actual files + + // Start progress tracking with estimated total files count + WebSocketClient.sendProgressStartMessage('Exporting files', actualTotalFiles); + + // Progress callback function with total adjustment capability + const progressCallback = (filesCompleted: number, filesDiscoveredInChunk?: number) => { + filesProcessed += filesCompleted; + + // If we discovered files in this chunk, update our tracking + if (filesDiscoveredInChunk !== undefined) { + totalFilesDiscovered += filesDiscoveredInChunk; + // Update total to use actual discovered count + remaining estimation + const processedChunks = this.recordChunksNumber; + const totalChunks = this.chunksNumber; + const remainingChunks = totalChunks - processedChunks; + + if (remainingChunks > 0) { + // Estimate remaining files based on actual discovery rate + const avgFilesPerChunk = totalFilesDiscovered / processedChunks; + const estimatedRemainingFiles = Math.round(avgFilesPerChunk * remainingChunks); + actualTotalFiles = totalFilesDiscovered + estimatedRemainingFiles; + } else { + // All chunks processed, use actual total + actualTotalFiles = totalFilesDiscovered; + } + + // Get API usage for display (non-blocking) + this.getApiUsageStatus().then(apiUsage => { + uxLog("other", this, c.grey(`Discovered ${filesDiscoveredInChunk} files in chunk, updated total estimate to ${actualTotalFiles} 
${apiUsage.message}`)); + }).catch(() => { + uxLog("other", this, c.grey(`Discovered ${filesDiscoveredInChunk} files in chunk, updated total estimate to ${actualTotalFiles}`)); + }); + } + + WebSocketClient.sendProgressStepMessage(filesProcessed, actualTotalFiles); + }; + + // Use modified queue system with progress tracking + this.startQueue(progressCallback); + await this.processParentRecords(progressCallback); await this.queueCompleted(); - return await this.buildResult(); + + // End progress tracking with final total + WebSocketClient.sendProgressEndMessage(actualTotalFiles); } - // Calculate API consumption + // Calculate API consumption and validate limits - optimized with new ApiLimitsManager private async calculateApiConsumption() { - const countSoqlQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, "SELECT COUNT() FROM"); - this.totalSoqlRequests++; + // Initialize the API limits manager + await this.apiLimitsManager.initialize(); + + const countSoqlQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT COUNT() FROM'); + await this.apiLimitsManager.trackApiCall('REST'); + this.totalRestApiCalls++; const countSoqlQueryRes = await soqlQuery(countSoqlQuery, this.conn); this.chunksNumber = Math.round(countSoqlQueryRes.totalSize / this.recordsChunkSize); - const estimatedApiCalls = Math.round(this.chunksNumber * 2) + 1; - this.apiUsedBefore = (this.conn as any)?.limitInfo?.apiUsage?.used ? 
(this.conn as any).limitInfo.apiUsage.used - 1 : this.apiUsedBefore; - this.apiLimit = (this.conn as any)?.limitInfo?.apiUsage?.limit; - // Check if there are enough API calls available - if (this.apiLimit - this.apiUsedBefore < estimatedApiCalls + 1000) { - throw new SfdxError( - `You don't have enough API calls available (${c.bold(this.apiLimit - this.apiUsedBefore)}) to perform this export that could consume ${c.bold( - estimatedApiCalls, - )} API calls`, + + // Get current usage for API consumption estimation + const currentUsage = this.apiLimitsManager.getCurrentUsage(); + + // More accurate API consumption estimation: + // - 1 Bulk API v2 call for main parent records query + // - Multiple REST API calls for Attachment queries (batches of 200) + // - Multiple Bulk API v2 calls for ContentDocumentLink and ContentVersion queries + const estimatedRestApiCalls = Math.round(this.chunksNumber * (countSoqlQueryRes.totalSize / 200)) + 5; // Attachment batches + counting queries + const estimatedBulkApiCalls = Math.round(this.chunksNumber * 3) + 1; // Parent records + ContentDocumentLink + ContentVersion per chunk + + // Check REST API limit with safety buffer + const restApiSafetyBuffer = 500; + if (currentUsage.restRemaining < estimatedRestApiCalls + restApiSafetyBuffer) { + throw new SfError( + `You don't have enough REST API calls available (${c.bold( + currentUsage.restRemaining + )}) to perform this export that could consume ${c.bold(estimatedRestApiCalls)} REST API calls` + ); + } + + // Check Bulk API v2 limit with safety buffer + const bulkApiSafetyBuffer = 100; + if (currentUsage.bulkRemaining < estimatedBulkApiCalls + bulkApiSafetyBuffer) { + throw new SfError( + `You don't have enough Bulk API v2 calls available (${c.bold( + currentUsage.bulkRemaining + )}) to perform this export that could consume ${c.bold(estimatedBulkApiCalls)} Bulk API v2 calls` ); } + // Request user confirmation if (!isCI) { const warningMessage = c.cyanBright( `This export of files 
could run on ${c.bold(c.yellow(countSoqlQueryRes.totalSize))} records, in ${c.bold( - c.yellow(this.chunksNumber), - )} chunks, and consume up to ${c.bold(c.yellow(estimatedApiCalls))} API calls on the ${c.bold( - c.yellow(this.apiLimit - this.apiUsedBefore), - )} remaining API calls. Do you want to proceed ?`, + c.yellow(this.chunksNumber) + )} chunks, and consume up to ${c.bold(c.yellow(estimatedRestApiCalls))} REST API calls (${c.bold(c.yellow(currentUsage.restRemaining))} remaining) and ${c.bold(c.yellow(estimatedBulkApiCalls))} Bulk API v2 calls (${c.bold(c.yellow(currentUsage.bulkRemaining))} remaining). Do you want to proceed ?` ); - const promptRes = await prompts({ type: "confirm", message: warningMessage }); + const promptRes = await prompts({ + type: 'confirm', + message: warningMessage, + description: 'Proceed with the operation despite API usage warnings' + }); if (promptRes.value !== true) { - throw new SfdxError("Command cancelled by user"); + throw new SfError('Command cancelled by user'); } if (this.startChunkNumber === 0) { - uxLog(this, c.yellow(c.italic("Use --startchunknumber command line argument if you do not want to start from first chunk"))); + uxLog( + "warning", + this, + c.yellow( + c.italic('Use --startchunknumber command line argument if you do not want to start from first chunk') + ) + ); } } } + // Monitor API usage during operations using the optimized ApiLimitsManager + private async waitIfApiLimitApproached(operationType: 'REST' | 'BULK') { + await this.apiLimitsManager.trackApiCall(operationType); + } + + // Get current API usage percentages for display + private async getApiUsageStatus(): Promise<{ rest: number; bulk: number; message: string }> { + return this.apiLimitsManager.getUsageStatus(); + } + // Run chunks one by one, and don't wait to have all the records fetched to start it - private startQueue() { + private startQueue(progressCallback?: (filesCompleted: number, filesDiscoveredInChunk?: number) => void) { 
this.queueInterval = setInterval(async () => { if (this.recordsChunkQueueRunning === false && this.recordsChunkQueue.length > 0) { this.recordsChunkQueueRunning = true; - const recordChunk = this.recordsChunkQueue.shift(); - await this.processRecordsChunk(recordChunk); + const queueItem = this.recordsChunkQueue.shift(); + // Handle both old format (array) and new format (object with records and progressCallback) + const recordChunk = Array.isArray(queueItem) ? queueItem : queueItem.records; + const chunkProgressCallback = Array.isArray(queueItem) ? progressCallback : queueItem.progressCallback; + await this.processRecordsChunk(recordChunk, chunkProgressCallback); this.recordsChunkQueueRunning = false; // Manage last chunk - } else if (this.bulkApiRecordsEnded === true && this.recordsChunkQueue.length === 0 && this.recordsChunk.length > 0) { + } else if ( + this.bulkApiRecordsEnded === true && + this.recordsChunkQueue.length === 0 && + this.recordsChunk.length > 0 + ) { const recordsToProcess = [...this.recordsChunk]; this.recordsChunk = []; - this.recordsChunkQueue.push(recordsToProcess); + this.recordsChunkQueue.push({ records: recordsToProcess, progressCallback }); } }, 1000); } @@ -162,7 +357,7 @@ export class FilesExporter { resolve(true); } if (globalThis.sfdxHardisFatalError === true) { - uxLog(this, c.red("Fatal error while processing chunks queue")); + uxLog("error", this, c.red('Fatal error while processing chunks queue')); process.exit(1); } }, 1000); @@ -171,187 +366,586 @@ export class FilesExporter { this.queueInterval = null; } - private async processParentRecords() { + private async processParentRecords(progressCallback?: (filesCompleted: number, filesDiscoveredInChunk?: number) => void) { // Query parent records using SOQL defined in export.json file - uxLog(this, c.grey("Bulk query: " + c.italic(this.dtl.soqlQuery))); - this.totalSoqlRequests++; + await this.waitIfApiLimitApproached('BULK'); + this.totalBulkApiCalls++; this.conn.bulk.pollTimeout = 
this.pollTimeout || 600000; // Increase timeout in case we are on a bad internet connection or if the bulk api batch is queued - await this.conn.bulk - .query(this.dtl.soqlQuery) - .on("record", async (record) => { - this.totalParentRecords++; - const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, record[this.dtl.outputFolderNameField] || record.Id)); - if (this.dtl.overwriteParentRecords !== true && fs.existsSync(parentRecordFolderForFiles)) { - uxLog(this, c.grey(`Skipped record - ${record[this.dtl.outputFolderNameField] || record.Id} - Record files already downloaded`)); - this.recordsIgnored++; - return; - } - await this.addToRecordsChunk(record); - }) - .on("error", (err) => { - throw new SfdxError(c.red("Bulk query error:" + err)); - }) - .on("end", () => { - this.bulkApiRecordsEnded = true; - }); + + // Use bulkQueryByChunks to handle large queries + const queryRes = await bulkQueryByChunks(this.dtl.soqlQuery, this.conn, this.parentRecordsChunkSize); + for (const record of queryRes.records) { + this.totalParentRecords++; + const parentRecordFolderForFiles = path.resolve( + path.join(this.exportedFilesFolder, record[this.dtl.outputFolderNameField] || record.Id) + ); + if (this.dtl.overwriteParentRecords !== true && fs.existsSync(parentRecordFolderForFiles)) { + uxLog( + "log", + this, + c.grey( + `Skipped record - ${record[this.dtl.outputFolderNameField] || record.Id} - Record files already downloaded` + ) + ); + this.recordsIgnored++; + continue; + } + await this.addToRecordsChunk(record, progressCallback); + } + this.bulkApiRecordsEnded = true; } - private async addToRecordsChunk(record: any) { + private async addToRecordsChunk(record: any, progressCallback?: (filesCompleted: number, filesDiscoveredInChunk?: number) => void) { this.recordsChunk.push(record); // If chunk size is reached , process the chunk of records if (this.recordsChunk.length === this.recordsChunkSize) { const recordsToProcess = [...this.recordsChunk]; 
this.recordsChunk = []; - this.recordsChunkQueue.push(recordsToProcess); + this.recordsChunkQueue.push({ records: recordsToProcess, progressCallback }); } } - private async processRecordsChunk(records: any[]) { + private async processRecordsChunk(records: any[], progressCallback?: (filesCompleted: number, filesDiscoveredInChunk?: number) => void) { this.recordChunksNumber++; if (this.recordChunksNumber < this.startChunkNumber) { - uxLog(this, c.cyan(`Skip parent records chunk #${this.recordChunksNumber} because it is lesser than ${this.startChunkNumber}`)); - return; - } - uxLog(this, c.cyan(`Processing parent records chunk #${this.recordChunksNumber} on ${this.chunksNumber} (${records.length} records) ...`)); - // Request all ContentDocumentLink related to all records of the chunk - const linkedEntityIdIn = records.map((record: any) => `'${record.Id}'`).join(","); - const linkedEntityInQuery = `SELECT ContentDocumentId,LinkedEntityId FROM ContentDocumentLink WHERE LinkedEntityId IN (${linkedEntityIdIn})`; - this.totalSoqlRequests++; - const contentDocumentLinks = await bulkQuery(linkedEntityInQuery, this.conn); - if (contentDocumentLinks.records.length === 0) { - uxLog(this, c.grey("No ContentDocumentLinks found for the parent records in this chunk")); + uxLog( + "action", + this, + c.cyan( + `Skip parent records chunk #${this.recordChunksNumber} because it is lesser than ${this.startChunkNumber}` + ) + ); return; } - // Retrieve all ContentVersion related to ContentDocumentLink - const contentDocIdIn = contentDocumentLinks.records.map((contentDocumentLink: any) => `'${contentDocumentLink.ContentDocumentId}'`).join(","); - const contentVersionSoql = `SELECT Id,ContentDocumentId,Description,FileExtension,FileType,PathOnClient,Title FROM ContentVersion WHERE ContentDocumentId IN (${contentDocIdIn}) AND IsLatest = true`; - this.totalSoqlRequests++; - const contentVersions = await bulkQuery(contentVersionSoql, this.conn); - - // ContentDocument object can be linked to 
multiple other objects even with same type (for example: same attachment can be linked to multiple EmailMessage objects). - // Because of this when we fetch ContentVersion for ContentDocument it can return less results than there is ContentDocumentLink objects to link. - // To fix this we create a list of ContentVersion and ContentDocumentLink pairs. - // This way we have multiple pairs and we will download ContentVersion objects for each linked object. - const versionsAndLinks = []; - contentVersions.records.forEach((contentVersion) => { - contentDocumentLinks.records.forEach((contentDocumentLink) => { - if (contentDocumentLink.ContentDocumentId === contentVersion.ContentDocumentId) { - versionsAndLinks.push({ - contentVersion: contentVersion, - contentDocumentLink: contentDocumentLink, + let actualFilesInChunk = 0; + + uxLog( + "action", + this, + c.cyan( + `Processing parent records chunk #${this.recordChunksNumber} on ${this.chunksNumber} (${records.length} records) ...` + ) + ); + // Process records in batches of 200 for Attachments and 1000 for ContentVersions to avoid hitting the SOQL query limit + const attachmentBatchSize = 200; + const contentVersionBatchSize = 1000; + for (let i = 0; i < records.length; i += attachmentBatchSize) { + const batch = records.slice(i, i + attachmentBatchSize); + // Request all Attachment related to all records of the batch using REST API + const parentIdIn = batch.map((record: any) => `'${record.Id}'`).join(','); + const attachmentQuery = `SELECT Id, Name, ContentType, ParentId, BodyLength FROM Attachment WHERE ParentId IN (${parentIdIn})`; + await this.waitIfApiLimitApproached('REST'); + this.totalRestApiCalls++; + const attachments = await this.conn.query(attachmentQuery); + actualFilesInChunk += attachments.records.length; // Count actual files discovered + + if (attachments.records.length > 0) { + // Download attachments using REST API + await PromisePool.withConcurrency(5) + .for(attachments.records) + .process(async 
(attachment: any) => { + try { + await this.downloadAttachmentFile(attachment, batch); + // Call progress callback if available + if (progressCallback) { + progressCallback(1); + } + } catch (e) { + this.filesErrors++; + uxLog("warning", this, c.red('Download file error: ' + attachment.Name + '\n' + e)); + } + }); + } else { + uxLog("log", this, c.grey(`No Attachments found for the ${batch.length} parent records in this batch`)); + } + } + for (let i = 0; i < records.length; i += contentVersionBatchSize) { + const batch = records.slice(i, i + contentVersionBatchSize); + // Request all ContentDocumentLink related to all records of the batch + const linkedEntityIdIn = batch.map((record: any) => `'${record.Id}'`).join(','); + const linkedEntityInQuery = `SELECT ContentDocumentId,LinkedEntityId FROM ContentDocumentLink WHERE LinkedEntityId IN (${linkedEntityIdIn})`; + await this.waitIfApiLimitApproached('BULK'); + this.totalBulkApiCalls++; + uxLog("log", this, c.grey(`Querying ContentDocumentLinks for ${linkedEntityInQuery.length} parent records in this batch...`)); + const contentDocumentLinks = await bulkQueryByChunks(linkedEntityInQuery, this.conn, this.parentRecordsChunkSize); + if (contentDocumentLinks.records.length > 0) { + // Retrieve all ContentVersion related to ContentDocumentLink + const contentDocIdIn = contentDocumentLinks.records.map((link: any) => `'${link.ContentDocumentId}'`); + // Loop on contentDocIdIn by contentVersionBatchSize + for (let j = 0; j < contentDocIdIn.length; j += contentVersionBatchSize) { + const contentDocIdBatch = contentDocIdIn.slice(j, j + contentVersionBatchSize).join(','); + // Log the progression of contentDocIdBatch + uxLog( + "action", + this, + c.cyan( + `Processing ContentDocumentId chunk #${Math.ceil((j + 1) / contentVersionBatchSize)} on ${Math.ceil( + contentDocIdIn.length / contentVersionBatchSize + )}` + ) + ); + // Request all ContentVersion related to all records of the batch + const contentVersionSoql = `SELECT 
Id,ContentDocumentId,Description,FileExtension,FileType,PathOnClient,Title,ContentSize,Checksum FROM ContentVersion WHERE ContentDocumentId IN (${contentDocIdBatch}) AND IsLatest = true`; + await this.waitIfApiLimitApproached('BULK'); + this.totalBulkApiCalls++; + const contentVersions = await bulkQueryByChunks(contentVersionSoql, this.conn, this.parentRecordsChunkSize); + // ContentDocument object can be linked to multiple other objects even with same type (for example: same attachment can be linked to multiple EmailMessage objects). + // Because of this when we fetch ContentVersion for ContentDocument it can return less results than there is ContentDocumentLink objects to link. + // To fix this we create a list of ContentVersion and ContentDocumentLink pairs. + // This way we have multiple pairs and we will download ContentVersion objects for each linked object. + const versionsAndLinks: any[] = []; + contentVersions.records.forEach((contentVersion) => { + contentDocumentLinks.records.forEach((contentDocumentLink) => { + if (contentDocumentLink.ContentDocumentId === contentVersion.ContentDocumentId) { + versionsAndLinks.push({ + contentVersion: contentVersion, + contentDocumentLink: contentDocumentLink, + }); + } + }); }); + actualFilesInChunk += versionsAndLinks.length; // Count actual ContentVersion files discovered + uxLog("log", this, c.grey(`Downloading ${versionsAndLinks.length} found files...`)) + // Download files + await PromisePool.withConcurrency(5) + .for(versionsAndLinks) + .process(async (versionAndLink: any) => { + try { + await this.downloadContentVersionFile( + versionAndLink.contentVersion, + batch, + versionAndLink.contentDocumentLink + ); + // Call progress callback if available + if (progressCallback) { + progressCallback(1); + } + } catch (e) { + this.filesErrors++; + uxLog("warning", this, c.red('Download file error: ' + versionAndLink.contentVersion.Title + '\n' + e)); + } + }); } - }); + } else { + uxLog("log", this, c.grey('No 
ContentDocumentLinks found for the parent records in this batch')); + } + } + + // At the end of chunk processing, report the actual files discovered in this chunk + if (progressCallback && actualFilesInChunk > 0) { + // This will help adjust the total progress based on actual discovered files + progressCallback(0, actualFilesInChunk); // Report actual files found in this chunk + } + } + + // Initialize CSV log file with headers + private async initializeCsvLog() { + await fs.ensureDir(path.dirname(this.logFile)); + const headers = 'Status,Folder,File Name,Extension,File Size (KB),Error Detail,ContentDocument Id,ContentVersion Id,Attachment Id,Validation Status,Download URL\n'; + await fs.writeFile(this.logFile, headers, 'utf8'); + uxLog("log", this, c.grey(`CSV log file initialized: ${this.logFile}`)); + WebSocketClient.sendReportFileMessage(this.logFile, "Exported files report (CSV)", 'report'); + } + + // Helper method to extract file information from output path + private extractFileInfo(outputFile: string) { + const fileName = path.basename(outputFile); + const extension = path.extname(fileName); + const folderPath = path.dirname(outputFile) + .replace(process.cwd(), '') + .replace(this.exportedFilesFolder, '') + .replace(/\\/g, '/') + .replace(/^\/+/, ''); + + return { fileName, extension, folderPath }; + } + + // Helper method to log skipped files + private async logSkippedFile( + outputFile: string, + errorDetail: string, + contentDocumentId: string = '', + contentVersionId: string = '', + attachmentId: string = '', + downloadUrl: string = '' + ) { + const { fileName, extension, folderPath } = this.extractFileInfo(outputFile); + await this.writeCsvLogEntry('skipped', folderPath, fileName, extension, 0, errorDetail, contentDocumentId, contentVersionId, attachmentId, 'Skipped', downloadUrl); + } + + // Helper method to calculate MD5 checksum of a file + private async calculateMD5(filePath: string): Promise { + const hash = crypto.createHash('md5'); + const 
stream = fs.createReadStream(filePath); + + return new Promise((resolve, reject) => { + stream.on('error', reject); + stream.on('data', chunk => hash.update(chunk)); + stream.on('end', () => resolve(hash.digest('hex'))); }); + } - // Download files - await PromisePool.withConcurrency(5) - .for(versionsAndLinks) - .process(async (versionAndLink: any) => { - try { - await this.downloadContentVersionFile(versionAndLink.contentVersion, records, versionAndLink.contentDocumentLink); - } catch (e) { - this.filesErrors++; - uxLog(this, c.red("Download file error: " + versionAndLink.contentVersion.Title + "\n" + e)); + // Helper method to validate downloaded file + private async validateDownloadedFile( + outputFile: string, + expectedSize: number, + expectedChecksum?: string, + ): Promise<{ valid: boolean; actualSize: number; actualChecksum?: string; error?: string }> { + try { + // Check if file exists + if (!fs.existsSync(outputFile)) { + return { valid: false, actualSize: 0, error: 'File does not exist' }; + } + + // Get actual file size + const stats = await fs.stat(outputFile); + const actualSize = stats.size; + + // Validate file size if expected size is provided + if (actualSize !== expectedSize) { + return { + valid: false, + actualSize, + error: `Size mismatch: expected ${expectedSize} bytes, got ${actualSize} bytes` + }; + } + + // Validate checksum if expected checksum is provided + if (expectedChecksum) { + const actualChecksum = await this.calculateMD5(outputFile); + if (actualChecksum.toLowerCase() !== expectedChecksum.toLowerCase()) { + return { + valid: false, + actualSize, + actualChecksum, + error: `Checksum mismatch: expected ${expectedChecksum}, got ${actualChecksum}` + }; } - }); + return { valid: true, actualSize, actualChecksum }; + } + + return { valid: true, actualSize }; + } catch (error) { + return { + valid: false, + actualSize: 0, + error: `Validation error: ${(error as Error).message}` + }; + } + } + + // Write a CSV entry for each file 
processed (fileSize in KB) + private async writeCsvLogEntry( + status: 'success' | 'failed' | 'skipped' | 'invalid', + folder: string, + fileName: string, + extension: string, + fileSizeKB: number, + errorDetail: string = '', + contentDocumentId: string = '', + contentVersionId: string = '', + attachmentId: string = '', + validationStatus: string = '', + downloadUrl: string = '' + ) { + try { + // Escape CSV values to handle commas, quotes, and newlines + const escapeCsvValue = (value: string | number): string => { + const strValue = String(value); + if (strValue.includes(',') || strValue.includes('"') || strValue.includes('\n')) { + return `"${strValue.replace(/"/g, '""')}"`; + } + return strValue; + }; + + const csvLine = [ + escapeCsvValue(status), + escapeCsvValue(folder), + escapeCsvValue(fileName), + escapeCsvValue(extension), + escapeCsvValue(fileSizeKB), + escapeCsvValue(errorDetail), + escapeCsvValue(contentDocumentId), + escapeCsvValue(contentVersionId), + escapeCsvValue(attachmentId), + escapeCsvValue(validationStatus), + escapeCsvValue(downloadUrl) + ].join(',') + '\n'; + + await fs.appendFile(this.logFile, csvLine, 'utf8'); + } catch (e) { + uxLog("warning", this, c.yellow(`Error writing to CSV log: ${(e as Error).message}`)); + } + } + + private async downloadFile( + fetchUrl: string, + outputFile: string, + contentDocumentId: string = '', + contentVersionId: string = '', + attachmentId: string = '', + expectedSize: number, + expectedChecksum?: string, + ) { + // In resume mode, check if file already exists and is valid + if (this.resumeExport && fs.existsSync(outputFile)) { + const { fileName, extension, folderPath } = this.extractFileInfo(outputFile); + let fileSizeKB = 0; + + try { + const stats = await fs.stat(outputFile); + fileSizeKB = Math.round(stats.size / 1024); // Convert bytes to KB + + // Validate existing file (always have validation data: checksum for ContentVersion, size for Attachment) + const validation = await 
this.validateDownloadedFile(outputFile, expectedSize, expectedChecksum); + + if (validation.valid) { + this.filesValidated++; // Count only valid files + // File exists and is valid - skip download + const fileDisplay = path.join(folderPath, fileName).replace(/\\/g, '/'); + uxLog("success", this, c.grey(`Skipped (valid existing file) ${fileDisplay}`)); + this.filesIgnoredExisting++; + + // Write success entry to CSV log + await this.writeCsvLogEntry('success', folderPath, fileName, extension, fileSizeKB, 'Existing valid file', contentDocumentId, contentVersionId, attachmentId, 'Valid (existing)', fetchUrl); + return; + } else { + // File exists but is invalid - will re-download + uxLog("log", this, c.yellow(`Existing file ${fileName} is invalid (${validation.error}) - re-downloading`)); + } + } catch (e) { + uxLog("warning", this, c.yellow(`Could not validate existing file ${fileName}: ${(e as Error).message}`)); + // Continue with download if we can't validate existing file + } + } + + // Proceed with normal download process + const downloadResult = await new FileDownloader(fetchUrl, { conn: this.conn, outputFile: outputFile, label: 'file' }).download(); + + // Extract file information for CSV logging + const { fileName, extension, folderPath } = this.extractFileInfo(outputFile); + let fileSizeKB = 0; + let errorDetail = ''; + let validationError = ''; // Store validation error separately + let validationStatus = ''; + let isValidFile = false; // Track if file is both downloaded and valid + + // Get file size if download was successful + if (downloadResult.success && fs.existsSync(outputFile)) { + try { + const stats = await fs.stat(outputFile); + fileSizeKB = Math.round(stats.size / 1024); // Convert bytes to KB + + // Perform file validation (always have validation data: checksum for ContentVersion, size for Attachment) + const validation = await this.validateDownloadedFile(outputFile, expectedSize, expectedChecksum); + + if (validation.valid) { + 
this.filesValidated++; // Count only valid files + validationStatus = 'Valid'; + isValidFile = true; + uxLog("success", this, c.green(`✓ Validation passed for ${fileName}`)); + } else { + validationStatus = 'Invalid'; + validationError = validation.error || 'Unknown validation error'; + isValidFile = false; + this.filesValidationErrors++; + uxLog("warning", this, c.yellow(`⚠ Validation failed for ${fileName}: ${validation.error}`)); + } + } catch (e) { + uxLog("warning", this, c.yellow(`Could not get file size for ${fileName}: ${(e as Error).message}`)); + validationStatus = 'Invalid'; + validationError = (e as Error).message; + isValidFile = false; + } + } else if (!downloadResult.success) { + errorDetail = downloadResult.error || 'Unknown download error'; + validationStatus = 'Download failed'; + isValidFile = false; + } + + // Use file folder and file name for log display + const fileDisplay = path.join(folderPath, fileName).replace(/\\/g, '/'); + + // Log based on download success AND validation success + if (downloadResult.success && isValidFile) { + uxLog("success", this, c.grey(`Downloaded ${fileDisplay}`)); + this.filesDownloaded++; + + // Write success entry to CSV log with Salesforce IDs and validation status + await this.writeCsvLogEntry('success', folderPath, fileName, extension, fileSizeKB, '', contentDocumentId, contentVersionId, attachmentId, validationStatus, fetchUrl); + } else if (downloadResult.success && !isValidFile) { + // File was downloaded but validation failed + uxLog("warning", this, c.red(`Invalid ${fileDisplay} - validation failed`)); + this.filesErrors++; + + // Write invalid entry to CSV log with validation error details + await this.writeCsvLogEntry('invalid', folderPath, fileName, extension, fileSizeKB, validationError, contentDocumentId, contentVersionId, attachmentId, validationStatus, fetchUrl); + } else { + // Download failed + uxLog("warning", this, c.red(`Error ${fileDisplay}`)); + this.filesErrors++; + + // Write failed entry 
to CSV log with Salesforce IDs and validation status + await this.writeCsvLogEntry('failed', folderPath, fileName, extension, fileSizeKB, errorDetail, contentDocumentId, contentVersionId, attachmentId, validationStatus, fetchUrl); + } + } + + private async downloadAttachmentFile(attachment: any, records: any[]) { + // Check file size filter (BodyLength is in bytes) + const fileSizeKB = attachment.BodyLength ? Math.round(attachment.BodyLength / 1024) : 0; + if (this.dtl.fileSizeMin && this.dtl.fileSizeMin > 0 && fileSizeKB < this.dtl.fileSizeMin) { + uxLog("log", this, c.grey(`Skipped - ${attachment.Name} - File size (${fileSizeKB} KB) below minimum (${this.dtl.fileSizeMin} KB)`)); + this.filesIgnoredSize++; + + // Log skipped file to CSV + const parentAttachment = records.filter((record) => record.Id === attachment.ParentId)[0]; + const attachmentParentFolderName = (parentAttachment[this.dtl.outputFolderNameField] || parentAttachment.Id).replace( + /[/\\?%*:|"<>]/g, + '-' + ); + const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, attachmentParentFolderName)); + const outputFile = path.join(parentRecordFolderForFiles, attachment.Name.replace(/[/\\?%*:|"<>]/g, '-')); + const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/Attachment/${attachment.Id}/Body`; + await this.logSkippedFile(outputFile, `File size (${fileSizeKB} KB) below minimum (${this.dtl.fileSizeMin} KB)`, '', '', attachment.Id, fetchUrl); + return; + } + + // Retrieve initial record to build output files folder name + const parentAttachment = records.filter((record) => record.Id === attachment.ParentId)[0]; + // Build record output files folder (if folder name contains slashes or antislashes, replace them by spaces) + const attachmentParentFolderName = (parentAttachment[this.dtl.outputFolderNameField] || parentAttachment.Id).replace( + /[/\\?%*:|"<>]/g, + '-' + ); + const parentRecordFolderForFiles = 
path.resolve(path.join(this.exportedFilesFolder, attachmentParentFolderName)); + // Define name of the file + const outputFile = path.join(parentRecordFolderForFiles, attachment.Name.replace(/[/\\?%*:|"<>]/g, '-')); + // Create directory if not existing + await fs.ensureDir(parentRecordFolderForFiles); + // Download file locally with validation (Attachments have BodyLength but no checksum) + const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/Attachment/${attachment.Id}/Body`; + await this.downloadFile(fetchUrl, outputFile, '', '', attachment.Id, Number(attachment.BodyLength), undefined); } - private async downloadContentVersionFile(contentVersion, records, contentDocumentLink) { + private async downloadContentVersionFile(contentVersion: any, records: any[], contentDocumentLink: any) { + // Check file size filter (ContentSize is in bytes) + const fileSizeKB = contentVersion.ContentSize ? Math.round(contentVersion.ContentSize / 1024) : 0; + if (this.dtl.fileSizeMin && this.dtl.fileSizeMin > 0 && fileSizeKB < this.dtl.fileSizeMin) { + uxLog("log", this, c.grey(`Skipped - ${contentVersion.Title} - File size (${fileSizeKB} KB) below minimum (${this.dtl.fileSizeMin} KB)`)); + this.filesIgnoredSize++; + + // Log skipped file to CSV + const parentRecord = records.filter((record) => record.Id === contentDocumentLink.LinkedEntityId)[0]; + const parentFolderName = (parentRecord[this.dtl.outputFolderNameField] || parentRecord.Id).replace( + /[/\\?%*:|"<>]/g, + '-' + ); + const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, parentFolderName)); + const outputFile = path.join(parentRecordFolderForFiles, contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-')); + const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`; + await this.logSkippedFile(outputFile, `File size (${fileSizeKB} KB) below minimum (${this.dtl.fileSizeMin} KB)`, 
contentVersion.ContentDocumentId, contentVersion.Id, '', fetchUrl); + return; + } + // Retrieve initial record to build output files folder name const parentRecord = records.filter((record) => record.Id === contentDocumentLink.LinkedEntityId)[0]; // Build record output files folder (if folder name contains slashes or antislashes, replace them by spaces) - const parentFolderName = (parentRecord[this.dtl.outputFolderNameField] || parentRecord.Id).replace(/[/\\?%*:|"<>]/g, "-"); + const parentFolderName = (parentRecord[this.dtl.outputFolderNameField] || parentRecord.Id).replace( + /[/\\?%*:|"<>]/g, + '-' + ); const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, parentFolderName)); // Define name of the file let outputFile = // Id - this.dtl?.outputFileNameFormat === "id" + this.dtl?.outputFileNameFormat === 'id' ? path.join(parentRecordFolderForFiles, contentVersion.Id) : // Title + Id - this.dtl?.outputFileNameFormat === "title_id" - ? path.join(parentRecordFolderForFiles, `${contentVersion.Title.replace(/[/\\?%*:|"<>]/g, "-")}_${contentVersion.Id}`) + this.dtl?.outputFileNameFormat === 'title_id' + ? path.join( + parentRecordFolderForFiles, + `${contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-')}_${contentVersion.Id}` + ) : // Id + Title - this.dtl?.outputFileNameFormat === "id_title" - ? path.join(parentRecordFolderForFiles, `${contentVersion.Id}_${contentVersion.Title.replace(/[/\\?%*:|"<>]/g, "-")}`) + this.dtl?.outputFileNameFormat === 'id_title' + ? 
path.join( + parentRecordFolderForFiles, + `${contentVersion.Id}_${contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-')}` + ) : // Title - path.join(parentRecordFolderForFiles, contentVersion.Title.replace(/[/\\?%*:|"<>]/g, "-")); + path.join(parentRecordFolderForFiles, contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-')); // Add file extension if missing in file title, and replace .snote by .html if (contentVersion.FileExtension && path.extname(outputFile) !== contentVersion.FileExtension) { - outputFile = outputFile + "." + (contentVersion.FileExtension !== "snote" ? contentVersion.FileExtension : "html"); + outputFile = + outputFile + '.' + (contentVersion.FileExtension !== 'snote' ? contentVersion.FileExtension : 'html'); } // Check file extension - if (this.dtl.fileTypes !== "all" && !this.dtl.fileTypes.includes(contentVersion.FileType)) { - uxLog(this, c.grey(`Skipped - ${outputFile.replace(this.exportedFilesFolder, "")} - File type ignored`)); + if (this.dtl.fileTypes !== 'all' && !this.dtl.fileTypes.includes(contentVersion.FileType)) { + uxLog("log", this, c.grey(`Skipped - ${outputFile.replace(this.exportedFilesFolder, '')} - File type ignored`)); this.filesIgnoredType++; + + // Log skipped file to CSV + const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`; + await this.logSkippedFile(outputFile, 'File type ignored', contentVersion.ContentDocumentId, contentVersion.Id, '', fetchUrl); return; } - // Check file overwrite - if (this.dtl.overwriteFiles !== true && fs.existsSync(outputFile)) { - uxLog(this, c.yellow(`Skipped - ${outputFile.replace(this.exportedFilesFolder, "")} - File already existing`)); + // Check file overwrite (unless in resume mode where downloadFile handles existing files) + if (this.dtl.overwriteFiles !== true && !this.resumeExport && fs.existsSync(outputFile)) { + uxLog("warning", this, c.yellow(`Skipped - ${outputFile.replace(this.exportedFilesFolder, '')} 
- File already existing`)); this.filesIgnoredExisting++; + + // Log skipped file to CSV + const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`; + await this.logSkippedFile(outputFile, 'File already exists', contentVersion.ContentDocumentId, contentVersion.Id, '', fetchUrl); return; } // Create directory if not existing await fs.ensureDir(parentRecordFolderForFiles); - // Download file locally - const fetchUrl = `${this.conn.instanceUrl}/services/data/v${CONSTANTS.API_VERSION}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`; - try { - const fetchRes = await fetch(fetchUrl, this.fetchOptions); - if (fetchRes.ok !== true) { - throw new SfdxError(`Fetch error - ${fetchUrl} - + ${JSON.stringify(fetchRes.body)}`); - } - // Wait for file to be written - const stream = fs.createWriteStream(outputFile); - fetchRes.body.pipe(stream); - /* - await new Promise(resolve => { - stream.on('finish', function() { - resolve(true); - }); - }) */ - uxLog(this, c.green(`Success - ${path.relative(process.cwd(), outputFile)}`)); - this.filesDownloaded++; - } catch (err) { - // Download failure - uxLog(this, c.red(`Error - ${path.relative(process.cwd(), outputFile)} - ${err}`)); - this.filesErrors++; - } + // Download file locally with validation (ContentVersion has both Checksum and ContentSize) + const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`; + await this.downloadFile(fetchUrl, outputFile, contentVersion.ContentDocumentId, contentVersion.Id, '', Number(contentVersion.ContentSize), contentVersion.Checksum); } - // Build stats & result private async buildResult() { - const connAny = this.conn as any; - const apiCallsRemaining = connAny?.limitInfo?.apiUsage?.used - ? 
(connAny?.limitInfo?.apiUsage?.limit || 0) - (connAny?.limitInfo?.apiUsage?.used || 0) - : null; - uxLog(this, c.cyan(`API limit: ${c.bold(connAny?.limitInfo?.apiUsage?.limit || null)}`)); - uxLog(this, c.cyan(`API used before process: ${c.bold(this.apiUsedBefore)}`)); - uxLog(this, c.cyan(`API used after process: ${c.bold(connAny?.limitInfo?.apiUsage?.used || null)}`)); - uxLog(this, c.cyan(`API calls remaining for today: ${c.bold(apiCallsRemaining)}`)); - uxLog(this, c.cyan(`Total SOQL requests: ${c.bold(this.totalSoqlRequests)}`)); - uxLog(this, c.cyan(`Total parent records found: ${c.bold(this.totalParentRecords)}`)); - uxLog(this, c.cyan(`Total parent records with files: ${c.bold(this.parentRecordsWithFiles)}`)); - uxLog(this, c.cyan(`Total parent records ignored because already existing: ${c.bold(this.recordsIgnored)}`)); - uxLog(this, c.cyan(`Total files downloaded: ${c.bold(this.filesDownloaded)}`)); - uxLog(this, c.cyan(`Total file download errors: ${c.bold(this.filesErrors)}`)); - uxLog(this, c.cyan(`Total file skipped because of type constraint: ${c.bold(this.filesIgnoredType)}`)); - uxLog(this, c.cyan(`Total file skipped because previously downloaded: ${c.bold(this.filesIgnoredExisting)}`)); - - return { - totalParentRecords: this.totalParentRecords, - parentRecordsWithFiles: this.parentRecordsWithFiles, - filesDownloaded: this.filesDownloaded, - filesErrors: this.filesErrors, - recordsIgnored: this.recordsIgnored, - filesIgnoredType: this.filesIgnoredType, - filesIgnoredExisting: this.filesIgnoredExisting, - apiLimit: connAny?.limitInfo?.apiUsage?.limit || null, - apiUsedBefore: this.apiUsedBefore, - apiUsedAfter: connAny?.limitInfo?.apiUsage?.used || null, - apiCallsRemaining, + // Get final API usage from the limits manager + const finalUsage = await this.apiLimitsManager.getFinalUsage(); + + // Display final API usage summary + try { + const finalApiUsage = await this.getApiUsageStatus(); + uxLog("success", this, c.green(`Export completed! 
Final API usage: ${finalApiUsage.message}`)); + } catch (error) { + uxLog("warning", this, c.yellow(`Could not retrieve final API usage: ${(error as Error).message}`)); + } + + const result = { + stats: { + filesValidated: this.filesValidated, + filesDownloaded: this.filesDownloaded, + filesErrors: this.filesErrors, + filesIgnoredType: this.filesIgnoredType, + filesIgnoredExisting: this.filesIgnoredExisting, + filesIgnoredSize: this.filesIgnoredSize, + filesValidationErrors: this.filesValidationErrors, + totalRestApiCalls: this.totalRestApiCalls, + totalBulkApiCalls: this.totalBulkApiCalls, + totalParentRecords: this.totalParentRecords, + parentRecordsWithFiles: this.parentRecordsWithFiles, + recordsIgnored: this.recordsIgnored, + restApiUsedBefore: finalUsage.restUsed, + restApiUsedAfter: finalUsage.restUsed, + restApiLimit: finalUsage.restLimit, + restApiCallsRemaining: finalUsage.restRemaining, + bulkApiUsedBefore: finalUsage.bulkUsed, + bulkApiUsedAfter: finalUsage.bulkUsed, + bulkApiLimit: finalUsage.bulkLimit, + bulkApiCallsRemaining: finalUsage.bulkRemaining, + }, + logFile: this.logFile }; + await createXlsxFromCsv(this.logFile, { fileTitle: "Exported files report" }, result); + return result; } } @@ -361,18 +955,87 @@ export class FilesImporter { private commandThis: any; private dtl: any = null; // export config - private exportedFilesFolder: string; + private exportedFilesFolder: string = ''; private handleOverwrite = false; + private logFile: string; + + // Statistics tracking + private totalFolders = 0; + private totalFiles = 0; + private filesUploaded = 0; + private filesOverwritten = 0; + private filesErrors = 0; + private filesSkipped = 0; + + // Optimized API Limits Management System + private apiLimitsManager: ApiLimitsManager; - constructor(filesPath: string, conn: Connection, options: { exportConfig?: any; handleOverwrite?: boolean }, commandThis: any) { + constructor( + filesPath: string, + conn: Connection, + options: { exportConfig?: any; 
handleOverwrite?: boolean }, + commandThis: any + ) { this.filesPath = filesPath; - this.exportedFilesFolder = path.join(this.filesPath, "export"); + this.exportedFilesFolder = path.join(this.filesPath, 'export'); this.handleOverwrite = options?.handleOverwrite === true; this.conn = conn; this.commandThis = commandThis; if (options.exportConfig) { this.dtl = options.exportConfig; } + + // Initialize the optimized API limits manager + this.apiLimitsManager = new ApiLimitsManager(conn, commandThis); + + // Initialize log file path + const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, -5); + this.logFile = path.join(this.filesPath, `import-log-${timestamp}.csv`); + } + + // Initialize CSV log file with headers + private async initializeCsvLog() { + await fs.ensureDir(path.dirname(this.logFile)); + const headers = 'Status,Folder,File Name,Extension,File Size (KB),Error Detail,ContentVersion Id\n'; + await fs.writeFile(this.logFile, headers, 'utf8'); + uxLog("log", this.commandThis, c.grey(`CSV log file initialized: ${this.logFile}`)); + WebSocketClient.sendReportFileMessage(this.logFile, "Imported files report (CSV)", 'report'); + } + + // Helper method to extract file information from file path + private extractFileInfo(filePath: string, folderName: string) { + const fileName = path.basename(filePath); + const extension = path.extname(fileName); + + return { fileName, extension, folderPath: folderName }; + } + + // Write a CSV entry for each file processed (fileSize in KB) + private async writeCsvLogEntry(status: 'success' | 'failed' | 'skipped' | 'overwritten', folder: string, fileName: string, extension: string, fileSizeKB: number, errorDetail: string = '', contentVersionId: string = '') { + try { + // Escape CSV values to handle commas, quotes, and newlines + const escapeCsvValue = (value: string | number): string => { + const strValue = String(value); + if (strValue.includes(',') || strValue.includes('"') || strValue.includes('\n')) { + 
return `"${strValue.replace(/"/g, '""')}"`; + } + return strValue; + }; + + const csvLine = [ + escapeCsvValue(status), + escapeCsvValue(folder), + escapeCsvValue(fileName), + escapeCsvValue(extension), + escapeCsvValue(fileSizeKB), + escapeCsvValue(errorDetail), + escapeCsvValue(contentVersionId) + ].join(',') + '\n'; + + await fs.appendFile(this.logFile, csvLine, 'utf8'); + } catch (e) { + uxLog("warning", this.commandThis, c.yellow(`Error writing to CSV log: ${(e as Error).message}`)); + } } async processImport() { @@ -380,42 +1043,72 @@ export class FilesImporter { if (this.dtl === null) { this.dtl = await getFilesWorkspaceDetail(this.filesPath); } - uxLog(this.commandThis, c.cyan(`Importing files from ${c.green(this.dtl.full_label)} ...`)); - uxLog(this.commandThis, c.italic(c.grey(this.dtl.description))); + uxLog("action", this.commandThis, c.cyan(`Importing files from ${c.green(this.dtl.full_label)} ...`)); + uxLog("log", this.commandThis, c.italic(c.grey(this.dtl.description))); // Get folders and files const allRecordFolders = fs.readdirSync(this.exportedFilesFolder).filter((file) => { return fs.statSync(path.join(this.exportedFilesFolder, file)).isDirectory(); }); - let totalFilesNumber = 0; + + this.totalFolders = allRecordFolders.length; + + // Count total files for (const folder of allRecordFolders) { - totalFilesNumber += fs.readdirSync(path.join(this.exportedFilesFolder, folder)).length; + this.totalFiles += fs.readdirSync(path.join(this.exportedFilesFolder, folder)).length; } - await this.calculateApiConsumption(totalFilesNumber); + // Initialize API usage tracking with total file count + await this.calculateApiConsumption(this.totalFiles); + + // Initialize CSV logging + await this.initializeCsvLog(); + + // Start progress tracking + WebSocketClient.sendProgressStartMessage("Importing files", this.totalFiles); // Query parent objects to find Ids corresponding to field value used as folder name const parentObjectsRes = await 
bulkQuery(this.dtl.soqlQuery, this.conn); const parentObjects = parentObjectsRes.records; - let successNb = 0; - let errorNb = 0; + + let processedFiles = 0; for (const recordFolder of allRecordFolders) { - uxLog(this, c.grey(`Processing record ${recordFolder} ...`)); + uxLog("log", this.commandThis, c.grey(`Processing record ${recordFolder} ...`)); const recordFolderPath = path.join(this.exportedFilesFolder, recordFolder); + // List files in folder const files = fs.readdirSync(recordFolderPath).filter((file) => { return fs.statSync(path.join(this.exportedFilesFolder, recordFolder, file)).isFile(); }); + // Find Id of parent object using folder name - const parentRecordIds = parentObjects.filter((parentObj) => parentObj[this.dtl.outputFolderNameField] === recordFolder); + const parentRecordIds = parentObjects.filter( + (parentObj) => parentObj[this.dtl.outputFolderNameField] === recordFolder + ); + if (parentRecordIds.length === 0) { - uxLog(this, c.red(`Unable to find Id for ${this.dtl.outputFolderNameField}=${recordFolder}`)); + uxLog("error", this.commandThis, c.red(`Unable to find Id for ${this.dtl.outputFolderNameField}=${recordFolder}`)); + + // Log all files in this folder as skipped + for (const file of files) { + const { fileName, extension } = this.extractFileInfo(file, recordFolder); + const filePath = path.join(recordFolderPath, file); + const fileSizeKB = fs.existsSync(filePath) ? 
Math.round(fs.statSync(filePath).size / 1024) : 0; + + await this.writeCsvLogEntry('skipped', recordFolder, fileName, extension, fileSizeKB, 'Parent record not found', ''); + this.filesSkipped++; + processedFiles++; + + // Update progress + WebSocketClient.sendProgressStepMessage(processedFiles, this.totalFiles); + } continue; } + const parentRecordId = parentRecordIds[0].Id; - let existingDocuments = []; + let existingDocuments: any[] = []; // Collect existing documents if we handle file overwrite if (this.handleOverwrite) { const existingDocsQuery = `SELECT Id, ContentDocumentId, Title FROM ContentVersion WHERE FirstPublishLocationId = '${parentRecordId}'`; @@ -424,74 +1117,146 @@ export class FilesImporter { } for (const file of files) { - const fileData = fs.readFileSync(path.join(recordFolderPath, file)); - const contentVersionParams: any = { - Title: file, - PathOnClient: file, - VersionData: fileData.toString("base64"), - }; - const matchingExistingDocs = existingDocuments.filter((doc) => doc.Title === file); - if (matchingExistingDocs.length > 0) { - contentVersionParams.ContentDocumentId = matchingExistingDocs[0].ContentDocumentId; - uxLog(this, c.grey(`Overwriting file ${file} ...`)); - } else { - contentVersionParams.FirstPublishLocationId = parentRecordId; - uxLog(this, c.grey(`Uploading file ${file} ...`)); - } + const filePath = path.join(recordFolderPath, file); + const { fileName, extension } = this.extractFileInfo(file, recordFolder); + const fileSizeKB = fs.existsSync(filePath) ? 
Math.round(fs.statSync(filePath).size / 1024) : 0; + try { - const insertResult = await this.conn.sobject("ContentVersion").create(contentVersionParams); - if (!insertResult.success) { - uxLog(this, c.red(`Unable to upload file ${file}`)); - errorNb++; + const fileData = fs.readFileSync(filePath); + const contentVersionParams: any = { + Title: file, + PathOnClient: file, + VersionData: fileData.toString('base64'), + }; + + const matchingExistingDocs = existingDocuments.filter((doc) => doc.Title === file); + let isOverwrite = false; + + if (matchingExistingDocs.length > 0) { + contentVersionParams.ContentDocumentId = matchingExistingDocs[0].ContentDocumentId; + uxLog("log", this.commandThis, c.grey(`Overwriting file ${file} ...`)); + isOverwrite = true; + } else { + contentVersionParams.FirstPublishLocationId = parentRecordId; + uxLog("log", this.commandThis, c.grey(`Uploading file ${file} ...`)); + } + + const insertResult = await this.conn.sobject('ContentVersion').create(contentVersionParams); + + if (Array.isArray(insertResult) && insertResult.length === 0) { + uxLog("error", this.commandThis, c.red(`Unable to upload file ${file}`)); + await this.writeCsvLogEntry('failed', recordFolder, fileName, extension, fileSizeKB, 'Upload failed', ''); + this.filesErrors++; + } else if (Array.isArray(insertResult) && !insertResult[0].success) { + uxLog("error", this.commandThis, c.red(`Unable to upload file ${file}`)); + await this.writeCsvLogEntry('failed', recordFolder, fileName, extension, fileSizeKB, insertResult[0].errors?.join(', ') || 'Upload failed', ''); + this.filesErrors++; } else { - successNb++; + // Extract ContentVersion ID from successful insert result + const contentVersionId = Array.isArray(insertResult) && insertResult.length > 0 + ? 
insertResult[0].id + : (insertResult as any).id || ''; + + if (isOverwrite) { + uxLog("success", this.commandThis, c.grey(`Overwritten ${file}`)); + await this.writeCsvLogEntry('overwritten', recordFolder, fileName, extension, fileSizeKB, '', contentVersionId); + this.filesOverwritten++; + } else { + uxLog("success", this.commandThis, c.grey(`Uploaded ${file}`)); + await this.writeCsvLogEntry('success', recordFolder, fileName, extension, fileSizeKB, '', contentVersionId); + this.filesUploaded++; + } } } catch (e) { - uxLog(this, c.red(`Unable to upload file ${file}: ${e.message}`)); - errorNb++; + const errorDetail = (e as Error).message; + uxLog("error", this.commandThis, c.red(`Unable to upload file ${file}: ${errorDetail}`)); + await this.writeCsvLogEntry('failed', recordFolder, fileName, extension, fileSizeKB, errorDetail, ''); + this.filesErrors++; } + + processedFiles++; + // Update progress + WebSocketClient.sendProgressStepMessage(processedFiles, this.totalFiles); } } - uxLog(this, c.green(`Uploaded ${successNb} files`)); - if (errorNb > 0) { - uxLog(this, c.yellow(`Errors during the upload of ${successNb} files`)); - } - return { successNb: successNb, errorNb: errorNb }; + // End progress tracking + WebSocketClient.sendProgressEndMessage(this.totalFiles); + + // Build and return result + return await this.buildResult(); } - // Calculate API consumption + // Build stats & result + private async buildResult() { + // Get final API usage from the limits manager + const finalUsage = await this.apiLimitsManager.getFinalUsage(); + + const result = { + stats: { + filesUploaded: this.filesUploaded, + filesOverwritten: this.filesOverwritten, + filesErrors: this.filesErrors, + filesSkipped: this.filesSkipped, + totalFolders: this.totalFolders, + totalFiles: this.totalFiles, + restApiUsedBefore: finalUsage.restUsed, + restApiUsedAfter: finalUsage.restUsed, + restApiLimit: finalUsage.restLimit, + restApiCallsRemaining: finalUsage.restRemaining, + bulkApiUsedBefore: 
finalUsage.bulkUsed, + bulkApiUsedAfter: finalUsage.bulkUsed, + bulkApiLimit: finalUsage.bulkLimit, + bulkApiCallsRemaining: finalUsage.bulkRemaining, + }, + logFile: this.logFile + }; + await createXlsxFromCsv(this.logFile, { fileTitle: "Imported files report" }, result); + return result; + } + + // Calculate API consumption using the optimized ApiLimitsManager private async calculateApiConsumption(totalFilesNumber) { + // Initialize the API limits manager + await this.apiLimitsManager.initialize(); + const bulkCallsNb = 1; if (this.handleOverwrite) { totalFilesNumber = totalFilesNumber * 2; } + // Check if there are enough API calls available // Request user confirmation if (!isCI) { const warningMessage = c.cyanBright( `Files import consumes one REST API call per uploaded file. - (Estimation: ${bulkCallsNb} Bulks calls and ${totalFilesNumber} REST calls) Do you confirm you want to proceed ?`, + (Estimation: ${bulkCallsNb} Bulks calls and ${totalFilesNumber} REST calls) Do you confirm you want to proceed ?` ); - const promptRes = await prompts({ type: "confirm", message: warningMessage }); + const promptRes = await prompts({ + type: 'confirm', + message: warningMessage, + description: 'Confirm file import operation which will consume API calls' + }); if (promptRes.value !== true) { - throw new SfdxError("Command cancelled by user"); + throw new SfError('Command cancelled by user'); } } } } -export async function selectFilesWorkspace(opts = { selectFilesLabel: "Please select a files folder to export" }) { +export async function selectFilesWorkspace(opts = { selectFilesLabel: 'Please select a files folder to export' }) { if (!fs.existsSync(filesFolderRoot)) { - throw new SfdxError("There is no files root folder 'scripts/files' in your workspace. Create it and define a files export configuration"); + throw new SfError( + "There is no files root folder 'scripts/files' in your workspace. 
Create it and define a files export configuration" + ); } const filesFolders = fs .readdirSync(filesFolderRoot, { withFileTypes: true }) .filter((dirent) => dirent.isDirectory()) - .map((dirent) => path.join(".", "scripts", "files", dirent.name)); + .map((dirent) => path.join('.', 'scripts', 'files', dirent.name)); if (filesFolders.length === 0) { - throw new SfdxError("There is no file exports folder in your workspace"); + throw new SfError('There is no file exports folder in your workspace'); } const choices: any = []; for (const filesFolder of filesFolders) { @@ -505,32 +1270,42 @@ export async function selectFilesWorkspace(opts = { selectFilesLabel: "Please se } } const filesDirResult = await prompts({ - type: "select", - name: "value", + type: 'select', + name: 'value', message: c.cyanBright(opts.selectFilesLabel), + description: 'Select the files workspace configuration to use for this operation', choices: choices, }); return filesDirResult.value; } export async function getFilesWorkspaceDetail(filesWorkspace: string) { - const exportFile = path.join(filesWorkspace, "export.json"); + const exportFile = path.join(filesWorkspace, 'export.json'); if (!fs.existsSync(exportFile)) { - uxLog(this, c.yellow(`Your File export folder ${c.bold(filesWorkspace)} must contain an ${c.bold("export.json")} configuration file`)); + uxLog( + "warning", + this, + c.yellow( + `Your File export folder ${c.bold(filesWorkspace)} must contain an ${c.bold('export.json')} configuration file` + ) + ); return null; } - const exportFileJson = JSON.parse(await fs.readFile(exportFile, "utf8")); - const folderName = filesWorkspace.replace(/\\/g, "/").match(/([^/]*)\/*$/)[1]; + const exportFileJson = JSON.parse(await fs.readFile(exportFile, 'utf8')); + const folderName = (filesWorkspace.replace(/\\/g, '/').match(/([^/]*)\/*$/) || '')[1]; const hardisLabel = exportFileJson.sfdxHardisLabel || folderName; const hardisDescription = exportFileJson.sfdxHardisDescription || filesWorkspace; - const 
soqlQuery = exportFileJson.soqlQuery || ""; - const fileTypes = exportFileJson.fileTypes || "all"; - const outputFolderNameField = exportFileJson.outputFolderNameField || "Name"; - const outputFileNameFormat = exportFileJson.outputFileNameFormat || "title"; - const overwriteParentRecords = exportFileJson.overwriteParentRecords === false ? false : exportFileJson.overwriteParentRecords || true; + const soqlQuery = exportFileJson.soqlQuery || ''; + const fileTypes = exportFileJson.fileTypes || 'all'; + const outputFolderNameField = exportFileJson.outputFolderNameField || 'Name'; + const outputFileNameFormat = exportFileJson.outputFileNameFormat || 'title'; + const overwriteParentRecords = + exportFileJson.overwriteParentRecords === false ? false : exportFileJson.overwriteParentRecords || true; const overwriteFiles = exportFileJson.overwriteFiles || false; + const fileSizeMin = exportFileJson.fileSizeMin || 0; return { - full_label: `[${folderName}]${folderName != hardisLabel ? `: ${hardisLabel}` : ""}`, + full_label: `[${folderName}]${folderName != hardisLabel ? `: ${hardisLabel}` : ''}`, + name: folderName, label: hardisLabel, description: hardisDescription, soqlQuery: soqlQuery, @@ -539,73 +1314,98 @@ export async function getFilesWorkspaceDetail(filesWorkspace: string) { outputFileNameFormat: outputFileNameFormat, overwriteParentRecords: overwriteParentRecords, overwriteFiles: overwriteFiles, + fileSizeMin: fileSizeMin, }; } export async function promptFilesExportConfiguration(filesExportConfig: any, override = false) { - const questions = []; + const questions: any[] = []; if (override === false) { questions.push( ...[ { - type: "text", - name: "filesExportPath", - message: c.cyanBright('Please input the files export config folder name (PascalCase format). 
Ex: "OpportunitiesPDF"'), + type: 'text', + name: 'filesExportPath', + message: c.cyanBright( + 'Please input the files export config folder name (PascalCase format)' + ), + description: 'The folder name that will be created to store the export configuration and downloaded files', + placeholder: 'Ex: OpportunitiesPDF', }, { - type: "text", - name: "sfdxHardisLabel", - message: c.cyanBright("Please input a label for the files export configuration"), + type: 'text', + name: 'sfdxHardisLabel', + message: c.cyanBright('Please input a label for the files export configuration'), + description: 'A human-readable label that will identify this export configuration', initial: filesExportConfig.sfdxHardisLabel, }, { - type: "text", - name: "sfdxHardisDescription", - message: c.cyanBright("Please input a description of the files export configuration"), + type: 'text', + name: 'sfdxHardisDescription', + message: c.cyanBright('Please input a description of the files export configuration'), + description: 'A detailed description explaining what this export configuration does', initial: filesExportConfig.sfdxHardisDescription, }, - ], + ] ); } questions.push( ...[ { - type: "text", - name: "soqlQuery", - message: "Please input the main SOQL Query to fetch the parent records of files (ContentVersions). 
Ex: SELECT Id,Name from Opportunity", + type: 'text', + name: 'soqlQuery', + message: + 'Please input the main SOQL Query to fetch the parent records of files (ContentVersions)', + description: 'SOQL query that retrieves the parent records to which files are attached', + placeholder: 'Ex: SELECT Id,Name from Opportunity', initial: filesExportConfig.soqlQuery, }, { - type: "text", - name: "outputFolderNameField", - message: "Please input the field to use to build the name of the folder containing downloaded files", + type: 'text', + name: 'outputFolderNameField', + message: 'Please input the field to use to build the name of the folder containing downloaded files', + description: 'Field name from the SOQL query result that will be used as folder name for organizing files', + placeholder: 'Ex: Name', initial: filesExportConfig.outputFolderNameField, }, { - type: "select", - name: "outputFileNameFormat", + type: 'select', + name: 'outputFileNameFormat', choices: [ - { value: "title", title: "title" }, - { value: "title_id", title: "title_id" }, - { value: "id_title", title: "id_title" }, - { value: "id", title: "id" }, + { value: 'title', title: 'title (ex: "Cloudity New Project")' }, + { value: 'title_id', title: 'title_id (ex: "Cloudity New Project_006bR00000Bet7WQAR")' }, + { value: 'id_title', title: 'id_title (ex: "006bR00000Bet7WQAR_Cloudity New Project")' }, + { value: 'id', title: 'id (ex: "006bR00000Bet7WQAR")' }, ], - message: "Please select the format of output files names", + message: 'Please select the format of output files names', + description: 'Choose how downloaded file names should be formatted', initial: filesExportConfig.outputFileNameFormat, }, { - type: "confirm", - name: "overwriteParentRecords", - message: "Do you want to try to download files attached to a parent records whose folder is already existing in local folders ?", + type: 'confirm', + name: 'overwriteParentRecords', + message: + 'Do you want to try to download files attached to a 
parent records whose folder is already existing in local folders ?', + description: 'Allow downloading files for records that already have a local folder', initial: filesExportConfig.overwriteParentRecords, }, { - type: "confirm", - name: "overwriteFiles", - message: "Do you want to overwrite file that has already been previously downloaded ?", + type: 'confirm', + name: 'overwriteFiles', + message: 'Do you want to overwrite file that has already been previously downloaded ?', + description: 'Replace existing local files with newly downloaded versions', initial: filesExportConfig.overwriteFiles, }, - ], + { + type: 'number', + name: 'fileSizeMin', + message: 'Please input the minimum file size in KB (0 = no minimum)', + description: 'Only files with size greater than or equal to this value will be downloaded (in kilobytes)', + placeholder: 'Ex: 10', + initial: filesExportConfig.fileSizeMin || 0, + min: 0, + }, + ] ); const resp = await prompts(questions); @@ -618,6 +1418,7 @@ export async function promptFilesExportConfiguration(filesExportConfig: any, ove outputFileNameFormat: resp.outputFileNameFormat, overwriteParentRecords: resp.overwriteParentRecords, overwriteFiles: resp.overwriteFiles, + fileSizeMin: resp.fileSizeMin, }); return filesConfig; } @@ -628,16 +1429,16 @@ export async function countLinesInFile(file: string) { return await new Promise((resolve) => { fs.createReadStream(file) .pipe(split()) - .on("data", () => { + .on('data', () => { lineCount++; }) - .on("end", () => { + .on('end', () => { if (readError) { return; } resolve(lineCount - 1); }) - .on("error", (error) => { + .on('error', (error) => { readError = true; resolve(error); }); @@ -652,13 +1453,26 @@ export async function countLinesInFile(file: string) { * It then joins the report directory, file name prefix, and branch name to form the full path of the report. * * @param {string} fileNamePrefix - The prefix for the file name. + * @param {string} outputFile - The output file path. 
If null, a new path is generated. + * @param {Object} [options] - Additional options for generating the report path. + * @param {boolean} [options.withDate=false] - Whether to append a timestamp to the file name. * @returns {Promise} - A Promise that resolves to the full path of the report. */ -export async function generateReportPath(fileNamePrefix: string, outputFile: string): Promise { +export async function generateReportPath(fileNamePrefix: string, outputFile: string, options: { withDate: boolean } = { withDate: false }): Promise { if (outputFile == null) { const reportDir = await getReportDirectory(); - const branchName = process.env.CI_COMMIT_REF_NAME || (await getCurrentGitBranch({ formatted: true })) || "Missing CI_COMMIT_REF_NAME variable"; - return path.join(reportDir, `${fileNamePrefix}-${branchName.split("/").pop()}.csv`); + const branchName = + (!isGitRepo()) ? 'no-git' : + process.env.CI_COMMIT_REF_NAME || + (await getCurrentGitBranch({ formatted: true })) || + 'branch-not-found'; + let newOutputFile = path.join(reportDir, `${fileNamePrefix}-${branchName.split('/').pop()}.csv`); + if (options.withDate) { + // Add date time info + const date = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').split('.')[0]; + newOutputFile = path.join(reportDir, `${fileNamePrefix}-${branchName.split('/').pop()}-${date}.csv`); + } + return newOutputFile; } else { await fs.ensureDir(path.dirname(outputFile)); return outputFile; @@ -674,46 +1488,71 @@ export async function generateReportPath(fileNamePrefix: string, outputFile: str * @param {string} outputPath - The path where the CSV file will be written. * @returns {Promise} - A Promise that resolves when the operation is complete. 
*/ -export async function generateCsvFile(data: any[], outputPath: string): Promise { +export async function generateCsvFile( + data: any[], + outputPath: string, + options: { fileTitle?: string, csvFileTitle?: string, xlsFileTitle?: string, noExcel?: boolean } +): Promise { const result: any = {}; try { const csvContent = Papa.unparse(data); - await fs.writeFile(outputPath, csvContent, "utf8"); - uxLog(this, c.italic(c.cyan(`Please see detailed CSV log in ${c.bold(outputPath)}`))); + await fs.writeFile(outputPath, csvContent, 'utf8'); + uxLog("action", this, c.cyan(c.italic(`Please see detailed CSV log in ${c.bold(outputPath)}`))); result.csvFile = outputPath; - WebSocketClient.requestOpenFile(outputPath); - if (data.length > 0) { - try { - // Generate mirror XSLX file - const xlsDirName = path.join(path.dirname(outputPath), "xls"); - const xslFileName = path.basename(outputPath).replace(".csv", ".xlsx"); - const xslxFile = path.join(xlsDirName, xslFileName); - await fs.ensureDir(xlsDirName); - await csvToXls(outputPath, xslxFile); - uxLog(this, c.italic(c.cyan(`Please see detailed XSLX log in ${c.bold(xslxFile)}`))); - result.xlsxFile = xslxFile; - } catch (e2) { - uxLog(this, c.yellow("Error while generating XSLX log file:\n" + e2.message + "\n" + e2.stack)); - } + if (!WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.requestOpenFile(outputPath); + } + const csvFileTitle = options?.fileTitle ? `${options.fileTitle} (CSV)` : options?.csvFileTitle ?? 
"Report (CSV)"; + WebSocketClient.sendReportFileMessage(outputPath, csvFileTitle, "report"); + if (data.length > 0 && !options?.noExcel) { + await createXlsxFromCsv(outputPath, options, result); } else { - uxLog(this, c.grey(`No XLS file generated as ${outputPath} is empty`)); + uxLog("other", this, c.grey(`No XLS file generated as ${outputPath} is empty`)); } } catch (e) { - uxLog(this, c.yellow("Error while generating CSV log file:\n" + e.message + "\n" + e.stack)); + uxLog("warning", this, c.yellow('Error while generating CSV log file:\n' + (e as Error).message + '\n' + (e as Error).stack)); } return result; } +async function createXlsxFromCsv(outputPath: string, options: { fileTitle?: string; csvFileTitle?: string; xlsFileTitle?: string; noExcel?: boolean; }, result: any) { + try { + const xlsDirName = path.join(path.dirname(outputPath), 'xls'); + const xslFileName = path.basename(outputPath).replace('.csv', '.xlsx'); + const xslxFile = path.join(xlsDirName, xslFileName); + await fs.ensureDir(xlsDirName); + await csvToXls(outputPath, xslxFile); + uxLog("action", this, c.cyan(c.italic(`Please see detailed XLSX log in ${c.bold(xslxFile)}`))); + const xlsFileTitle = options?.fileTitle ? `${options.fileTitle} (XLSX)` : options?.xlsFileTitle ?? "Report (XLSX)"; + WebSocketClient.sendReportFileMessage(xslxFile, xlsFileTitle, "report"); + result.xlsxFile = xslxFile; + if (!isCI && !(process.env.NO_OPEN === 'true') && !WebSocketClient.isAliveWithLwcUI()) { + try { + uxLog("other", this, c.italic(c.grey(`Opening XLSX file ${c.bold(xslxFile)}... 
(define NO_OPEN=true to disable this)`))); + await open(xslxFile, { wait: false }); + } catch (e) { + uxLog("warning", this, c.yellow('Error while opening XLSX file:\n' + (e as Error).message + '\n' + (e as Error).stack)); + } + } + } catch (e2) { + uxLog( + "warning", + this, + c.yellow('Error while generating XLSX log file:\n' + (e2 as Error).message + '\n' + (e2 as Error).stack) + ); + } +} + async function csvToXls(csvFile: string, xslxFile: string) { const workbook = new ExcelJS.Workbook(); const worksheet = await workbook.csv.readFile(csvFile); // Set filters - worksheet.autoFilter = "A1:Z1"; + worksheet.autoFilter = 'A1:Z1'; // Adjust column size (only if the file is not too big, to avoid performances issues) if (worksheet.rowCount < 5000) { worksheet.columns.forEach((column) => { - const lengths = column.values.map((v) => v.toString().length); - const maxLength = Math.max(...lengths.filter((v) => typeof v === "number")); + const lengths = (column.values || []).map((v) => (v || '').toString().length); + const maxLength = Math.max(...lengths.filter((v) => typeof v === 'number')); column.width = maxLength; }); } diff --git a/src/common/utils/flowVisualiser/flowParser.ts b/src/common/utils/flowVisualiser/flowParser.ts new file mode 100644 index 000000000..f7adbec42 --- /dev/null +++ b/src/common/utils/flowVisualiser/flowParser.ts @@ -0,0 +1,434 @@ +import { NODE_CONFIG } from "./renderConfig.js"; + +import { XMLParser } from "fast-xml-parser"; +import { CONSTANTS } from "../../../config/index.js"; +import { getCurrentGitBranch } from "../index.js"; +import farmhash from 'farmhash'; +import { buildCustomMarkdownTable, buildGenericMarkdownTable, flowNodeToMarkdown, handleFilterItems, handleInputParameters, handleprocessMetadataValues, handleSchedule, handleScheduledPaths, mdEndSection, simplifyNode, stringifyValue } from "./nodeFormatUtils.js"; + +interface FlowMap { + "description"?: string; + "label"?: string; + "processType"?: string; // TODO + "start"?: any; 
+ "status"?: "Active" | "Draft"; + [propName: string]: any; +} + +const FLOW_NODE_TYPES = [ + 'actionCalls', + 'assignments', + 'customErrors', + 'collectionProcessors', + 'decisions', + 'loops', + 'recordCreates', + 'recordDeletes', + 'recordLookups', + 'recordUpdates', + 'screens', + 'subflows', + 'transforms' +]; + +export async function parseFlow(xml: string, renderAs: "mermaid" | "plantuml" = "mermaid", options: any = {}): Promise<{ flowMap: FlowMap, uml: string }> { + try { + const flowObj = new XMLParser().parse(xml).Flow; + const flowMap = await createFlowMap(flowObj); + if (Object.keys(flowMap).length === 0) { + throw new Error("no-renderable-content-found"); + } + if (renderAs === "mermaid") { + return { + flowMap: flowMap, + uml: await generateMermaidContent(flowMap, flowObj, options) + }; + } + throw new Error("unknown-renderAs-" + renderAs); + } catch (error) { + console.error("salesforce-flow-visualiser", error); + throw (error); + } +} + +/*=================================================================== + * P R I V A T E + *=================================================================*/ +async function createFlowMap(flowObj: any): Promise { + const flowMap: FlowMap = {}; + for (const property in flowObj) { + // Common first descriptive elements + if (['description', 'environments', 'formulas', 'interviewLabel', 'label', 'processType', 'status', 'textTemplates'].includes(property)) { + flowMap[property] = flowObj[property]; + if (property === "formulas" && !Array.isArray(flowObj[property])) { + flowMap["formulas"] = [flowObj["formulas"]]; + } + } + // Start element + else if (property === 'start') { + flowMap[property] = flowObj[property]; + flowMap[property].type = property; + flowMap[property].nextNode = flowObj[property].connector?.targetReference; + flowMap[property].scheduledPaths = (!flowMap[property].scheduledPaths) ? [] : (flowMap[property].scheduledPaths.length) ? 
flowMap[property].scheduledPaths : [flowMap[property].scheduledPaths]; + flowMap[property].flowNodeDescription = flowObj[property] + } + else { + // If only one entry (e.g one loop) then it will be an object, not an Array, so make it an Array of one + if (!flowObj[property].length) { + flowObj[property] = [flowObj[property]] + } + // Loop through array and create an mapped entry for each + for (const el of flowObj[property]) { + if (el.name) { + let nextNode; + let tmpRules; + switch (property) { + case 'decisions': + nextNode = (el.defaultConnector) ? el.defaultConnector.targetReference : "END"; + tmpRules = (el.rules.length) ? el.rules : [el.rules]; + el.rules2 = tmpRules.map((ruleEl: any) => { + return { + name: ruleEl.name, + label: ruleEl.label, + nextNode: ruleEl.connector, + nextNodeLabel: el.defaultConnectorLabel, + } + }); + break; + case 'loops': + nextNode = (el.noMoreValuesConnector) ? el.noMoreValuesConnector.targetReference : "END"; + break; + default: + if (el.connector) { + nextNode = el.connector.targetReference; + } + break; + } + + if ((NODE_CONFIG)[property]) { + const mappedEl = { + name: el.name, + label: el.label, + type: property, + nextNode: nextNode, + faultPath: el.faultConnector?.targetReference, + nextNodeLabel: el.defaultConnectorLabel, + nextValueConnector: (el.nextValueConnector) ? 
+ el.nextValueConnector.targetReference : null, + rules: el.rules2, + elementSubtype: el.elementSubtype, + actionType: el.actionType, + flowNodeDescription: el + } + flowMap[el.name] = mappedEl; + flowMap[el.name].flowNodeDescription.type = property; + } else if (property === 'variables') { + flowMap.variables = flowObj[property]; + } else if (property === 'constants') { + flowMap.constants = flowObj[property]; + } + } + } + } + } + return (flowMap); +} + +function getFlowType(flowMap: FlowMap): string { + if (flowMap.processType === 'Flow') { + return "Screen Flow"; + } + // Avoid crash if flowMap.start is not set + else if (!flowMap.start) { + return flowMap.processType || "ERROR: no processType"; + } + else { + switch (flowMap.start.triggerType) { + case "Scheduled": + return "Scheduled Flow" + case "RecordAfterSave": + return "Record Triggered Flow: After Save (" + flowMap.start.object + ")"; + case "RecordBeforeSave": + return "Record Triggered Flow: Before Save (" + flowMap.start.object + ")"; + case "PlatformEvent": + return "Platform Event Triggered flow (" + flowMap.start.object + ")"; + default: + return flowMap.processType || "ERROR: no processType"; + } + } +} + +/*=================================================================== + * M E R M A I D + *=================================================================*/ +async function generateMermaidContent(flowMap: FlowMap, flowObj: any, options: any): Promise { + // console.log("options", options) + const flowType = getFlowType(flowMap); + const title = `# ${flowMap['label']}\n\n`; + const { generalInfoMd, startFingerPrint, startNodeLabel, startElementReference } = getGeneralInfoMd(flowObj, flowMap); + const variables = getVariablesMd(flowMap.variables || []); + const constants = getConstantsMd(flowMap.constants || []); + const formulas = getFormulasMd(flowMap.formulas || []); + const textTemplates = getTemplatesMd(flowMap.textTemplates || []); + const mdStart = "## Flow Diagram\n\n```mermaid\n"; + 
const { nodeDefStr, nodeDetailMd } = await getNodeDefStr(flowMap, flowType, startFingerPrint, startNodeLabel, startElementReference, options); + const mdClasses = getMermaidClasses() + "\n\n"; + const mdBody = await getMermaidBody(flowMap) + "\n\n"; + const mdEnd = "```\n\n\n\n"; + const currentBranch = await getCurrentGitBranch(); + const footer = `\n\n___\n\n_Documentation generated from branch ${currentBranch} by [sfdx-hardis](${CONSTANTS.DOC_URL_ROOT}), featuring [salesforce-flow-visualiser](https://github.com/toddhalfpenny/salesforce-flow-visualiser)_`; + const mdDiagram = + "%% If you read this, your Markdown visualizer does not handle MermaidJS syntax.\n" + + "%% - If you are in VsCode, install extension `Markdown Preview Mermaid Support` at https://marketplace.visualstudio.com/items?itemName=bierner.markdown-mermaid\n" + + "%% - If you are using sfdx-hardis, try to define env variable `MERMAID_MODES=cli,docker` ,then run again the command to regenerate markdown with SVG images.\n" + + "%% - If you are within mkdocs-material, define mermaid plugin in `mkdocs.yml` as described in https://squidfunk.github.io/mkdocs-material/extensions/mermaid/\n" + + "%% - At last resort, you can copy-paste this MermaidJS code in https://mermaid.live/ to see the Flow Diagram\n\n" + + + "flowchart TB\n" + + nodeDefStr + + mdBody + + mdClasses + if (options.wrapInMarkdown === false) { + return (mdDiagram); + } else { + return (title + mdStart + mdDiagram + mdEnd + generalInfoMd + variables + formulas + constants + textTemplates + nodeDetailMd + footer); + } +} + +async function getMermaidBody(flowMap: FlowMap): Promise { + let bodyStr = ""; + const endNodeIds: string[] = []; + for (const property in flowMap) { + const node = flowMap[property]; + const type = node.type; + let nextNode = node.nextNode ? node.nextNode : "END" + if (nextNode === "END") { + nextNode = "END_" + node.name; + } + let faultNode = node.faultPath ? 
node.faultPath : "END" + if (faultNode === "END") { + faultNode = "END_" + node.name; + } + let loopNextNode; + switch (type) { + case 'actionCalls': + case 'assignments': + case 'collectionProcessors': + case 'customErrors': + case 'recordCreates': + case 'recordDeletes': + case 'recordLookups': + case 'recordUpdates': + case 'screens': + case 'transforms': + bodyStr += node.name + " --> " + nextNode + "\n"; + manageAddEndNode(nextNode, endNodeIds); + if (node.faultPath) { + bodyStr += node.name + " -. Fault .->" + faultNode + "\n"; + manageAddEndNode(faultNode, endNodeIds); + } + break; + case 'start': + if (!nextNode.startsWith("END")) { + // 'start' may not have a default path + const defaultPathLabel = (node.scheduledPaths.length > 0) ? '|"Run Immediately"|' : ""; + bodyStr += "START --> " + defaultPathLabel + " " + nextNode + "\n"; + } + // scheduled paths + for (const path of node.scheduledPaths) { + path.label = (path.label) ? path.label : 'Run Immediately'; + if (path?.connector?.targetReference) { + bodyStr += 'START --> |"' + path.label + '"| ' + path.connector.targetReference + "\n"; + } + else if (nextNode) { + bodyStr += 'START --> |"' + path.label + '"| ' + nextNode + "\n"; + } + } + + break; + case 'decisions': + // rules + for (const rule of node.rules) { + if (rule.nextNode?.targetReference) { + bodyStr += node.name + ' --> |"' + rule.label + '"| ' + rule.nextNode.targetReference + "\n"; + } + } + + // default + bodyStr += node.name + ' --> |"' + node.nextNodeLabel + '"| ' + nextNode + "\n"; + manageAddEndNode(nextNode, endNodeIds); + break; + case 'loops': + loopNextNode = node.nextValueConnector; + bodyStr += node.name + ' --> |"For Each"|' + loopNextNode + "\n"; + bodyStr += node.name + ' ---> |"After Last"|' + node.nextNode + "\n"; + break; + case 'subflows': + bodyStr += node.name + " --> " + nextNode + "\n"; + manageAddEndNode(nextNode, endNodeIds); + } + } + for (const endNodeId of [...new Set(endNodeIds)]) { + bodyStr += `${endNodeId}(( 
END )):::endClass\n`; + } + return (bodyStr); +} + +function manageAddEndNode(nextOrFaultNode: string, endNodeIds: string[]) { + if (nextOrFaultNode.startsWith("END")) { + endNodeIds.push(nextOrFaultNode); + } +} + +async function getNodeDefStr(flowMap: FlowMap, flowType: string, startFingerPrint: number, startNodeLabel: string, startElementReference: string, options: any): Promise { + let nodeDetailMd = "## Flow Nodes Details\n\n" + if (options?.collapsedDetails) { + nodeDetailMd += "
NODES CONTENT (expand to view)\n\n" + } + let nodeDefStr = ""; + if (!["InvocableProcess", "Workflow"].includes(flowType) || (startNodeLabel !== 'START')) { + nodeDefStr += `START(["${startNodeLabel}"]):::startClass\n` + nodeDefStr += `click START "#general-information" "${startFingerPrint}"\n\n`; + if (startElementReference) { + nodeDefStr += `START --> ${startElementReference}\n` + } + } + const allproperties = Object.keys(flowMap); + for (const property of allproperties) { + const type = flowMap?.[property]?.type; + let label: string = ((NODE_CONFIG)[type]) ? (NODE_CONFIG)[type].label : ""; + let icon: string = ((NODE_CONFIG)[type]) ? (NODE_CONFIG)[type].mermaidIcon : null; + let nodeSimplified; + let tooltipClassMermaid; + if (type === 'actionCalls') { + icon = ((NODE_CONFIG)[type].mermaidIcon[flowMap[property].actionType]) ? + (NODE_CONFIG)[type].mermaidIcon[flowMap[property].actionType] : + (NODE_CONFIG)[type].mermaidIcon.submit; + } + else if (type === 'collectionProcessors') { + icon = ((NODE_CONFIG)[type].mermaidIcon[flowMap[property].elementSubtype]) ? + (NODE_CONFIG)[type].mermaidIcon[flowMap[property].elementSubtype] : + (NODE_CONFIG)[type].mermaidIcon.submit; + + label = ((NODE_CONFIG)[type].label[flowMap[property].elementSubtype]) ? + (NODE_CONFIG)[type].label[flowMap[property].elementSubtype] : + (NODE_CONFIG)[type].label; + } + // Create Mermaid Lines + if (FLOW_NODE_TYPES.includes(type)) { + // Mermaid node + nodeDefStr += property + (NODE_CONFIG)[type].mermaidOpen + '"' + icon + " " + label + "
" + flowMap[property].label + '"' + (NODE_CONFIG)[type].mermaidClose + ':::' + type + "\n" + // Remove not relevant properties from node display + nodeSimplified = simplifyNode(flowMap[property]?.flowNodeDescription || flowMap[property]); + // Mermaid compare node + tooltipClassMermaid = `click ${property} "#${property.toLowerCase()}" "${farmhash.fingerprint32(JSON.stringify(nodeSimplified))}"`; + nodeDefStr += tooltipClassMermaid + "\n\n" + // Markdown details + nodeDetailMd += `### ${property}\n\n` + flowNodeToMarkdown(nodeSimplified, allproperties); + } + } + if (options?.collapsedDetails) { + nodeDetailMd += "
\n\n" + } + return { + nodeDefStr: nodeDefStr, + nodeDetailMd: nodeDetailMd + "\n\n" + }; +} + +function getGeneralInfoMd(flowObj: any, flowMap: FlowMap) { + let flowObjCopy = Object.assign({}, flowObj); + // Remove sections that are somewhere else + for (const nodeKey of [...["constants", "formulas", "variables"], ...FLOW_NODE_TYPES]) { + delete flowObjCopy[nodeKey]; + } + const metadataValue = handleprocessMetadataValues(flowObjCopy, Object.keys(flowMap)); + // Remove nodes that will be processed after + for (const nodeKey of Object.keys(flowObjCopy)) { + if (typeof flowObjCopy?.[nodeKey] === "object" && flowObjCopy?.[nodeKey]?.name !== 'null') { + delete flowObjCopy[nodeKey]; + } + } + const startFingerPrint = farmhash.fingerprint32(JSON.stringify(flowObjCopy)); + handleInputParameters(flowObjCopy, Object.keys(flowMap)); + let startNodeLabel = "START"; + let detailTablesMd = "" + let startElementReference = "" + if (flowObj.start) { + const startObjCopy = simplifyNode(Object.assign({}, flowObj.start.flowNodeDescription || flowObj.start)); + delete startObjCopy.flowNodeDescription; + flowObjCopy = Object.assign({}, startObjCopy, flowObjCopy); + delete flowObjCopy.start + delete flowObjCopy.type + detailTablesMd += handleSchedule(flowObjCopy, Object.keys(flowMap)); + detailTablesMd += handleScheduledPaths(flowObjCopy, Object.keys(flowMap)); + detailTablesMd += handleFilterItems(flowObjCopy, Object.keys(flowMap)); + if (flowObjCopy.processType === "Flow") { + startNodeLabel = "START
" + "Screen Flow" + } + else if (flowObjCopy.processType === "AutoLaunchedFlow") { + startNodeLabel = "START
" + "AutoLaunched Flow
" + + (flowObjCopy.triggerType ? "Type: " + stringifyValue(flowObjCopy.triggerType, "triggerType", Object.keys(flowObjCopy)) + "
" : '') + } + else { + startNodeLabel = "START
" + + (flowObjCopy.object ? "" + stringifyValue(flowObjCopy.object, "object", Object.keys(flowObjCopy)) + "
" : '') + + (flowObjCopy.triggerType ? "Type: " + stringifyValue(flowObjCopy.triggerType, "triggerType", Object.keys(flowObjCopy)) + "
" : '') + + (flowObjCopy.recordTriggerType ? "On: " + stringifyValue(flowObjCopy.recordTriggerType, "recordTriggerType", Object.keys(flowObjCopy)) + "
" : ''); + } + } + else if (metadataValue && metadataValue.TriggerType && metadataValue.ObjectType) { + startNodeLabel = "START
" + "" + metadataValue.ObjectType + "
" + + (metadataValue.TriggerType ? "Type: " + stringifyValue(metadataValue.TriggerType, "triggerType", Object.keys(flowObjCopy)) + "
" : '') + startElementReference = flowObj.startElementReference; + } + else if (flowObj.startElementReference) { + startElementReference = flowObj.startElementReference; + } + const generalInfoMd = mdEndSection(buildGenericMarkdownTable(flowObjCopy, ["allFields"], "## General Information", Object.keys(flowMap)) + detailTablesMd); + if (startNodeLabel.endsWith("
")) { + startNodeLabel = startNodeLabel.slice(0, -5); + } + return { + generalInfoMd: generalInfoMd, + startNodeLabel: startNodeLabel, + startFingerPrint: startFingerPrint, + startElementReference: startElementReference + } +} + +function getVariablesMd(vars: any[]): string { + if (vars && vars.length > 0) { + return mdEndSection(buildCustomMarkdownTable(vars, ["name", "dataType", "isCollection", "isInput", "isOutput", "objectType", "description"], "## Variables", [])); + } + return ""; +} + +function getConstantsMd(constants: any[]): string { + if (constants && constants.length > 0) { + return mdEndSection(buildCustomMarkdownTable(constants, ["name", "dataType", "value", "description"], "## Constants", [])); + } + return ""; +} + +function getFormulasMd(formulas: any[]): string { + if (formulas && formulas.length > 0) { + return mdEndSection(buildCustomMarkdownTable(formulas, ["name", "dataType", "expression", "description"], "## Formulas", [])); + } + return ""; +} + +function getTemplatesMd(textTemplates: any[]): string { + if (textTemplates && textTemplates.length > 0) { + return mdEndSection(buildCustomMarkdownTable(textTemplates, ["name", "text", "description"], "## Text Templates", [])); + } + return ""; +} + +function getMermaidClasses(): string { + let classStr = ""; + for (const property in NODE_CONFIG) { + classStr += "classDef " + property + " fill:" + (NODE_CONFIG)[property].background + ",color:" + (NODE_CONFIG)[property].color + ",text-decoration:none,max-height:100px\n"; + } + return classStr; +} diff --git a/src/common/utils/flowVisualiser/nodeFormatUtils.ts b/src/common/utils/flowVisualiser/nodeFormatUtils.ts new file mode 100644 index 000000000..e62d557eb --- /dev/null +++ b/src/common/utils/flowVisualiser/nodeFormatUtils.ts @@ -0,0 +1,398 @@ +import * as yaml from 'js-yaml'; +import moment from 'moment'; +import * as fs from 'fs'; +import path from 'path'; + +const FIELDS_WITH_VALUES_TO_FORMAT = [ + "actionType", + "fieldType", + 
"inputsOnNextNavToAssocScrn", + "processType", + "recordTriggerType", + "triggerType", + "regionContainerType", + "runInMode", + "type" +]; + +const FIELDS_WITH_VALUES_TO_FORMAT_ENUM = { + "status": { + "Draft": "⚠️ Draft", + "Inactive": "⚠️ Inactive", + "InvalidDraft": "⚠️ Invalid Draft" + } +} + +const FIELDS_PREFERRED_ORDER_START = [ + "type", + "object", + "processType", + "triggerType", + "recordTriggerType", + "label", + "status", + "actionType", + "actionName", + "dataType", + "objectType" +]; + +const FIELDS_PREFERRED_ORDER_END = [ + "connector", + "nextNode", + "noMoreValuesConnector", + "conditionLogic", + "filterLogic", + +]; + +const FIELDS_WITH_COLUMN_CENTERED = [ + "dataType", + "objectType", + "operator", + "isCollection", + "isInput", + "isOutput", + "rightValue", + "startDate", + "startTime", + "value" +] + +export function simplifyNode(flowNode: any): any { + const nodeCopy = Object.assign({}, flowNode); + for (const nodeKey of Object.keys(nodeCopy)) { + if (["locationX", "locationY"].includes(nodeKey)) { + delete nodeCopy[nodeKey] + } + else if (nodeCopy[nodeKey] === null || nodeCopy[nodeKey] === undefined) { + delete nodeCopy[nodeKey] + } + } + return nodeCopy; +} + +export function flowNodeToMarkdown(flowNodeIn: any, allProperties: string[]): string { + const flowNode = Object.assign({}, flowNodeIn); + delete flowNode["name"]; + const additionalTables: any[] = []; + + // Properties that can be found on multiple flow node types + handleprocessMetadataValues(flowNode, allProperties); + handleInputParameters(flowNode, allProperties); + const conditionsTable = handleConditions(flowNode, allProperties); + additionalTables.push(conditionsTable); + const filterItemsTable = handleFilterItems(flowNode, allProperties); + additionalTables.push(filterItemsTable); + const inputAssignmentsTable = handleInputAssignments(flowNode, allProperties); + additionalTables.push(inputAssignmentsTable); + const assignmentItemsTable = handleAssignmentItems(flowNode, 
allProperties); + additionalTables.push(assignmentItemsTable); + const scheduledPathsTable = handleScheduledPaths(flowNode, allProperties); + additionalTables.push(scheduledPathsTable); + + // Special case of decisions + if (flowNode.type === "decisions") { + const rules = getElementAsArray(flowNode, "rules"); + delete flowNode.rules; + delete flowNode.rules2; + for (const rule of rules) { + const ruleNode = Object.assign({}, rule); + delete ruleNode.name; + delete ruleNode.label; + const ruleConditionsTable = handleConditions(ruleNode, allProperties); + const ruleTable = buildGenericMarkdownTable(ruleNode, ["allFields"], `#### Rule ${rule.name} (${rule.label})`, allProperties); + additionalTables.push(...[ruleTable, mdEndSection(ruleConditionsTable)]) + } + } + // Special case of transforms + else if (flowNode.type === "transforms") { + const transformValues = flowNode.transformValues || {}; + const transformValueActions = getElementAsArray(transformValues, "transformValueActions"); + delete flowNode.transformValues; + const transformsTable = buildCustomMarkdownTable(transformValueActions, ["transformType", "value", "outputFieldApiName"], "#### Transform actions", allProperties); + additionalTables.push(transformsTable); + } + else if (flowNode.type === "screens") { + handleFields(flowNode, allProperties, "", additionalTables); + } + else if (flowNode.type === "start") { + delete flowNode.type; + } + // Build final markdown for Node + let table = buildGenericMarkdownTable(flowNode, ["allFields"], "", allProperties); + for (const additionalTable of additionalTables) { + if (additionalTable !== "") { + table += additionalTable + "\n\n"; + } + } + return mdEndSection(table); +} + +function handleFields(flowNode: any, allProperties: string[], parentField: string = "", additionalTables: any[]) { + const fields = getElementAsArray(flowNode, "fields"); + delete flowNode.fields; + for (const field of fields) { + const fieldNode = Object.assign({}, field); + const 
fieldName = "" + (field.name || field.objectFieldReference); + delete fieldNode.name; + if (parentField) { + fieldNode.parentField = parentField; + allProperties.push(parentField); + } + handleInputParameters(fieldNode, allProperties); + const fieldsBefore = getElementAsArray(fieldNode, "fields"); + delete fieldNode.fields; + const fieldTable = buildGenericMarkdownTable(fieldNode, ["allFields"], `#### ${fieldName}`, allProperties); + // Handle recursive loop + if (fieldsBefore) { + fieldNode.name = fieldName; + fieldNode.fields = fieldsBefore; + handleFields(fieldNode, allProperties, fieldName, additionalTables); + additionalTables.push(...[mdEndSection(fieldTable)]); + } + } +} + +function handleConditions(ruleNode: any, allProperties: string[]) { + const conditions = getElementAsArray(ruleNode, "conditions"); + if (conditions.length === 0) { + return "" + } + let id = 0; + const conditionsValues = conditions.map((item: any) => { + id++; + return { + conditionId: id, + leftValueReference: item.leftValueReference, + operator: stringifyOperator(item.operator), + rightValue: (item.operator === "IsNull" && item.rightValue === "false") ? 
"" : stringifyValue(item.rightValue, "", allProperties) + }; + }); + delete ruleNode.conditions; + /* let descriptiveLine = ""; + if (ruleNode.conditionLogic) { + descriptiveLine += "\n\nConditions logic: **" + ruleNode.conditionLogic + "**\n\n"; + delete ruleNode.conditionLogic; + } */ + return buildCustomMarkdownTable(conditionsValues, ["conditionId", "leftValueReference", "operator", "rightValue"], "", allProperties); +} + +function handleInputAssignments(flowNode: any, allProperties: string[]): string { + const inputAssignmentsItems = getElementAsArray(flowNode, "inputAssignments"); + if (inputAssignmentsItems.length === 0) { + return "" + } + const inputAssignmentsItemsValues = inputAssignmentsItems.map((item: any) => { + return { + field: item.field, + value: stringifyValue(item.value, item.field, allProperties) + }; + }); + delete flowNode.inputAssignments; + return buildCustomMarkdownTable(inputAssignmentsItemsValues, ["field", "value"], "#### Input Assignments", allProperties); +} + +export function handleSchedule(flowNode: any, allProperties: string[]): string { + const scheduleItems = getElementAsArray(flowNode, "schedule"); + if (scheduleItems.length === 0) { + return "" + } + const scheduleItemsValues = scheduleItems.map((item: any) => { + const startDateFormatted = moment(item.startDate).format("ll"); + const startTimeFormatted = item?.startTime?.endsWith("Z") ? item.startTime.slice(0, 5) : item.startTime; + return { + frequency: item.frequency, + startDate: !startDateFormatted.includes("Invalid") ? startDateFormatted : item.startDate, + startTime: !startTimeFormatted.includes("Invalid") ? 
startTimeFormatted : item.startTime, + }; + }); + delete flowNode.schedule; + return buildCustomMarkdownTable(scheduleItemsValues, ["frequency", "startDate", "startTime"], "#### Schedules", allProperties); +} + +export function handleFilterItems(flowNode: any, allProperties: string[]): string { + const filterItems = getElementAsArray(flowNode, "filters"); + if (filterItems.length === 0) { + return "" + } + let id = 0; + const filterItemsValues = filterItems.map((item: any) => { + id++; + return { + filterId: id, + field: item.field, + operator: stringifyOperator(item.operator), + value: item.operator === "IsNull" ? "" : stringifyValue(item.value, item.field, allProperties) + }; + }); + delete flowNode.filters; + let descriptiveLine = ""; + if (flowNode.filterLogic) { + descriptiveLine += " (logic: **" + flowNode.filterLogic + "**)"; + delete flowNode.filterLogic; + } + return buildCustomMarkdownTable(filterItemsValues, ["filterId", "field", "operator", "value"], "#### Filters" + descriptiveLine, allProperties); +} + +function handleAssignmentItems(flowNode: any, allProperties: string[]) { + const assignmentItems = getElementAsArray(flowNode, "assignmentItems"); + if (assignmentItems.length === 0) { + return ""; + } + const assignmentItemsValues = assignmentItems.map((item: any) => { + const value = item?.value?.elementReference || stringifyValue(item.value, "", allProperties); + return { + assignToReference: item.assignToReference, + operator: stringifyOperator(item.operator), + value: stringifyValue(value, "", allProperties) + }; + }); + delete flowNode.assignmentItems; + return buildCustomMarkdownTable(assignmentItemsValues, ["assignToReference", "operator", "value"], "#### Assignments", allProperties); +} + +export function handleScheduledPaths(flowNode: any, allProperties: string[]) { + const scheduledPaths = getElementAsArray(flowNode, "scheduledPaths"); + delete flowNode.scheduledPaths; + if (scheduledPaths.length === 0) { + return ""; + } + return 
buildCustomMarkdownTable(scheduledPaths, ["label", "name", "offsetNumber", "offsetUnit", "recordField", "timeSource", "connector"], "#### Scheduled Paths", allProperties); +} + +export function handleInputParameters(flowNode: any, allProperties: string[]) { + const inputParameters = getElementAsArray(flowNode, "inputParameters"); + for (const inputParam of inputParameters) { + const inputParamName = `${inputParam.name} (input)`; + flowNode[inputParamName] = stringifyValue(inputParam.value, inputParam.name, allProperties); + } + delete flowNode.inputParameters; +} + +export function handleprocessMetadataValues(flowNode: any, allProperties: string[]) { + const metadataValues: any = {}; + const processMetadataValues = getElementAsArray(flowNode, "processMetadataValues"); + for (const processMetadataValue of processMetadataValues) { + const inputParamName = `${processMetadataValue.name} (PM)`; + flowNode[inputParamName] = stringifyValue(processMetadataValue.value, processMetadataValue.name, allProperties); + metadataValues[processMetadataValue.name] = flowNode[inputParamName]; + } + delete flowNode.processMetadataValues; + return metadataValues; +} + +export function buildGenericMarkdownTable(item: any, fields: string[], title: string = "", allProperties: string[]): string { + if (fields[0] === "allFields") { + fields = Object.keys(item); + // Reorder fields according to preferences + const fieldOrderFromStart = FIELDS_PREFERRED_ORDER_START.slice().reverse() + for (const field of fieldOrderFromStart) { + if (fields.includes(field)) { + fields.splice(fields.indexOf(field), 1); + fields.unshift(field); + } + } + for (const field of FIELDS_PREFERRED_ORDER_END) { + if (fields.includes(field)) { + fields.splice(fields.indexOf(field), 1); + fields.push(field); + } + } + } + // Add link to Apex class doc if existing + if (item?.actionType === "apex" && item.actionName && fs.existsSync(path.join("docs", "apex", `${item.actionName}.md`))) { + item.actionName = 
`[${item.actionName}](../apex/${item.actionName}.md)` + } + // Add link to SObject doc if existing + if (item?.sobjectType && fs.existsSync(path.join("docs", "objects", `${item.sobjectType}.md`))) { + item.sobjectType = `[${item.sobjectType}](../objects/${item.sobjectType}.md)` + } + let table = title ? `${title}\n\n` : '' + table += `|||\n|:---|:---|\n`; + for (const field of fields) { + if (item[field] !== undefined && item[field] !== null) { + table += `|${prettifyFieldName(field)}|${stringifyValue(item[field], field, allProperties)}|\n` + } + } + return table + "\n\n"; +} + +export function buildCustomMarkdownTable(items: any, fields: string[], title: string = "", allProperties: string[]): string { + let table = title ? `${title}\n\n` : '' + table += "|" + fields.map(field => prettifyFieldName(field)).join("|") + "|\n"; + table += "|" + fields.map(field => FIELDS_WITH_COLUMN_CENTERED.includes(field) ? ":--:" : ":-- ").join("|") + " |\n"; + for (const item of items) { + const fieldValues = fields.map(field => stringifyValue(item[field], field, allProperties)); + table += "|" + fieldValues.join("|") + "|\n"; + } + return table + "\n\n"; +} + +export function stringifyOperator(operatorIn): string { + return prettifyFieldName(operatorIn); +} + +export function stringifyValue(valueIn: any, field: string, allProperties: string[]): string { + const valueType = typeof valueIn; + // String + let valueStringified = valueType === "string" ? + valueIn.split("\n").join("
") : + // String value + (valueType === "object" && valueIn.stringValue && Object.keys(valueIn).length === 1) ? + valueIn.stringValue : + // Boolean value + (valueType === "object" && (valueIn.booleanValue !== undefined) && Object.keys(valueIn).length === 1) ? + valueIn.booleanValue : + // Number value + (valueType === "object" && valueIn.numberValue && Object.keys(valueIn).length === 1) ? + valueIn.numberValue : + // Target reference + (valueType === "object" && valueIn.targetReference && Object.keys(valueIn).length === 1) ? + valueIn.targetReference : + // Element reference + (valueType === "object" && valueIn.elementReference && Object.keys(valueIn).length === 1) ? + valueIn.elementReference : + // Element reference + (valueType === "object" && field === 'template' && valueIn?.name) ? + valueIn.name : + // Undefined or empty array or empty object + (valueType === "undefined" || (Array.isArray(valueIn) && valueIn.length === 0) || (valueType === "object" && Object.keys(valueIn).length === 0)) ? + '' : + // Default YAML for array & object + (Array.isArray(valueIn) || valueType === "object") ? + yaml.dump(valueIn).replace(/"/gm, "").replace(/^(\s+)/gm, match => ' '.repeat(match.length)).split("\n").join("
") : + // Default + String(valueIn).split("\n").join("
"); + // Final updates if necessary + if (allProperties.includes(valueStringified)) { + valueStringified = `[${valueStringified}](#${valueStringified.toLowerCase()})` + } + else if (FIELDS_WITH_VALUES_TO_FORMAT_ENUM[field] && FIELDS_WITH_VALUES_TO_FORMAT_ENUM[field][valueStringified]) { + valueStringified = FIELDS_WITH_VALUES_TO_FORMAT_ENUM[field][valueStringified]; + } + else if (FIELDS_WITH_VALUES_TO_FORMAT.includes(field)) { + valueStringified = prettifyFieldName(valueStringified); + if (field === "type" && valueStringified.endsWith("s")) { + valueStringified = valueStringified.slice(0, -1); + } + } + else { + valueStringified = valueStringified === "true" ? "✅" : valueStringified === "false" ? "⬜" : valueStringified; + } + return valueStringified; +} + +export function prettifyFieldName(field: string): string { + return field.replace(/([A-Z])/g, " $1").replace(/^./, str => str.toUpperCase()).replace("( P M)", "(PM)").replace("S Object", "SObject"); +} + +export function mdEndSection(sectionString: string) { + if (!sectionString) + return sectionString + "\n\n___\n\n"; + return sectionString; +} + +export function getElementAsArray(node: any, key: string) { + return Array.isArray(node[key]) ? node[key] : typeof node[key] === "object" ? 
[node[key]] : []; +} \ No newline at end of file diff --git a/src/common/utils/flowVisualiser/renderConfig.ts b/src/common/utils/flowVisualiser/renderConfig.ts new file mode 100644 index 000000000..886155adb --- /dev/null +++ b/src/common/utils/flowVisualiser/renderConfig.ts @@ -0,0 +1,136 @@ +export const NODE_CONFIG = { + 'actionCalls': { + background: "#D4E4FC", // Light blue + color: "black", + label: "", + icon: "<&pulse>", + mermaidIcon: { + "apex": "⚙️", + "emailAlert": "📧", + "emailSimple": "📧", + "submit": "⚡" + }, + mermaidClose: ")", + mermaidOpen: "(" + }, + 'assignments': { + background: "#FBEED7", // Light beige + color: "black", + label: "", + icon: "<&menu>", + mermaidIcon: "🟰", + mermaidClose: "/]", + mermaidOpen: "[\\" + }, + 'collectionProcessors': { + background: "#F0E3FA", // Light lavender + color: "black", + label: { + "FilterCollectionProcessor": "Collection Filter", + "SortCollectionProcessor": "Collection Sort", + }, + icon: "<&pulse>", + mermaidIcon: { + "FilterCollectionProcessor": "🔽", + "SortCollectionProcessor": "🔃", + }, + mermaidClose: "}}", + mermaidOpen: "{{" + }, + 'customErrors': { + background: "#FFE9E9", // Pale blush + color: "black", + label: "", + icon: "<&pencil>", + mermaidIcon: "🚫", + mermaidClose: ")", + mermaidOpen: "(" + }, + 'decisions': { + background: "#FDEAF6", // Light pink + color: "black", + label: "", + icon: "<&fork>", + mermaidIcon: "🔀", + mermaidClose: "}", + mermaidOpen: "{" + }, + 'loops': { + background: "#FDEAF6", // Light pink (harmonized with decisions) + label: "", + color: "black", + mermaidIcon: "🔁", + mermaidClose: "}}", + mermaidOpen: "{{" + }, + 'recordCreates': { + background: "#FFF8C9", // Light periwinkle (harmonized with recordCreates and recordDeletes) + color: "black", + label: "", + icon: "<&medical-cross>", + mermaidIcon: "➕", + mermaidClose: ")]", + mermaidOpen: "[(" + }, + 'recordDeletes': { + background: "#FFF8C9", // Light periwinkle (harmonized with recordCreates and recordDeletes) 
+ color: "black", + label: "", + icon: "<&medical-cross>", + mermaidIcon: "🗑️", + mermaidClose: ")]", + mermaidOpen: "[(" + }, + 'recordLookups': { + background: "#EDEAFF", // Pale yellow + color: "black", + label: "", + icon: "<&medical-cross>", + mermaidIcon: "🔍", + mermaidClose: ")]", + mermaidOpen: "[(" + }, + 'recordUpdates': { + background: "#FFF8C9", // Light periwinkle (harmonized with recordCreates and recordDeletes) + color: "black", + label: "", + icon: "<&pencil>", + mermaidIcon: "🛠️", + mermaidClose: ")]", + mermaidOpen: "[(" + }, + 'screens': { + background: "#DFF6FF", // Pale sky blue + color: "black", + label: "", + icon: "<&pencil>", + mermaidIcon: "💻", + mermaidClose: "])", + mermaidOpen: "([" + }, + 'subflows': { + background: "#D4E4FC", // Light blue (harmonized with actionCalls) + color: "black", + label: "Subflow", + icon: "<&pencil>", + mermaidIcon: "🔗", + mermaidClose: "]]", + mermaidOpen: "[[" + }, + "startClass": { + background: "#D9F2E6", // Light turquoise (between green and blue) + color: "black" + }, + "endClass": { + background: "#F9BABA", // Slightly shinier pale re + color: "black" + }, + 'transforms': { + background: "#FDEAF6", // Light pink + color: "black", + label: "", + mermaidIcon: "♻️", + mermaidClose: "}}", + mermaidOpen: "{{" + }, +}; diff --git a/src/common/utils/gitUtils.ts b/src/common/utils/gitUtils.ts index bcd740640..c4210582f 100644 --- a/src/common/utils/gitUtils.ts +++ b/src/common/utils/gitUtils.ts @@ -1,7 +1,9 @@ -import { getConfig } from "../../config"; -import { prompts } from "./prompts"; -import * as c from "chalk"; -import * as sortArray from "sort-array"; +import { getConfig } from '../../config/index.js'; +import { prompts } from './prompts.js'; +import c from 'chalk'; +import fs from "fs-extra"; +import * as path from "path"; +import sortArray from 'sort-array'; import { arrayUniqueByKey, arrayUniqueByKeys, @@ -12,51 +14,74 @@ import { getGitRepoRoot, git, uxLog, -} from "."; -import { GitProvider } from 
"../gitProvider"; -import { Ticket, TicketProvider } from "../ticketProvider"; -import { DefaultLogFields, ListLogLine } from "simple-git"; +} from './index.js'; +import { CommonPullRequestInfo, GitProvider } from '../gitProvider/index.js'; +import { Ticket, TicketProvider } from '../ticketProvider/index.js'; +import { DefaultLogFields, ListLogLine } from 'simple-git'; +import { flowDiffToMarkdownForPullRequest } from '../gitProvider/utilsMarkdown.js'; +import { MessageAttachment } from '@slack/types'; +import { getBranchMarkdown, getNotificationButtons, getOrgMarkdown } from './notifUtils.js'; +import { NotifProvider, UtilsNotifs } from '../notifProvider/index.js'; +import { setConnectionVariables } from './orgUtils.js'; +import { WebSocketClient } from '../websocketClient.js'; +import { countPackageXmlItems } from './xmlUtils.js'; export async function selectTargetBranch(options: { message?: string } = {}) { + const gitUrl = (await git().listRemote(['--get-url']))?.trim() || ''; const message = options.message || - "What will be the target branch of your new task ? (the branch where you will make your merge request after the task is completed)"; - const config = await getConfig("user"); + `What will be the target branch of your new User Story ? (the branch where you will make your ${GitProvider.getMergeRequestName(gitUrl)} after the User Story is completed)`; + const config = await getConfig('user'); const availableTargetBranches = config.availableTargetBranches || null; // There is only once choice so return it if (availableTargetBranches === null && config.developmentBranch) { - uxLog(this, c.cyan(`Selected target branch is ${c.green(config.developmentBranch)}`)); + uxLog("action", this, c.cyan(`Automatically selected target branch is ${c.green(config.developmentBranch)}`)); return config.developmentBranch; } // Request info to build branch name. ex features/config/MYTASK const response = await prompts([ { - type: availableTargetBranches ? 
"select" : "text", - name: "targetBranch", + type: availableTargetBranches ? 'select' : 'text', + name: 'targetBranch', message: c.cyanBright(message), + description: availableTargetBranches ? 'Choose the target branch for this operation' : 'Enter the name of the target branch', + placeholder: availableTargetBranches ? undefined : 'Ex: integration', choices: availableTargetBranches ? availableTargetBranches.map((branch) => { - return { title: branch, value: branch }; - }) + return { + title: branch.includes(',') ? branch.split(',').join(' - ') : branch, + value: branch.includes(',') ? branch.split(',')[0] : branch, + }; + }) : [], - initial: config.developmentBranch || "integration", + initial: config.developmentBranch || 'integration', }, ]); - const targetBranch = response.targetBranch || "integration"; + const targetBranch = response.targetBranch || 'integration'; return targetBranch; } export async function getGitDeltaScope(currentBranch: string, targetBranch: string) { try { - await git().fetch(["origin", `${targetBranch}:${targetBranch}`]); + await git().fetch(['origin', `${targetBranch}:${targetBranch}`]); } catch (e) { - uxLog(this, c.gray(`[Warning] Unable to fetch target branch ${targetBranch} to prepare call to sfdx-git-delta\n` + JSON.stringify(e))); + uxLog( + "other", + this, + `[Warning] Unable to fetch target branch ${targetBranch} to prepare call to sfdx-git-delta\n` + + JSON.stringify(e) + ); } try { - await git().fetch(["origin", `${currentBranch}:${currentBranch}`]); + await git().fetch(['origin', `${currentBranch}:${currentBranch}`]); } catch (e) { - uxLog(this, c.gray(`[Warning] Unable to fetch current branch ${currentBranch} to prepare call to sfdx-git-delta\n` + JSON.stringify(e))); + uxLog( + "other", + this, + `[Warning] Unable to fetch current branch ${currentBranch} to prepare call to sfdx-git-delta\n` + + JSON.stringify(e) + ); } const logResult = await git().log([`${targetBranch}..${currentBranch}`]); const toCommit = logResult.latest; 
@@ -64,87 +89,152 @@ export async function getGitDeltaScope(currentBranch: string, targetBranch: stri const mergeBaseCommandResult = await execCommand(mergeBaseCommand, this, { fail: true, }); - const masterBranchLatestCommit = mergeBaseCommandResult.stdout.replace("\n", "").replace("\r", ""); + const masterBranchLatestCommit = mergeBaseCommandResult.stdout.replace('\n', '').replace('\r', ''); return { fromCommit: masterBranchLatestCommit, toCommit: toCommit, logResult: logResult }; } export async function callSfdxGitDelta(from: string, to: string, outputDir: string, options: any = {}) { - const sgdHelp = (await execCommand(" sfdx sgd:source:delta --help", this, { fail: false, output: false, debug: options?.debugMode || false })) - .stdout; - const packageXmlGitDeltaCommand = - `sfdx sgd:source:delta --from "${from}" --to "${to}" --output ${outputDir}` + - (sgdHelp.includes("--ignore-whitespace") ? " --ignore-whitespace" : ""); + const packageXmlGitDeltaCommand = `sf sgd:source:delta --from "${from}" --to "${to}" --output ${outputDir} --ignore-whitespace`; const gitDeltaCommandRes = await execSfdxJson(packageXmlGitDeltaCommand, this, { output: true, fail: false, debug: options?.debugMode || false, cwd: await getGitRepoRoot(), }); + // Send results to UI if there is one + if (WebSocketClient.isAliveWithLwcUI()) { + const deltaPackageXml = path.join(outputDir, 'package', 'package.xml'); + const deltaPackageXmlExists = await fs.exists(deltaPackageXml); + if (deltaPackageXmlExists) { + const deltaNumberOfItems = await countPackageXmlItems(deltaPackageXml); + if (deltaNumberOfItems > 0) { + WebSocketClient.sendReportFileMessage(deltaPackageXml, `Git Delta package.xml (${deltaNumberOfItems})`, "report"); + } + } + const deltaDestructiveChangesXml = path.join(outputDir, 'destructiveChanges', 'destructiveChanges.xml'); + const deltaDestructiveChangesXmlExists = await fs.exists(deltaDestructiveChangesXml); + if (deltaDestructiveChangesXmlExists) { + const 
deltaDestructiveChangesNumberOfItems = await countPackageXmlItems(deltaDestructiveChangesXml); + if (deltaDestructiveChangesNumberOfItems > 0) { + WebSocketClient.sendReportFileMessage(deltaDestructiveChangesXml, `Git Delta destructiveChanges.xml (${deltaDestructiveChangesNumberOfItems})`, "report"); + } + } + } return gitDeltaCommandRes; } -export async function computeCommitsSummary(checkOnly, pullRequestInfo: any) { - uxLog(this, c.cyan("Computing commits summary...")); +export async function computeCommitsSummary(checkOnly, pullRequestInfo: CommonPullRequestInfo | null = null) { + uxLog("action", this, c.cyan('Computing commits summary...')); const currentGitBranch = await getCurrentGitBranch(); let logResults: (DefaultLogFields & ListLogLine)[] = []; + let previousTargetBranchCommit = ""; if (checkOnly || GitProvider.isDeployBeforeMerge()) { - const prInfo = await GitProvider.getPullRequestInfo(); - const deltaScope = await getGitDeltaScope(prInfo?.sourceBranch || currentGitBranch, prInfo?.targetBranch || process.env.FORCE_TARGET_BRANCH); + const prInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + const deltaScope = await getGitDeltaScope( + prInfo?.sourceBranch || currentGitBranch || "", + prInfo?.targetBranch || process.env.FORCE_TARGET_BRANCH || "" + ); logResults = [...deltaScope.logResult.all]; + previousTargetBranchCommit = deltaScope.fromCommit; } else { const logRes = await git().log([`HEAD^..HEAD`]); + previousTargetBranchCommit = "HEAD^" logResults = [...logRes.all]; } - logResults = arrayUniqueByKeys(logResults, ["message", "body"]).reverse(); - let commitsSummary = "## Commits summary\n\n"; - const manualActions = []; + logResults = arrayUniqueByKeys(logResults, ['message', 'body']).reverse(); + let commitsSummary = '## Commits summary\n\n'; + const manualActions: any[] = []; const tickets: Ticket[] = []; for (const logResult of logResults) { - commitsSummary += "**" + logResult.message + "**, by " + logResult.author_name; + 
commitsSummary += '**' + logResult.message + '**, by ' + logResult.author_name; if (logResult.body) { - commitsSummary += "
" + logResult.body + "\n\n"; - await collectTicketsAndManualActions(currentGitBranch + "\n" + logResult.message + "\n" + logResult.body, tickets, manualActions, { + commitsSummary += '
' + logResult.body + '\n\n'; + await collectTicketsAndManualActions( + currentGitBranch + '\n' + logResult.message + '\n' + logResult.body, + tickets, + manualActions, + { + commits: [logResult], + } + ); + } else { + await collectTicketsAndManualActions(currentGitBranch + '\n' + logResult.message, tickets, manualActions, { commits: [logResult], }); - } else { - await collectTicketsAndManualActions(currentGitBranch + "\n" + logResult.message, tickets, manualActions, { commits: [logResult] }); - commitsSummary += "\n\n"; + commitsSummary += '\n\n'; } } // Tickets and references can also be in PR description if (pullRequestInfo) { - const prText = (pullRequestInfo.title || "") + (pullRequestInfo.description || ""); - await collectTicketsAndManualActions(currentGitBranch + "\n" + prText, tickets, manualActions, { pullRequestInfo: pullRequestInfo }); + const prText = (pullRequestInfo.title || '') + (pullRequestInfo.description || ''); + await collectTicketsAndManualActions(currentGitBranch + '\n' + prText, tickets, manualActions, { + pullRequestInfo: pullRequestInfo, + }); } // Unify and sort tickets - const ticketsSorted = sortArray(arrayUniqueByKey(tickets, "id"), { by: ["id"], order: ["asc"] }); - uxLog(this, c.grey(`[TicketProvider] Found ${ticketsSorted.length} tickets in commit bodies`)); + const ticketsSorted: Ticket[] = sortArray(arrayUniqueByKey(tickets, 'id'), { by: ['id'], order: ['asc'] }); + uxLog("log", this, c.grey(`[TicketProvider] Found ${ticketsSorted.length} tickets in commit bodies`)); // Try to contact Ticketing servers to gather more info await TicketProvider.collectTicketsInfo(ticketsSorted); // Add manual actions in markdown const manualActionsSorted = [...new Set(manualActions)].reverse(); if (manualActionsSorted.length > 0) { - let manualActionsMarkdown = "## Manual actions\n\n"; + let manualActionsMarkdown = '## Manual actions\n\n'; for (const manualAction of manualActionsSorted) { - manualActionsMarkdown += "- " + manualAction + "\n"; + 
manualActionsMarkdown += '- ' + manualAction + '\n'; } - commitsSummary = manualActionsMarkdown + "\n\n" + commitsSummary; + commitsSummary = manualActionsMarkdown + '\n\n' + commitsSummary; } // Add tickets in markdown if (ticketsSorted.length > 0) { - let ticketsMarkdown = "## Tickets\n\n"; + let ticketsMarkdown = '## Tickets\n\n'; for (const ticket of ticketsSorted) { if (ticket.foundOnServer) { - ticketsMarkdown += "- [" + ticket.id + "](" + ticket.url + ") " + ticket.subject + "\n"; + ticketsMarkdown += '- [' + ticket.id + '](' + ticket.url + ') ' + ticket.subject; + if (ticket.statusLabel) { + ticketsMarkdown += ' (' + ticket.statusLabel + ')'; + } + ticketsMarkdown += '\n' } else { - ticketsMarkdown += "- [" + ticket.id + "](" + ticket.url + ")\n"; + ticketsMarkdown += '- [' + ticket.id + '](' + ticket.url + ')\n'; } } - commitsSummary = ticketsMarkdown + "\n\n" + commitsSummary; + commitsSummary = ticketsMarkdown + '\n\n' + commitsSummary; + } + + // Add Flow diff in Markdown + let flowDiffMarkdown: any = {}; + if ((checkOnly || GitProvider.isDeployBeforeMerge()) && !(process.env?.SFDX_DISABLE_FLOW_DIFF === "true")) { + const flowList: string[] = []; + for (const logResult of logResults) { + const updatedFiles = await getCommitUpdatedFiles(logResult.hash); + for (const updatedFile of updatedFiles) { + if (updatedFile.endsWith(".flow-meta.xml")) { + if (fs.existsSync(updatedFile)) { + const flowName = path.basename(updatedFile, ".flow-meta.xml"); + flowList.push(flowName); + } + else { + uxLog("warning", this, c.yellow(`[FlowGitDiff] Unable to find Flow file ${updatedFile} (probably has been deleted)`)); + } + } + } + } + const flowListUnique = [...new Set(flowList)].sort(); + // Truncate flows to the only 30 ones, to avoid flooding the pull request comments + let truncatedNb = 0; + const maxFlowsToShow = parseInt(process.env?.MAX_FLOW_DIFF_TO_SHOW || "30"); + if (flowListUnique.length > maxFlowsToShow) { + truncatedNb = flowListUnique.length - 
maxFlowsToShow; + flowListUnique.splice(maxFlowsToShow, flowListUnique.length - maxFlowsToShow); + uxLog("warning", this, c.yellow(`[FlowGitDiff] Truncated flow list to 30 flows to avoid flooding Pull Request comments`)); + uxLog("warning", this, c.yellow(`[FlowGitDiff] If you want to see the diff of truncated flows, use VsCode SFDX Hardis extension :)`)); + } + flowDiffMarkdown = await flowDiffToMarkdownForPullRequest(flowListUnique, previousTargetBranchCommit, (logResults.at(-1) || logResults[0]).hash, truncatedNb); } return { @@ -152,6 +242,7 @@ export async function computeCommitsSummary(checkOnly, pullRequestInfo: any) { logResults: logResults, manualActions: manualActionsSorted, tickets: ticketsSorted, + flowDiffMarkdown: flowDiffMarkdown }; } @@ -163,3 +254,136 @@ async function collectTicketsAndManualActions(str: string, tickets: Ticket[], ma const manualActionsMatches = await extractRegexMatches(manualActionsRegex, str); manualActions.push(...manualActionsMatches); } + +export async function getCommitUpdatedFiles(commitHash) { + const result = await git().show(["--name-only", "--pretty=format:", commitHash]); + // Split the result into lines (file paths) and remove empty lines + const files = result.split('\n').filter(file => file.trim() !== '' && fs.existsSync(file)); + return files; +} + +export async function buildCheckDeployCommitSummary() { + try { + const pullRequestInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + const commitsSummary = await computeCommitsSummary(true, pullRequestInfo); + const prDataCommitsSummary = { + commitsSummary: commitsSummary.markdown, + flowDiffMarkdown: commitsSummary.flowDiffMarkdown + }; + globalThis.pullRequestData = Object.assign(globalThis.pullRequestData || {}, prDataCommitsSummary); + } catch (e3) { + uxLog("warning", this, c.yellow('Unable to compute git summary:\n' + e3)); + } +} + +export async function handlePostDeploymentNotifications(flags, targetUsername: any, quickDeploy: any, delta: 
boolean, debugMode: boolean, additionalMessage = "") { + const pullRequestInfo = await GitProvider.getPullRequestInfo({ useCache: true }); + const attachments: MessageAttachment[] = []; + try { + // Build notification attachments & handle ticketing systems comments + const commitsSummary = await collectNotifAttachments(attachments, pullRequestInfo); + await TicketProvider.postDeploymentActions( + commitsSummary.tickets, + flags['target-org']?.getConnection()?.instanceUrl || targetUsername || '', + pullRequestInfo + ); + } catch (e4: any) { + uxLog( + "warning", + this, + c.yellow('Unable to handle commit info on TicketProvider post deployment actions:\n' + e4.message) + + '\n' + + c.gray(e4.stack) + ); + } + + const orgMarkdown = await getOrgMarkdown( + flags['target-org']?.getConnection()?.instanceUrl || targetUsername || '' + ); + const branchMarkdown = await getBranchMarkdown(); + let notifMessage = `Deployment has been successfully processed from branch ${branchMarkdown} to org ${orgMarkdown}`; + notifMessage += quickDeploy + ? ' (🚀 quick deployment)' + : delta + ? ' (🌙 delta deployment)' + : ' (🌕 full deployment)'; + if (additionalMessage) { + notifMessage += '\n\n' + additionalMessage + "\n\n" + } + + const notifButtons = await getNotificationButtons(); + if (pullRequestInfo) { + if (debugMode) { + uxLog("error", this, c.grey('PR info:\n' + JSON.stringify(pullRequestInfo))); + } + const prAuthor = pullRequestInfo?.authorName; + notifMessage += `\nRelated: <${pullRequestInfo.webUrl}|${pullRequestInfo.title}>` + (prAuthor ? 
` by ${prAuthor}` : ''); + const prButtonText = 'View Pull Request'; + notifButtons.push({ text: prButtonText, url: pullRequestInfo.webUrl }); + } else { + uxLog("warning", this, c.yellow("WARNING: Unable to get Pull Request info, notif won't have a button URL")); + } + await setConnectionVariables(flags['target-org']?.getConnection(), true);// Required for some notifications providers like Email + await NotifProvider.postNotifications({ + type: 'DEPLOYMENT', + text: notifMessage, + buttons: notifButtons, + severity: 'success', + attachments: attachments, + logElements: [], + data: { metric: 0 }, // Todo: if delta used, count the number of items deployed + metrics: { + DeployedItems: 0, + }, + }); +} + + +async function collectNotifAttachments(attachments: MessageAttachment[], pullRequestInfo: CommonPullRequestInfo | null) { + const commitsSummary = await computeCommitsSummary(false, pullRequestInfo); + // Tickets attachment + if (commitsSummary.tickets.length > 0) { + attachments.push({ + text: `*Tickets*\n${commitsSummary.tickets + .map((ticket) => { + if (ticket.foundOnServer) { + let ticketsMarkdown = '• ' + UtilsNotifs.markdownLink(ticket.url, ticket.id) + ' ' + ticket.subject; + if (ticket.statusLabel) { + ticketsMarkdown += ' (' + ticket.statusLabel + ')'; + } + return ticketsMarkdown; + } else { + return '• ' + UtilsNotifs.markdownLink(ticket.url, ticket.id); + } + }) + .join('\n')}`, + }); + } + // Manual actions attachment + if (commitsSummary.manualActions.length > 0) { + attachments.push({ + text: `*Manual actions*\n${commitsSummary.manualActions + .map((manualAction) => { + return '• ' + manualAction; + }) + .join('\n')}`, + }); + } + // Commits attachment + if (commitsSummary.logResults.length > 0) { + attachments.push({ + text: `*Commits*\n${commitsSummary.logResults + .map((logResult) => { + return '• ' + logResult.message + ', by ' + logResult.author_name; + }) + .join('\n')}`, + }); + } + return commitsSummary; +} + +export function 
makeFileNameGitCompliant(fileName: string) { + // Remove all characters that are not alphanumeric, underscore, hyphen, space or dot + const sanitizedFileName = fileName.replace(/[^a-zA-Z0-9_. -]/g, '_'); + return sanitizedFileName; +} \ No newline at end of file diff --git a/src/common/utils/index.ts b/src/common/utils/index.ts index efaa04f93..9d0a9d160 100644 --- a/src/common/utils/index.ts +++ b/src/common/utils/index.ts @@ -1,56 +1,75 @@ -import * as c from "chalk"; -import * as child from "child_process"; -import * as crossSpawn from "cross-spawn"; -import * as crypto from "crypto"; -import * as csvStringify from "csv-stringify/lib/sync"; -import * as fs from "fs-extra"; -import * as os from "os"; -import * as path from "path"; - -import * as util from "util"; -import * as which from "which"; -import * as xml2js from "xml2js"; +import c from 'chalk'; +import * as child from 'child_process'; +import { spawn as crossSpawn } from 'cross-spawn'; +import * as crypto from 'crypto'; +import { stringify as csvStringify } from 'csv-stringify/sync'; +import fs from 'fs-extra'; +import * as os from 'os'; +import * as path from 'path'; + +import * as util from 'util'; +import which from 'which'; +import * as xml2js from 'xml2js'; const exec = util.promisify(child.exec); -import { SfdxError } from "@salesforce/core"; -import * as ora from "ora"; -import simpleGit, { FileStatusResult, SimpleGit } from "simple-git"; -import { CONSTANTS, getConfig, getReportDirectory, setConfig } from "../../config"; -import { prompts } from "./prompts"; -import { encryptFile } from "../cryptoUtils"; -import { deployMetadatas, truncateProgressLogLines } from "./deployUtils"; -import { promptProfiles, promptUserEmail } from "./orgUtils"; -import { WebSocketClient } from "../websocketClient"; -import * as moment from "moment"; -import { writeXmlFile } from "./xmlUtils"; - -let pluginsStdout = null; +import { SfError } from '@salesforce/core'; +import ora from 'ora'; +import { simpleGit, 
FileStatusResult, SimpleGit } from 'simple-git'; +import { CONSTANTS, getApiVersion, getConfig, getReportDirectory, setConfig } from '../../config/index.js'; +import { prompts } from './prompts.js'; +import { encryptFile } from '../cryptoUtils.js'; +import { deployMetadatas, shortenLogLines } from './deployUtils.js'; +import { isProductionOrg, promptProfiles, promptUserEmail } from './orgUtils.js'; +import { LogType, WebSocketClient } from '../websocketClient.js'; +import moment from 'moment'; +import { writeXmlFile } from './xmlUtils.js'; +import { SfCommand } from '@salesforce/sf-plugins-core'; + +let pluginsStdout: string | null = null; export const isCI = process.env.CI != null; -export function git(options: any = { output: false }): SimpleGit { +export function git(options: any = { output: false, displayCommand: true }): SimpleGit { const simpleGitInstance = simpleGit(); // Hack to be able to display executed git command (and it still doesn't work...) // cf: https://github.com/steveukx/git-js/issues/593 return simpleGitInstance.outputHandler((command, stdout, stderr, gitArgs) => { let first = true; - stdout.on("data", (data) => { + stdout.on('data', (data) => { logCommand(); if (options.output) { - uxLog(this, c.italic(c.grey(data))); + uxLog("other", this, c.italic(c.grey(data))); } }); - stderr.on("data", (data) => { + stderr.on('data', (data) => { logCommand(); if (options.output) { - uxLog(this, c.italic(c.yellow(data))); + uxLog("other", this, c.italic(c.yellow(data))); } }); function logCommand() { if (first) { first = false; - const gitArgsStr = (gitArgs || []).join(" "); - if (!(gitArgsStr.includes("branch -v") || gitArgsStr.includes("config --list --show-origin --null"))) { - uxLog(this, `[command] ${c.bold(c.bgWhite(c.grey(command + " " + gitArgsStr)))}`); + const gitArgsStr = (gitArgs || []).join(' '); + if (!(gitArgsStr.includes('branch -v') || gitArgsStr.includes('config --list --show-origin --null'))) { + if (options.displayCommand) { + if 
(WebSocketClient.isAlive()) { + WebSocketClient.sendCommandSubCommandStartMessage( + command + ' ' + gitArgsStr, + process.cwd(), + options, + ); + } + uxLog("other", this, `[command] ${c.bold(c.bgWhite(c.blue(command + ' ' + gitArgsStr)))}`); + if (WebSocketClient.isAlive()) { + WebSocketClient.sendCommandSubCommandEndMessage( + command + ' ' + gitArgsStr, + process.cwd(), + options, + true, + '', + ); + } + } } } } @@ -58,26 +77,31 @@ export function git(options: any = { output: false }): SimpleGit { } export async function createTempDir() { - const tmpDir = path.join(os.tmpdir(), "sfdx-hardis-" + Math.random().toString(36).substring(7)); + const tmpDir = path.join(os.tmpdir(), 'sfdx-hardis-' + Math.random().toString(36).substring(7)); await fs.ensureDir(tmpDir); return tmpDir; } +let isGitRepoCache: boolean | null = null; export function isGitRepo() { - const isInsideWorkTree = child.spawnSync("git", ["rev-parse", "--is-inside-work-tree"], { - encoding: "utf8", + if (isGitRepoCache !== null) { + return isGitRepoCache; + } + const isInsideWorkTree = child.spawnSync('git', ['rev-parse', '--is-inside-work-tree'], { + encoding: 'utf8', windowsHide: true, }); - return isInsideWorkTree.status === 0; + isGitRepoCache = isInsideWorkTree.status === 0 + return isGitRepoCache; } export async function getGitRepoName() { if (!isGitRepo) { return null; } - const origin = await git().getConfig("remote.origin.url"); - if (origin.value && origin.value.includes("/")) { - return /[^/]*$/.exec(origin.value)[0]; + const origin = await git().getConfig('remote.origin.url'); + if (origin.value && origin.value.includes('/')) { + return (/[^/]*$/.exec(origin.value) || '')[0]; } return null; } @@ -86,7 +110,7 @@ export async function getGitRepoUrl() { if (!isGitRepo) { return null; } - const origin = await git().getConfig("remote.origin.url"); + const origin = await git().getConfig('remote.origin.url'); if (origin && origin.value) { // Replace https://username:token@gitlab.com/toto by 
https://gitlab.com/toto return origin.value.replace(/\/\/(.*:.*@)/gm, `//`); @@ -97,45 +121,46 @@ export async function getGitRepoUrl() { export async function gitHasLocalUpdates(options = { show: false }) { const changes = await git().status(); if (options.show) { - uxLog(this, c.cyan(JSON.stringify(changes))); + uxLog("action", this, c.cyan(JSON.stringify(changes))); } return changes.files.length > 0; } // Install plugin if not present export async function checkSfdxPlugin(pluginName: string) { - // Manage cache of sfdx plugins result + // Manage cache of SF CLI Plugins result if (pluginsStdout == null) { - const config = await getConfig("user"); + const config = await getConfig('user'); if (config.sfdxPluginsStdout) { pluginsStdout = config.sfdxPluginsStdout; } else { - const pluginsRes = await exec("sfdx plugins"); + const pluginsRes = await exec('sf plugins'); pluginsStdout = pluginsRes.stdout; - await setConfig("user", { sfdxPluginsStdout: pluginsStdout }); + await setConfig('user', { sfdxPluginsStdout: pluginsStdout }); } } - if (!pluginsStdout.includes(pluginName)) { + if (!(pluginsStdout || '').includes(pluginName)) { uxLog( + "warning", this, c.yellow( - `[dependencies] Installing sfdx plugin ${c.green(pluginName)}... \nIf is stays stuck for too long, please run ${c.green( - `sfdx plugins:install ${pluginName}`, - )})`, - ), + `[dependencies] Installing SF CLI plugin ${c.green( + pluginName + )}... 
\nIf is stays stuck for too long, please run ${c.green(`sf plugins install ${pluginName}`)})` + ) ); - const installCommand = `echo y|sfdx plugins:install ${pluginName}`; + const installCommand = `echo y|sf plugins install ${pluginName}`; await execCommand(installCommand, this, { fail: true, output: false }); } } const dependenciesInstallLink = { - git: "Download installer at https://git-scm.com/downloads", + git: 'Download installer at https://git-scm.com/downloads', openssl: 'Run "choco install openssl" in Windows Powershell, or use Git Bash as command line tool', }; export async function checkAppDependency(appName) { - const config = await getConfig("user"); + const config = await getConfig('user'); const installedApps = config.installedApps || []; if (installedApps.includes(appName)) { return true; @@ -143,71 +168,97 @@ export async function checkAppDependency(appName) { which(appName) .then(async () => { installedApps.push(appName); - await setConfig("user", { installedApps: installedApps }); + await setConfig('user', { installedApps: installedApps }); }) .catch(() => { - uxLog(this, c.red(`You need ${c.bold(appName)} to be locally installed to run this command.\n${dependenciesInstallLink[appName] || ""}`)); + uxLog( + "error", + this, + c.red( + `You need ${c.bold(appName)} to be locally installed to run this command.\n${dependenciesInstallLink[appName] || '' + }` + ) + ); process.exit(); }); } -export async function promptInstanceUrl(orgTypes = ["login", "test"], alias = "default org", defaultOrgChoice: any = null) { +export async function promptInstanceUrl( + orgTypes = ['login', 'test'], + alias = 'default org', + defaultOrgChoice: any = null +) { const customLoginUrlExample = - orgTypes && orgTypes.length === 1 && orgTypes[0] === "login" - ? "https://myclient.lightning.force.com/" - : "https://myclient--preprod.sandbox.lightning.force.com/"; + orgTypes && orgTypes.length === 1 && orgTypes[0] === 'login' + ? 
'https://myclient.lightning.force.com/' + : 'https://myclient--preprod.sandbox.lightning.force.com/'; const allChoices = [ { - title: "📝 Custom login URL (Sandbox, DevHub or Production Org)", + title: '📝 Custom login URL (Sandbox, DevHub or Production Org)', description: `Recommended option :) Example: ${customLoginUrlExample}`, - value: "custom", + value: 'custom', }, { - title: "🧪 Sandbox or Scratch org (test.salesforce.com)", - description: "The org I want to connect is a sandbox or a scratch org", - value: "https://test.salesforce.com", + title: '🧪 Sandbox or Scratch org (test.salesforce.com)', + description: 'The org I want to connect is a sandbox or a scratch org', + value: 'https://test.salesforce.com', }, { - title: "☢️ Other: Dev org, Production org or DevHub org (login.salesforce.com)", - description: "The org I want to connect is NOT a sandbox", - value: "https://login.salesforce.com", + title: '☢️ Other: Dev org, Production org or DevHub org (login.salesforce.com)', + description: 'The org I want to connect is NOT a sandbox', + value: 'https://login.salesforce.com', }, ]; const choices = allChoices.filter((choice) => { - if (choice.value === "https://login.salesforce.com" && !orgTypes.includes("login")) { + if (choice.value === 'https://login.salesforce.com' && !orgTypes.includes('login')) { return false; } - if (choice.value === "https://test.salesforce.com" && !orgTypes.includes("test")) { + if (choice.value === 'https://test.salesforce.com' && !orgTypes.includes('test')) { return false; } return true; }); if (defaultOrgChoice != null) { - choices.push({ + choices.unshift({ title: `♻️ ${defaultOrgChoice.instanceUrl}`, - description: "Your current default org", + description: 'Your current default org', value: defaultOrgChoice.instanceUrl, }); } const orgTypeResponse = await prompts({ - type: "select", - name: "value", - message: c.cyanBright(`What is the base URL or the org you want to connect to, as ${alias} ?`), + type: 'select', + name: 'value', + 
message: c.cyanBright(`What is the base URL or domain or the org you want to connect to, as ${alias} ?`), + description: 'Select the Salesforce environment type or specify a custom URL for authentication', choices: choices, initial: 1, }); // login.salesforce.com or test.salesforce.com const url = orgTypeResponse.value; - if (url.startsWith("http")) { + if (url.startsWith('http')) { return url; } // Custom url to input const customUrlResponse = await prompts({ - type: "text", - name: "value", - message: c.cyanBright("Please input the base URL of the salesforce org (ex: https://myclient.my.salesforce.com)"), + type: 'text', + name: 'value', + message: c.cyanBright('Please input the base URL of the salesforce org'), + description: 'Copy paste the full URL of your currently open Salesforce org :)', + placeholder: 'Ex: https://myclient.my.salesforce.com , or myclient', }); - const urlCustom = (customUrlResponse.value || []).replace(".lightning.force.com", ".my.salesforce.com"); + let urlCustom = (customUrlResponse?.value || "") + .replace('.lightning.force.com', '.my.salesforce.com') + .replace('.my.salesforce-setup.com', '.my.salesforce.com'); + // Remove everything after '.my.salesforce.com' if existing + if (urlCustom.includes('.my.salesforce.com')) { + urlCustom = urlCustom.substring(0, urlCustom.indexOf('.my.salesforce.com') + '.my.salesforce.com'.length); + } + if (!urlCustom.startsWith('https://')) { + urlCustom = 'https://' + urlCustom; + } + if (!urlCustom.endsWith('.my.salesforce.com')) { + urlCustom = urlCustom + '.my.salesforce.com'; + } return urlCustom; } @@ -216,88 +267,97 @@ export async function ensureGitRepository(options: any = { init: false, clone: f if (!isGitRepo()) { // Init repo if (options.init) { - await exec("git init -b main"); + await exec('git init -b main'); console.info(c.yellow(c.bold(`[sfdx-hardis] Initialized git repository in ${process.cwd()}`))); + isGitRepoCache = null; } else if (options.clone) { // Clone repo let cloneUrl = 
options.cloneUrl; if (!cloneUrl) { // Request repo url if not provided const cloneUrlPrompt = await prompts({ - type: "text", - name: "value", + type: 'text', + name: 'value', message: c.cyanBright( - "What is the URL of your git repository ? example: https://gitlab.hardis-group.com/busalesforce/monclient/monclient-org-monitoring.git", + 'What is the URL of your git repository ?' ), + description: 'Enter the full URL of the git repository to clone', + placeholder: 'Ex: https://gitlab.hardis-group.com/busalesforce/monclient/monclient-org-monitoring.git', }); cloneUrl = cloneUrlPrompt.value; } // Git lcone await new Promise((resolve) => { - crossSpawn("git", ["clone", cloneUrl, "."], { stdio: "inherit" }).on("close", () => { + crossSpawn('git', ['clone', cloneUrl, '.'], { stdio: 'inherit' }).on('close', () => { resolve(null); }); }); - uxLog(this, `Git repository cloned. ${c.yellow("Please run again the same command :)")}`); + uxLog("other", this, `Git repository cloned. ${c.yellow('Please run again the same command :)')}`); process.exit(0); } else { - throw new SfdxError("You need to be at the root of a git repository to run this command"); + throw new SfError('You need to be at the root of a git repository to run this command'); } } // Check if root else if (options.mustBeRoot) { const gitRepoRoot = await getGitRepoRoot(); if (path.resolve(gitRepoRoot) !== path.resolve(process.cwd())) { - throw new SfdxError(`You must be at the root of the git repository (${path.resolve(gitRepoRoot)})`); + throw new SfError(`You must be at the root of the git repository (${path.resolve(gitRepoRoot)})`); } } } export async function getGitRepoRoot() { - const gitRepoRoot = await git().revparse(["--show-toplevel"]); + const gitRepoRoot = await git().revparse(['--show-toplevel']); return gitRepoRoot; } // Get local git branch name export async function getCurrentGitBranch(options: any = { formatted: false }) { - if (git == null) { + if (!isGitRepo()) { return null; } const gitBranch = 
process.env.CI_COMMIT_REF_NAME || (await git().branchLocal()).current; if (options.formatted === true) { - return gitBranch.replace("/", "__"); + return gitBranch.replace('/', '__'); } return gitBranch; } export async function getLatestGitCommit() { - if (git == null) { + if (!isGitRepo()) { return null; } - const log = await git().log(["-1"]); + const log = await git().log(['-1']); return log?.latest ?? null; } // Select git branch and checkout & pull if requested -export async function selectGitBranch(options: { remote: true; checkOutPull: boolean } = { remote: true, checkOutPull: false }) { - const gitBranchOptions = ["--list"]; +export async function selectGitBranch( + options: { remote: true; checkOutPull: boolean; message?: string; allowAll?: boolean } = { remote: true, checkOutPull: false } +) { + const gitBranchOptions = ['--list']; if (options.remote) { - gitBranchOptions.push("-r"); + gitBranchOptions.push('-r'); } const branches = await git().branch(gitBranchOptions); + if (options.allowAll) { + branches.all.unshift("ALL BRANCHES") + } const branchResp = await prompts({ - type: "select", - name: "value", - message: "Please select a Git branch", + type: 'select', + name: 'value', + message: options.message || 'Please select a Git branch', + description: 'Choose a git branch to work with', choices: branches.all.map((branchName) => { - return { title: branchName.replace("origin/", ""), value: branchName.replace("origin/", "") }; + return { title: branchName.replace('origin/', ''), value: branchName.replace('origin/', '') }; }), }); const branch = branchResp.value; // Checkout & pull if requested - if (options.checkOutPull) { + if (options.checkOutPull && branch !== "ALL BRANCHES") { await gitCheckOutRemote(branch); - WebSocketClient.sendMessage({ event: "refreshStatus" }); + WebSocketClient.sendRefreshStatusMessage(); } return branch; } @@ -308,10 +368,11 @@ export async function gitCheckOutRemote(branchName: string) { } // Get local git branch name -export 
async function ensureGitBranch(branchName: string, options: any = { init: false, parent: "current" }) { - if (git == null) { +export async function ensureGitBranch(branchName: string, options: any = { init: false, parent: 'current' }) { + if (!isGitRepo()) { if (options.init) { await ensureGitRepository({ init: true }); + isGitRepoCache = null; } else { return false; } @@ -325,15 +386,15 @@ export async function ensureGitBranch(branchName: string, options: any = { init: await git().checkout(branchName); // await git().pull() } else { - if (options?.parent === "main") { + if (options?.parent === 'main') { // Create from main branch - const mainBranch = branches.all.includes("main") - ? "main" - : branches.all.includes("origin/main") - ? "main" - : branches.all.includes("remotes/origin/main") - ? "main" - : "master"; + const mainBranch = branches.all.includes('main') + ? 'main' + : branches.all.includes('origin/main') + ? 'main' + : branches.all.includes('remotes/origin/main') + ? 'main' + : 'master'; await git().checkout(mainBranch); await git().checkoutBranch(branchName, mainBranch); } else { @@ -347,8 +408,8 @@ export async function ensureGitBranch(branchName: string, options: any = { init: // Checks that current git status is clean. 
export async function checkGitClean(options: any) { - if (git == null) { - throw new SfdxError("[sfdx-hardis] You must be within a git repository"); + if (!isGitRepo()) { + throw new SfError('[sfdx-hardis] You must be within a git repository'); } const gitStatus = await git({ output: true }).status(); if (gitStatus.files.length > 0) { @@ -356,15 +417,21 @@ export async function checkGitClean(options: any) { .map((fileStatus: FileStatusResult) => { return `(${fileStatus.working_dir}) ${getSfdxFileLabel(fileStatus.path)}`; }) - .join("\n"); + .join('\n'); if (options.allowStash) { - await execCommand("git add --all", this, { output: true, fail: true }); - await execCommand("git stash", this, { output: true, fail: true }); + try { + await execCommand('git add --all', this, { output: true, fail: true }); + await execCommand('git stash', this, { output: true, fail: true }); + } catch (e) { + uxLog("warning", this, c.yellow(c.bold("You might need to run the following command in Powershell launched as Administrator"))); + uxLog("warning", this, c.yellow(c.bold("git config --system core.longpaths true"))); + throw e; + } } else { - throw new SfdxError( + throw new SfError( `[sfdx-hardis] Branch ${c.bold(gitStatus.current)} is not clean. 
You must ${c.bold( - "commit or reset", - )} the following local updates:\n${c.yellow(localUpdates)}`, + 'commit or reset' + )} the following local updates:\n${c.yellow(localUpdates)}` ); } } @@ -372,15 +439,17 @@ export async function checkGitClean(options: any) { // Interactive git add export async function interactiveGitAdd(options: any = { filter: [], groups: [] }) { - if (git == null) { - throw new SfdxError("[sfdx-hardis] You must be within a git repository"); + if (!isGitRepo()) { + throw new SfError('[sfdx-hardis] You must be within a git repository'); } // List all files and arrange their format - const config = await getConfig("project"); + const config = await getConfig('project'); const gitStatus = await git().status(); let filesFiltered = gitStatus.files .filter((fileStatus: FileStatusResult) => { - return (options.filter || []).filter((filterString: string) => fileStatus.path.includes(filterString)).length === 0; + return ( + (options.filter || []).filter((filterString: string) => fileStatus.path.includes(filterString)).length === 0 + ); }) .map((fileStatus: FileStatusResult) => { fileStatus.path = normalizeFileStatusPath(fileStatus.path, config); @@ -391,7 +460,7 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] if (groups.length === 0) { groups = [ { - label: "All", + label: 'All', regex: /(.*)/i, defaultSelect: false, ignore: false, @@ -399,7 +468,7 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] ]; } // Ask user what he/she wants to git add/rm - const result = { added: [], removed: [] }; + const result: any = { added: [], removed: [] }; if (filesFiltered.length > 0) { for (const group of groups) { // Extract files matching group regex @@ -415,11 +484,14 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] }); // Ask user for input const selectFilesStatus = await prompts({ - type: "multiselect", - name: "files", + type: 'multiselect', + name: 'files', 
message: c.cyanBright( - `Please select ${c.red("carefully")} the ${c.bgWhite(c.red(c.bold(group.label.toUpperCase())))} files you want to commit (save)}`, + `Please select the ${c.bgWhite( + c.red(c.bold(group.label.toUpperCase())) + )} files you want to commit (save)}` ), + description: 'Choose files to include in the git commit. Be careful with your selection.', choices: matchingFiles.map((fileStatus: FileStatusResult) => { return { title: `(${getGitWorkingDirLabel(fileStatus.working_dir)}) ${getSfdxFileLabel(fileStatus.path)}`, @@ -434,26 +506,27 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] // Separate added to removed files result.added.push( ...selectFilesStatus.files - .filter((fileStatus: FileStatusResult) => fileStatus.working_dir !== "D") - .map((fileStatus: FileStatusResult) => fileStatus.path.replace('"', "")), + .filter((fileStatus: FileStatusResult) => fileStatus.working_dir !== 'D') + .map((fileStatus: FileStatusResult) => fileStatus.path.replace('"', '')) ); result.removed.push( ...selectFilesStatus.files - .filter((fileStatus: FileStatusResult) => fileStatus.working_dir === "D") - .map((fileStatus: FileStatusResult) => fileStatus.path.replace('"', "")), + .filter((fileStatus: FileStatusResult) => fileStatus.working_dir === 'D') + .map((fileStatus: FileStatusResult) => fileStatus.path.replace('"', '')) ); } if (filesFiltered.length > 0) { uxLog( + "log", this, c.grey( - "The following list of files has not been proposed for selection\n" + - filesFiltered - .map((fileStatus: FileStatusResult) => { - return ` - (${getGitWorkingDirLabel(fileStatus.working_dir)}) ${getSfdxFileLabel(fileStatus.path)}`; - }) - .join("\n"), - ), + 'The following list of files has not been proposed for selection\n' + + filesFiltered + .map((fileStatus: FileStatusResult) => { + return ` - (${getGitWorkingDirLabel(fileStatus.working_dir)}) ${getSfdxFileLabel(fileStatus.path)}`; + }) + .join('\n') + ) ); } // Ask user for confirmation @@ 
-462,29 +535,30 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] .map((group) => { return ( c.bgWhite(c.red(c.bold(group.label))) + - "\n" + + '\n' + group.files .map((fileStatus: FileStatusResult) => { return ` - (${getGitWorkingDirLabel(fileStatus.working_dir)}) ${getSfdxFileLabel(fileStatus.path)}`; }) - .join("\n") + - "\n" + .join('\n') + + '\n' ); }) - .join("\n"); + .join('\n'); const addFilesResponse = await prompts({ - type: "select", - name: "addFiles", + type: 'select', + name: 'addFiles', message: c.cyanBright(`Do you confirm that you want to add the following list of files ?\n${confirmationText}`), + description: 'Confirm your file selection for the git commit', choices: [ - { title: "Yes, my selection is complete !", value: "yes" }, - { title: "No, I want to select again", value: "no" }, - { title: "Let me out of here !", value: "bye" }, + { title: 'Yes, my selection is complete !', value: 'yes' }, + { title: 'No, I want to select again', value: 'no' }, + { title: 'Let me out of here !', value: 'bye' }, ], initial: 0, }); // Commit if requested - if (addFilesResponse.addFiles === "yes") { + if (addFilesResponse.addFiles === 'yes') { if (result.added.length > 0) { await git({ output: true }).add(result.added); } @@ -493,16 +567,16 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] } } // restart selection - else if (addFilesResponse.addFiles === "no") { + else if (addFilesResponse.addFiles === 'no') { return await interactiveGitAdd(options); } // exit else { - uxLog(this, "Cancelled by user"); + uxLog("other", this, 'Cancelled by user'); process.exit(0); } } else { - uxLog(this, c.cyan("There is no new file to commit")); + uxLog("action", this, c.cyan('There is no new file to commit')); } return result; } @@ -511,24 +585,25 @@ export async function interactiveGitAdd(options: any = { filter: [], groups: [] export async function gitAddCommitPush( options: any = { init: false, - pattern: 
"./*", - commitMessage: "Updated by sfdx-hardis", + pattern: './*', + commitMessage: 'Updated by sfdx-hardis', branch: null, - }, + } ) { - if (git == null) { + if (!isGitRepo()) { if (options.init) { // Initialize git repo - await execCommand("git init -b main", this); - await git().checkoutBranch(options.branch || "dev", "main"); + await execCommand('git init -b main', this); + isGitRepoCache = null; + await git().checkoutBranch(options.branch || 'dev', 'main'); } } // Add, commit & push const currentgitBranch = (await git().branchLocal()).current; await git() - .add(options.pattern || "./*") - .commit(options.commitMessage || "Updated by sfdx-hardis") - .push(["-u", "origin", currentgitBranch]); + .add(options.pattern || './*') + .commit(options.commitMessage || 'Updated by sfdx-hardis') + .push(['-u', 'origin', currentgitBranch]); } // Normalize git FileStatus path @@ -540,7 +615,7 @@ export function normalizeFileStatusPath(fileStatusPath: string, config): string fileStatusPath = fileStatusPath.slice(0, -1); } if (config.gitRootFolderPrefix) { - fileStatusPath = fileStatusPath.replace(config.gitRootFolderPrefix, ""); + fileStatusPath = fileStatusPath.replace(config.gitRootFolderPrefix, ''); } return fileStatusPath; } @@ -553,10 +628,10 @@ export async function execSfdxJson( fail: false, output: false, debug: false, - }, + } ): Promise { - if (!command.includes("--json")) { - command += " --json"; + if (!command.includes('--json')) { + command += ' --json'; } return await execCommand(command, commandThis, options); } @@ -564,58 +639,102 @@ export async function execSfdxJson( // Execute command export async function execCommand( command: string, - commandThis: any, + commandThis: SfCommand | null, options: any = { fail: false, output: false, debug: false, spinner: true, - }, + } ): Promise { - let commandLog = `[sfdx-hardis][command] ${c.bold(c.bgWhite(c.grey(command)))}`; + let commandLog = `[sfdx-hardis][command] ${c.bold(c.bgWhite(c.blue(command)))}`; const 
execOptions: any = { maxBuffer: 10000 * 10000 }; if (options.cwd) { execOptions.cwd = options.cwd; if (path.resolve(execOptions.cwd) !== path.resolve(process.cwd())) { - commandLog += c.grey(` ${c.italic("in directory")} ${execOptions.cwd}`); + commandLog += c.grey(` ${c.italic('in directory')} ${execOptions.cwd}`); } } - let commandResult = null; - // Call command (disable color before for json parsing) - const prevForceColor = process.env.FORCE_COLOR; - process.env.FORCE_COLOR = "0"; - const output = options.output !== null ? options.output : !process.argv.includes("--json"); + + const env = Object.assign({}, process.env); + // Disable colors for json parsing + // Remove NODE_OPTIONS in case it contains --inspect-brk to avoid to trigger again the debugger + env.FORCE_COLOR = '0'; + if (env?.NODE_OPTIONS && env.NODE_OPTIONS.includes("--inspect-brk")) { + env.NODE_OPTIONS = ""; + } + if (env?.JSFORCE_LOG_LEVEL) { + env.JSFORCE_LOG_LEVEL = ""; + } + execOptions.env = env; + if (command.startsWith('sf hardis')) { + execOptions.env.NO_NEW_COMMAND_TAB = 'true'; + } + let commandResult: any = {}; + const output = options.output !== null ? 
options.output : !commandThis?.argv?.includes('--json'); let spinner: any; if (output && !(options.spinner === false)) { - spinner = ora({ text: commandLog, spinner: "moon" }).start(); + spinner = ora({ text: commandLog, spinner: 'moon' }).start(); + if (globalThis.hardisLogFileStream) { + globalThis.hardisLogFileStream.write(stripAnsi(commandLog) + '\n'); + } } else { - uxLog(this, commandLog); + uxLog("other", this, commandLog); + } + if (WebSocketClient.isAlive()) { + WebSocketClient.sendCommandSubCommandStartMessage( + command, + execOptions.cwd || process.cwd(), + options, + ); } try { commandResult = await exec(command, execOptions); if (spinner) { spinner.succeed(commandLog); } + if (WebSocketClient.isAlive()) { + WebSocketClient.sendCommandSubCommandEndMessage( + command, + execOptions.cwd || process.cwd(), + options, + true, + commandResult, + ); + } } catch (e) { if (spinner) { spinner.fail(commandLog); } - process.env.FORCE_COLOR = prevForceColor; + WebSocketClient.sendCommandSubCommandEndMessage( + command, + execOptions.cwd || process.cwd(), + options, + false, + e, + ); // Display error in red if not json - if (!command.includes("--json") || options.fail) { - const strErr = truncateProgressLogLines(`${e.stdout}\n${e.stderr}`); - console.error(c.red(strErr)); - e.message = e.message += "\n" + strErr; + if (!command.includes('--json') || options.fail) { + const strErr = shortenLogLines(`${(e as any).stdout}\n${(e as any).stderr}`); + if (output) { + console.error(c.red(strErr)); + } + (e as Error).message = (e as Error).message += '\n' + strErr; // Manage retry if requested if (options.retry != null) { options.retry.tryCount = (options.retry.tryCount || 0) + 1; if ( options.retry.tryCount <= (options.retry.retryMaxAttempts || 1) && - (options.retry.retryStringConstraint == null || (e.stdout + e.stderr).includes(options.retry.retryStringConstraint)) + (options.retry.retryStringConstraint == null || + ((e as any).stdout + (e as 
any).stderr).includes(options.retry.retryStringConstraint)) ) { - uxLog(commandThis, c.yellow(`Retry command: ${options.retry.tryCount} on ${options.retry.retryMaxAttempts || 1}`)); + uxLog( + "warning", + commandThis, + c.yellow(`Retry command: ${options.retry.tryCount} on ${options.retry.retryMaxAttempts || 1}`) + ); if (options.retry.retryDelay) { - uxLog(this, `Waiting ${options.retry.retryDelay} seconds before retrying command`); + uxLog("other", this, `Waiting ${options.retry.retryDelay} seconds before retrying command`); await new Promise((resolve) => setTimeout(resolve, options.retry.retryDelay * 1000)); } return await execCommand(command, commandThis, options); @@ -626,17 +745,16 @@ export async function execCommand( // if --json, we should not have a crash, so return status 1 + output log return { status: 1, - errorMessage: `[sfdx-hardis][ERROR] Error processing command\n$${e.stdout}\n${e.stderr}`, + errorMessage: `[sfdx-hardis][ERROR] Error processing command\n$${(e as any).stdout}\n${(e as any).stderr}`, error: e, }; } - // Display output if requested, for better user unrstanding of the logs + // Display output if requested, for better user understanding of the logs if (options.output || options.debug) { - uxLog(commandThis, c.italic(c.grey(truncateProgressLogLines(commandResult.stdout)))); + uxLog("other", commandThis, c.italic(c.grey(shortenLogLines(commandResult.stdout)))); } // Return status 0 if not --json - process.env.FORCE_COLOR = prevForceColor; - if (!command.includes("--json")) { + if (!command.includes('--json')) { return { status: 0, stdout: commandResult.stdout, @@ -647,10 +765,10 @@ export async function execCommand( try { const parsedResult = JSON.parse(commandResult.stdout); if (options.fail && parsedResult.status && parsedResult.status > 0) { - throw new SfdxError(c.red(`[sfdx-hardis][ERROR] Command failed: ${commandResult}`)); + throw new SfError(c.red(`[sfdx-hardis][ERROR] Command failed: ${commandResult}`)); } if 
(commandResult.stderr && commandResult.stderr.length > 2) { - uxLog(this, "[sfdx-hardis][WARNING] stderr: " + c.yellow(commandResult.stderr)); + uxLog("other", this, '[sfdx-hardis][WARNING] stderr: ' + c.yellow(commandResult.stderr)); } return parsedResult; } catch (e) { @@ -658,7 +776,8 @@ export async function execCommand( return { status: 1, errorMessage: c.red( - `[sfdx-hardis][ERROR] Error parsing JSON in command result: ${e.message}\n${commandResult.stdout}\n${commandResult.stderr})`, + `[sfdx-hardis][ERROR] Error parsing JSON in command result: ${(e as Error).message}\n${commandResult.stdout}\n${commandResult.stderr + })` ), }; } @@ -667,7 +786,9 @@ export async function execCommand( /* Ex: force-app/main/default/layouts/Opportunity-Opportunity %28Marketing%29 Layout.layout-meta.xml becomes layouts/Opportunity-Opportunity (Marketing Layout).layout-meta.xml */ export function getSfdxFileLabel(filePath: string) { - const cleanStr = decodeURIComponent(filePath.replace("force-app/main/default/", "").replace("force-app/main/", "").replace('"', "")); + const cleanStr = decodeURIComponent( + filePath.replace('force-app/main/default/', '').replace('force-app/main/', '').replace('"', '') + ); const dotNumbers = (filePath.match(/\./g) || []).length; if (dotNumbers > 1) { const m = /(.*)\/(.*)\..*\..*/.exec(cleanStr); @@ -684,7 +805,7 @@ export function getSfdxFileLabel(filePath: string) { } function getGitWorkingDirLabel(workingDir) { - return workingDir === "?" ? "CREATED" : workingDir === "D" ? "DELETED" : workingDir === "M" ? "UPDATED" : "OOOOOPS"; + return workingDir === '?' ? 'CREATED' : workingDir === 'D' ? 'DELETED' : workingDir === 'M' ? 
'UPDATED' : 'OOOOOPS'; } const elapseAll = {}; @@ -695,7 +816,7 @@ export function elapseEnd(text: string, commandThis: any = this) { if (elapseAll[text]) { const elapsed = Number(process.hrtime.bigint() - elapseAll[text]); const ms = elapsed / 1000000; - uxLog(commandThis, c.grey(c.italic(text + " " + moment().startOf("day").milliseconds(ms).format("H:mm:ss.SSS")))); + uxLog("log", commandThis, c.grey(c.italic(text + ' ' + moment().startOf('day').milliseconds(ms).format('H:mm:ss.SSS')))); delete elapseAll[text]; } } @@ -732,28 +853,46 @@ export async function filterPackageXml( packageXmlFile: string, packageXmlFileOut: string, options: any = { + keepOnlyNamespaces: [], removeNamespaces: [], removeMetadatas: [], removeStandard: false, removeFromPackageXmlFile: null, updateApiVersion: null, - }, + } ): Promise<{ updated: boolean; message: string }> { let updated = false; let message = `[sfdx-hardis] ${packageXmlFileOut} not updated`; const initialFileContent = await fs.readFile(packageXmlFile); const manifest = await xml2js.parseStringPromise(initialFileContent); + + // Keep only namespaces + if ((options.keepOnlyNamespaces || []).length > 0) { + uxLog("log", this, c.grey(`Keeping items from namespaces ${options.keepOnlyNamespaces.join(',')} ...`)); + manifest.Package.types = manifest.Package.types.map((type: any) => { + type.members = type.members.filter((member: string) => { + const containsNamespace = options.keepOnlyNamespaces.filter((ns: string) => member.startsWith(ns) || member.includes(`${ns}__`)).length > 0; + if (containsNamespace) { + return true; + } + return false; + }); + return type; + }); + } + // Remove namespaces if ((options.removeNamespaces || []).length > 0) { - uxLog(this, c.grey(`Removing items from namespaces ${options.removeNamespaces.join(",")} ...`)); + uxLog("log", this, c.grey(`Removing items from namespaces ${options.removeNamespaces.join(',')} ...`)); manifest.Package.types = manifest.Package.types.map((type: any) => { type.members = 
type.members.filter((member: string) => { const startsWithNamespace = options.removeNamespaces.filter((ns: string) => member.startsWith(ns)).length > 0; if (startsWithNamespace) { - const splits = member.split("."); + const splits = member.split('.'); if ( splits.length === 2 && - (((splits[1].match(/__/g) || []).length == 1 && splits[1].endsWith("__c")) || (splits[1].match(/__/g) || []).length == 0) + (((splits[1].match(/__/g) || []).length == 1 && splits[1].endsWith('__c')) || + (splits[1].match(/__/g) || []).length == 0) ) { // Keep ns__object__c.field__c and ns__object.stuff return true; @@ -777,7 +916,7 @@ export async function filterPackageXml( }); if (destructiveTypes.length > 0) { type.members = type.members.filter((member: string) => { - return destructiveTypes[0].members.filter((destructiveMember: string) => destructiveMember === member).length === 0; + return shouldRetainMember(destructiveTypes[0].members, member); }); } return type; @@ -785,24 +924,33 @@ export async function filterPackageXml( .filter((type: any) => { // Remove types with wildcard const wildcardDestructiveTypes = destructiveManifest.Package.types.filter((destructiveType: any) => { - return destructiveType.name[0] === type.name[0] && destructiveType.members.length === 1 && destructiveType.members[0] === "*"; + return ( + destructiveType.name[0] === type.name[0] && + destructiveType.members.length === 1 && + destructiveType.members[0] === '*' + ); }); if (wildcardDestructiveTypes.length > 0) { - uxLog(this, c.grey(`Removed ${type.name[0]} type`)); + uxLog("log", this, c.grey(`Removed ${type.name[0]} type`)); } return wildcardDestructiveTypes.length === 0; }); } // Remove standard objects if (options.removeStandard) { + const customFields: Array = manifest.Package.types.filter((t: any) => t.name[0] === 'CustomField')?.[0]?.members || []; manifest.Package.types = manifest.Package.types.map((type: any) => { - if (["CustomObject"].includes(type.name[0])) { - type.members = 
type.members.filter((member: string) => { - return member.endsWith("__c"); + if (['CustomObject'].includes(type.name[0])) { + type.members = type.members.filter((customObjectName: string) => { + // If a custom field is defined on the standard object, keep the standard object + if (customFields.some((field: string) => field.startsWith(customObjectName + '.'))) { + return true; + } + return customObjectName.endsWith('__c'); }); } type.members = type.members.filter((member: string) => { - return !member.startsWith("standard__"); + return !member.startsWith('standard__'); }); return type; }); @@ -816,21 +964,21 @@ export async function filterPackageXml( // Remove metadata types (named, and empty ones) manifest.Package.types = manifest.Package.types.filter((type: any) => { if (options.keepMetadataTypes.includes(type.name[0])) { - uxLog(this, c.grey("kept " + type.name[0])); + uxLog("log", this, c.grey('kept ' + type.name[0])); return true; } - uxLog(this, c.grey("removed " + type.name[0])); + uxLog("log", this, c.grey('removed ' + type.name[0])); return false; }); } // Remove metadata types (named, and empty ones) manifest.Package.types = manifest.Package.types.filter( - (type: any) => !(options.removeMetadatas || []).includes(type.name[0]) && (type?.members?.length || 0) > 0, + (type: any) => !(options.removeMetadatas || []).includes(type.name[0]) && (type?.members?.length || 0) > 0 ); - const builder = new xml2js.Builder({ renderOpts: { pretty: true, indent: " ", newline: "\n" } }); + const builder = new xml2js.Builder({ renderOpts: { pretty: true, indent: ' ', newline: '\n' } }); const updatedFileContent = builder.buildObject(manifest); - if (updatedFileContent !== initialFileContent) { + if (updatedFileContent !== initialFileContent.toString()) { await writeXmlFile(packageXmlFileOut, manifest); updated = true; if (packageXmlFile !== packageXmlFileOut) { @@ -845,9 +993,30 @@ export async function filterPackageXml( }; } +function shouldRetainMember(destructiveMembers: 
string[], member: string) { + if (destructiveMembers.length === 1 && destructiveMembers[0] === '*') { + // Whole type will be filtered later in the code + return true; + } + const matchesWithItemsToExclude = destructiveMembers.filter((destructiveMember: string) => { + if (destructiveMember === member) { + return true; + } + // Handle cases wild wildcards, like pi__* , *__dlm , or begin*end + if (destructiveMember.includes('*')) { + const regex = new RegExp(destructiveMember.replace(/\*/g, '.*')); + if (regex.test(member)) { + return true; + } + } + return false; + }); + return matchesWithItemsToExclude.length === 0; +} + // Catch matches in files according to criteria export async function catchMatches(catcher: any, file: string, fileText: string, commandThis: any) { - const matchResults = []; + const matchResults: any[] = []; if (catcher.regex) { // Check if there are matches const matches = await countRegexMatches(catcher.regex, fileText); @@ -861,7 +1030,7 @@ export async function catchMatches(catcher: any, file: string, fileText: string, detail[detailCrit.name] = detailCritVal; } } - const catcherLabel = catcher.regex ? `regex ${catcher.regex.toString()}` : "ERROR"; + const catcherLabel = catcher.regex ? 
`regex ${catcher.regex.toString()}` : 'ERROR'; matchResults.push({ fileName, fileText, @@ -872,7 +1041,11 @@ export async function catchMatches(catcher: any, file: string, fileText: string, catcherLabel, }); if (commandThis.debug) { - uxLog(commandThis, `[${fileName}]: Match [${matches}] occurrences of [${catcher.type}/${catcher.name}] with catcher [${catcherLabel}]`); + uxLog( + "other", + commandThis, + `[${fileName}]: Match [${matches}] occurrences of [${catcher.type}/${catcher.name}] with catcher [${catcherLabel}]` + ); } } } @@ -881,19 +1054,19 @@ export async function catchMatches(catcher: any, file: string, fileText: string, // Count matches of a regex export async function countRegexMatches(regex: RegExp, text: string): Promise { - return ((text || "").match(regex) || []).length; + return ((text || '').match(regex) || []).length; } // Get all captured groups of a regex in a string export async function extractRegexGroups(regex: RegExp, text: string): Promise { - const matches = ((text || "").match(regex) || []).map((e) => e.replace(regex, "$1").trim()); + const matches = ((text || '').match(regex) || []).map((e) => e.replace(regex, '$1').trim()); return matches; // return ((text || '').matchAll(regex) || []).map(item => item.trim()); } export async function extractRegexMatches(regex: RegExp, text: string): Promise { let m; - const matchStrings = []; + const matchStrings: any[] = []; while ((m = regex.exec(text)) !== null) { // This is necessary to avoid infinite loops with zero-width matches if (m.index === regex.lastIndex) { @@ -911,14 +1084,14 @@ export async function extractRegexMatches(regex: RegExp, text: string): Promise< export async function extractRegexMatchesMultipleGroups(regex: RegExp, text: string): Promise { let m; - const matchResults = []; + const matchResults: any[] = []; while ((m = regex.exec(text)) !== null) { // This is necessary to avoid infinite loops with zero-width matches if (m.index === regex.lastIndex) { regex.lastIndex++; } // 
Iterate thru the regex matches - const matchGroups = []; + const matchGroups: any[] = []; m.forEach((match) => { matchGroups.push(match); }); @@ -935,7 +1108,7 @@ export function arrayUniqueByKey(array, key: string) { export function arrayUniqueByKeys(array, keysIn: string[]) { const keys = new Set(); const buildKey = (el) => { - return keysIn.map((key) => el[key]).join(";"); + return keysIn.map((key) => el[key]).join(';'); }; return array.filter((el) => !keys.has(buildKey(el)) && keys.add(buildKey(el))); } @@ -945,12 +1118,12 @@ export async function generateReports( resultSorted: any[], columns: any[], commandThis: any, - options: any = { logFileName: null, logLabel: "Generated report files:" }, + options: any = { logFileName: null, logLabel: 'Report' } ): Promise { - const logLabel = options.logLabel || "Generated report files:"; + const logLabel = options.logLabel || 'Report'; let logFileName = options.logFileName || null; if (!logFileName) { - logFileName = "sfdx-hardis-" + commandThis.id.substr(commandThis.id.lastIndexOf(":") + 1); + logFileName = 'sfdx-hardis-' + commandThis.id.substr(commandThis.id.lastIndexOf(':') + 1); } const dateSuffix = new Date().toJSON().slice(0, 10); const reportDir = await getReportDirectory(); @@ -958,43 +1131,158 @@ export async function generateReports( const reportFileExcel = path.resolve(`${reportDir}/${logFileName}-${dateSuffix}.xls`); await fs.ensureDir(path.dirname(reportFile)); const csv = csvStringify(resultSorted, { - delimiter: ";", + delimiter: ';', header: true, columns, }); - await fs.writeFile(reportFile, csv, "utf8"); + await fs.writeFile(reportFile, csv, 'utf8'); // Trigger command to open CSV file in VsCode extension - WebSocketClient.requestOpenFile(reportFile); + try { + if (!WebSocketClient.isAliveWithLwcUI()) { + WebSocketClient.requestOpenFile(reportFile); + } + WebSocketClient.sendReportFileMessage(reportFile, `${logLabel} (CSV)`, "report"); + } catch (e: any) { + uxLog("warning", commandThis, 
c.yellow(`[sfdx-hardis] Error opening file in VsCode: ${e.message}`)); + } const excel = csvStringify(resultSorted, { - delimiter: "\t", + delimiter: '\t', header: true, columns, }); - await fs.writeFile(reportFileExcel, excel, "utf8"); - uxLog(commandThis, c.cyan(logLabel)); - uxLog(commandThis, c.cyan(`- CSV: ${reportFile}`)); - uxLog(commandThis, c.cyan(`- XLS: ${reportFileExcel}`)); + await fs.writeFile(reportFileExcel, excel, 'utf8'); + WebSocketClient.sendReportFileMessage(reportFileExcel, `${logLabel} (CSV)`, "report"); + uxLog("action", commandThis, c.cyan(logLabel)); + uxLog("log", commandThis, c.grey(c.cyan(`- CSV: ${reportFile}`))); + uxLog("log", commandThis, c.grey(c.cyan(`- XLS: ${reportFileExcel}`))); return [ - { type: "csv", file: reportFile }, - { type: "xls", file: reportFileExcel }, + { type: 'csv', file: reportFile }, + { type: 'xls', file: reportFileExcel }, ]; } -export function uxLog(commandThis: any, text: string) { - text = text.includes("[sfdx-hardis]") ? text : "[sfdx-hardis]" + (text.startsWith("[") ? "" : " ") + text; +export function uxLog(logType: LogType, commandThis: any, textInit: string, sensitive = false) { + const text = textInit.includes('[sfdx-hardis]') ? textInit : '[sfdx-hardis]' + (textInit.startsWith('[') ? 
'' : ' ') + textInit; + // Console log if (commandThis?.ux) { commandThis.ux.log(text); - } else if (!(globalThis.processArgv || process.argv).includes("--json")) { + } else if (!(globalThis?.processArgv || process?.argv || "").includes('--json')) { console.log(text); } + // File log if (globalThis.hardisLogFileStream) { - globalThis.hardisLogFileStream.write(stripAnsi(text) + "\n"); + if (sensitive) { + globalThis.hardisLogFileStream.write('OBFUSCATED LOG LINE\n'); + } + else { + globalThis.hardisLogFileStream.write(stripAnsi(text) + '\n'); + } + } + // VsCode sfdx-hardis log + if (WebSocketClient.isAlive() && !text.includes('[command]') && !text.includes('[NotifProvider]')) { + if (sensitive && !text.includes('SFDX_CLIENT_ID_') && !text.includes('SFDX_CLIENT_KEY_')) { + WebSocketClient.sendCommandLogLineMessage('OBFUSCATED LOG LINE'); + } + else { + let isQuestion = false; + let textToSend = textInit; + if (textInit.includes("Look up in VsCode")) { + // Remove "Look up in VsCode" and everything after + textToSend = textInit.split("Look up in VsCode")[0].trim(); + isQuestion = true; + } + + // Send message to WebSocket client + if (logType !== "other") { + WebSocketClient.sendCommandLogLineMessage(textToSend, logType, isQuestion); + } + } + } +} + +export function uxLogTable(commandThis: any, tableData: any[], columnsOrder: string[] = []) { + // Build a table string as tableData is an array of objects compliant with console.table + // This string will be used to display the table in the console + if (!tableData || tableData.length === 0) { + return; + } + let columns: string[]; + let displayData = tableData; + if (columnsOrder && columnsOrder.length > 0) { + columns = columnsOrder; + // Rebuild each row to contain only the columns in columnsOrder, in order + displayData = tableData.map(row => { + const newRow: any = {}; + for (const col of columnsOrder) { + newRow[col] = row[col] ?? 
''; + } + return newRow; + }); + } else { + columns = Object.keys(tableData[0]); + displayData = tableData; } + // Compute column widths based on the longest value in each column + const colWidths = columns.map(col => + Math.max( + col.length, + ...displayData.map(row => String(row[col] ?? '').replace(/\n/g, ' ').length) + ) + ); + // Build header + const header = columns + .map((col, i) => c.bold(col.padEnd(colWidths[i]))) + .join(' | '); + // Build separator + const separator = colWidths.map(w => '-'.repeat(w)).join('-|-'); + // Build rows + const rows = displayData.map(row => + columns + .map((col, i) => { + let val = row[col] ?? ''; + if (typeof val === 'boolean') { + val = bool2emoji(val); + } + return String(val).replace(/\n/g, ' ').padEnd(colWidths[i]); + }) + .join(' | ') + ); + const tableString = [header, separator, ...rows].join('\n'); + uxLog("other", commandThis, c.italic("\n" + tableString)); + // Send table to WebSocket client + if (WebSocketClient.isAliveWithLwcUI()) { + const maxLen = 20; + let sendRows = displayData; + if (displayData.length > maxLen) { + sendRows = displayData.slice(0, maxLen); + sendRows.push({ + sfdxHardisTruncatedMessage: `Truncated to the first ${maxLen} lines on ${displayData.length} total lines, see full report for more details.`, + returnedNumber: maxLen, + totalNumber: displayData.length + }); + } + WebSocketClient.sendCommandLogLineMessage(JSON.stringify(sendRows), 'table'); + } + +} + +export function humanizeObjectKeys(obj: object) { + const objWithHumanizedKeys = Object.keys(obj || {}).map(key => { + const keyTitle = key + .replace(/([A-Z])/g, ' $1') // Add space before capital letters + .replace(/^./, str => str.toUpperCase()); // Capitalize the first letter + return { Key: keyTitle, Value: obj[key] }; + }); + return objWithHumanizedKeys; +} + +export function bool2emoji(bool: boolean): string { + return bool ? 
"✅" : "⬜" } // Caching methods -const SFDX_LOCAL_FOLDER = "/root/.sfdx"; -const TMP_COPY_FOLDER = ".cache/sfdx-hardis/.sfdx"; +const SFDX_LOCAL_FOLDER = '/root/.sfdx'; +const TMP_COPY_FOLDER = '.cache/sfdx-hardis/.sfdx'; let RESTORED = false; // Put local sfdx folder in tmp/sfdx-hardis-local for CI tools needing cache/artifacts to be within repo dir @@ -1008,9 +1296,9 @@ export async function copyLocalSfdxInfo() { dereference: true, overwrite: true, }); - // uxLog(this, `[cache] Copied sfdx cache in ${TMP_COPY_FOLDER} for later reuse`); + // uxLog("other", this, `[cache] Copied SF CLI cache in ${TMP_COPY_FOLDER} for later reuse`); // const files = fs.readdirSync(TMP_COPY_FOLDER, {withFileTypes: true}).map(item => item.name); - // uxLog(this, '[cache]' + JSON.stringify(files)); + // uxLog("other", this, '[cache]' + JSON.stringify(files)); } } @@ -1024,88 +1312,131 @@ export async function restoreLocalSfdxInfo() { dereference: true, overwrite: false, }); - // uxLog(this, '[cache] Restored cache for CI'); + // uxLog("other", this, '[cache] Restored cache for CI'); // const files = fs.readdirSync(SFDX_LOCAL_FOLDER, {withFileTypes: true}).map(item => item.name); - // uxLog(this, '[cache]' + JSON.stringify(files)); + // uxLog("other", this, '[cache]' + JSON.stringify(files)); RESTORED = true; } } // Generate SSL certificate in temporary folder and copy the key in project directory -export async function generateSSLCertificate(branchName: string, folder: string, commandThis: any, conn: any, options: any) { - uxLog(commandThis, "Generating SSL certificate..."); +export async function generateSSLCertificate( + branchName: string, + folder: string, + commandThis: any, + conn: any, + options: any +) { + uxLog("action", commandThis, c.cyan('Generating SSL certificate...')); const tmpDir = await createTempDir(); const prevDir = process.cwd(); process.chdir(tmpDir); - const sslCommand = - 'openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj 
"/C=GB/ST=Paris/L=Paris/O=Hardis Group/OU=sfdx-hardis/CN=hardis-group.com"'; - await execCommand(sslCommand, this, { output: true, fail: true }); - await execCommand("openssl x509 -req -sha256 -days 3650 -in server.csr -signkey server.key -out server.crt", this, { - output: true, - fail: true, - }); + try { + const sslCommand = + 'openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj "/C=GB/ST=Paris/L=Paris/O=Hardis Group/OU=sfdx-hardis/CN=hardis-group.com"'; + await execCommand(sslCommand, this, { output: true, fail: true }); + await execCommand('openssl x509 -req -sha256 -days 3650 -in server.csr -signkey server.key -out server.crt', this, { + output: true, + fail: true, + }); + } catch (e) { + uxLog("error", commandThis, c.red(`Error generating SSL certificate, please ensure you have openssl installed +- It is included in Git Bash for Windows +- You can also install it using "choco install openssl" if you have chocolatey installed +- If it is installed, make sure that paths to bash and openssl are available in PATH +- If you still have issues, run sfdx-hardis command in Git Bash terminal +`)); + throw e; + } process.chdir(prevDir); // Copy certificate key in local project await fs.ensureDir(folder); const targetKeyFile = path.join(folder, `${branchName}.key`); - await fs.copy(path.join(tmpDir, "server.key"), targetKeyFile); + await fs.copy(path.join(tmpDir, 'server.key'), targetKeyFile); const encryptionKey = await encryptFile(targetKeyFile); + WebSocketClient.sendReportFileMessage(targetKeyFile, `Encrypted SSL certificate key for branch ${branchName}`, 'report'); // Copy certificate file in user home project const crtFile = path.join(os.homedir(), `${branchName}.crt`); - await fs.copy(path.join(tmpDir, "server.crt"), crtFile); + await fs.copy(path.join(tmpDir, 'server.crt'), crtFile); // delete temporary cert folder await fs.remove(tmpDir); // Generate random consumer key for Connected app - const consumerKey = 
crypto.randomBytes(256).toString("base64").substr(0, 119); + const consumerKey = crypto.randomBytes(256).toString('base64').substr(0, 119); // Ask user if he/she wants to create connected app const confirmResponse = await prompts({ - type: "confirm", - name: "value", + type: 'confirm', + name: 'value', initial: true, - message: c.cyanBright("Do you want sfdx-hardis to configure the SFDX connected app on your org ? (say yes if you don't know)"), + message: c.cyanBright( + "Do you want sfdx-hardis to configure the SFDX connected app on your org ?" + ), + description: 'Creates a Connected App required for CI/CD authentication. Choose yes if you are unsure.', }); if (confirmResponse.value === true) { + uxLog("action", commandThis, c.cyan('Please configure both below variables in your CI/CD platform.\n(expand section below to see values)')); uxLog( + "log", commandThis, - c.cyanBright( - `You must configure CI variable ${c.green(c.bold(`SFDX_CLIENT_ID_${branchName.toUpperCase()}`))} with value ${c.bold(c.green(consumerKey))}`, - ), + c.grey( + c.cyanBright( + `${c.green( + c.bold(`- SFDX_CLIENT_ID_${branchName.toUpperCase()}`) + )} with value ${c.bold(c.green(consumerKey))}` + )), + true ); uxLog( + "log", commandThis, - c.cyanBright( - `You must configure CI variable ${c.green(c.bold(`SFDX_CLIENT_KEY_${branchName.toUpperCase()}`))} with value ${c.bold( - c.green(encryptionKey), - )}`, - ), + c.grey(c.cyanBright( + `${c.green( + c.bold(`- SFDX_CLIENT_KEY_${branchName.toUpperCase()}`) + )} with value ${c.bold(c.green(encryptionKey))}` + )), + true ); - uxLog(commandThis, c.yellow("Help to configure CI variables are here: https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/")); + uxLog( + "log", + commandThis, + c.grey(c.yellow(`Help to configure CI variables is here: ${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`)) + ); + WebSocketClient.sendReportFileMessage(`${CONSTANTS.DOC_URL_ROOT}/salesforce-ci-cd-setup-auth/`, "Help to configure CI variables", 
"docUrl"); await prompts({ - type: "confirm", - message: c.cyanBright("Hit ENTER when the CI/CD variables are set (check info in the console below)"), + type: 'confirm', + message: c.cyanBright('Please confirm when variables have been set'), + description: 'Confirm when you have configured the required CI/CD environment variables in your deployment platform', }); + // Build default app name from branch name by replacing all non-alphanumeric characters with empty string + let appNameDflt = branchName.replace(/[^a-zA-Z0-9]/g, '_').toLowerCase(); + if (appNameDflt.length > 20) { + appNameDflt = appNameDflt.substring(0, 20); + } // Request info for deployment const promptResponses = await prompts([ { - type: "text", - name: "appName", - initial: "sfdxhardis" + Math.floor(Math.random() * 9) + 1, - message: c.cyanBright("How would you like to name the Connected App (ex: sfdx_hardis) ?"), + type: 'text', + name: 'appName', + initial: 'sfdxhardis' + appNameDflt, + message: c.cyanBright('How would you like to name the Connected App ?'), + description: 'Name for the Connected App that will be created in your Salesforce org', + placeholder: 'Ex: sfdx_hardis', }, ]); - const contactEmail = await promptUserEmail("Enter a contact email for the Connect App (ex: nicolas.vuillamy@cloudity.com)"); + const contactEmail = await promptUserEmail( + 'Enter a contact email for the Connect App (ex: nicolas.vuillamy@cloudity.com)' + ); const profile = await promptProfiles(conn, { multiselect: false, - message: "What profile will be used for the connected app ? (ex: System Administrator)", - initialSelection: ["System Administrator", "Administrateur Système"], + message: 'What profile will be used for the connected app ? 
(ex: System Administrator)', + initialSelection: ['System Administrator', 'Administrateur Système'], }); - const crtContent = await fs.readFile(crtFile, "utf8"); + const crtContent = await fs.readFile(crtFile, 'utf8'); // Build ConnectedApp metadata const connectedAppMetadata = ` ${contactEmail} - + http://localhost:1717/OauthRedirect ${crtContent} @@ -1122,7 +1453,7 @@ export async function generateSSLCertificate(branchName: string, folder: string, ENFORCE specific_lifetime:3:HOURS - ${profile || "System Administrator"} + ${profile || 'System Administrator'} `; const packageXml = ` @@ -1131,96 +1462,261 @@ export async function generateSSLCertificate(branchName: string, folder: string, ${promptResponses.appName} ConnectedApp - ${CONSTANTS.API_VERSION} + ${getApiVersion()} `; // create metadata folder const tmpDirMd = await createTempDir(); - const connectedAppDir = path.join(tmpDirMd, "connectedApps"); + const connectedAppDir = path.join(tmpDirMd, 'connectedApps'); await fs.ensureDir(connectedAppDir); - await fs.writeFile(path.join(tmpDirMd, "package.xml"), packageXml); + await fs.writeFile(path.join(tmpDirMd, 'package.xml'), packageXml); await fs.writeFile(path.join(connectedAppDir, `${promptResponses.appName}.connectedApp`), connectedAppMetadata); // Deploy metadatas try { - uxLog(commandThis, c.cyan(`Deploying Connected App ${c.bold(promptResponses.appName)} into target org ${options.targetUsername || ""} ...`)); - const deployRes = await deployMetadatas({ + uxLog( + "action", + commandThis, + c.cyan( + `Deploying Connected App ${c.bold(promptResponses.appName)} into target org ${options.targetUsername || '' + } ...` + ) + ); + // Replace sensitive info in connectedAppMetadata for logging + const connectedAppMetadataForLog = connectedAppMetadata + .replace(new RegExp(consumerKey, 'g'), '***CONSUMERKEY***') + .replace(new RegExp(crtContent, 'g'), '***CERTIFICATE***'); + + uxLog("log", commandThis, c.grey(`Connected App metadatas 
XML:\n${connectedAppMetadataForLog}`)); + uxLog( + "log", + commandThis, + c.grey(c.yellow( + `If you have an upload error, PLEASE READ THE MESSAGE AFTER, that will explain how to manually create the connected app, and don't forget the CERTIFICATE file :)` + )) + ); + const isProduction = await isProductionOrg(options.targetUsername || null, { conn: conn }); + const deployParams: any = { deployDir: tmpDirMd, - testlevel: branchName.includes("production") ? "RunLocalTests" : "NoTestRun", - soap: true, + testlevel: isProduction ? 'RunLocalTests' : 'NoTestRun', targetUsername: options.targetUsername ? options.targetUsername : null, - }); - console.assert(deployRes.status === 0, c.red("[sfdx-hardis] Failed to deploy metadatas")); - uxLog(commandThis, c.cyan(`Successfully deployed ${c.green(promptResponses.appName)} Connected App`)); + }; + // If is Production org, find the first Apex test class to run + if (isProduction) { + let uniqueTestClass = process.env.SFDX_HARDIS_TECH_DEPLOY_TEST_CLASS || null; + if (!uniqueTestClass) { + const testClasses = await conn.tooling.query( + "SELECT Id, Name FROM ApexClass WHERE Name LIKE '%Test%' OR Name LIKE '%test%' OR Name LIKE '%TEST%' ORDER BY Name LIMIT 1" + ); + if (testClasses.totalSize > 0) { + uniqueTestClass = testClasses.records[0].Name; + } + } + if (uniqueTestClass) { + deployParams.testlevel = 'RunSpecifiedTests'; + deployParams.runTests = [uniqueTestClass]; + uxLog("log", commandThis, c.grey(`Production org detected, will run test class found ${uniqueTestClass} on deployment. 
+If you want to specify a specific test class, set SFDX_HARDIS_TECH_DEPLOY_TEST_CLASS variable`)); + } + } + const deployRes = await deployMetadatas(deployParams); + console.assert(deployRes.status === 0, c.red('[sfdx-hardis] Failed to deploy metadatas')); + uxLog("action", commandThis, c.cyan(`Successfully deployed ${c.green(promptResponses.appName)} Connected App`)); await fs.remove(tmpDirMd); await fs.remove(crtFile); // eslint-disable-next-line @typescript-eslint/no-unused-vars } catch (e) { uxLog( + "error", commandThis, - c.red("Error pushing ConnectedApp metadata. Maybe the app name is already taken ?\nYou may try again with another connected app name"), + c.red( + 'Error pushing ConnectedApp metadata. Maybe the app name is already taken ?\nYou may try again with another connected app name' + ) ); uxLog( + "warning", commandThis, c.yellow(` -${c.bold("MANUAL INSTRUCTIONS")} -If this is a Test class issue (production env), you may have to create manually connected app ${promptResponses.appName}: +${c.bold('MANUAL INSTRUCTIONS')} +If this is a Test class issue (production env), you may have to create manually connected app ${promptResponses.appName + }: - Follow instructions here: https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_auth_connected_app.htm - Use certificate ${c.bold(crtFile)} in "Use Digital Signature section" (delete the file from your computer after !) - Once created, update CI/CD variable ${c.green( - c.bold(`SFDX_CLIENT_ID_${branchName.toUpperCase()}`), - )} with the ConsumerKey of the newly created connected app`), + c.bold(`SFDX_CLIENT_ID_${branchName.toUpperCase()}`) + )} with the ConsumerKey of the newly created connected app`) ); await prompts({ - type: "confirm", - message: c.cyanBright("You need to manually configure the connected app. Follow the MANUAL INSTRUCTIONS in the console, then continue here"), + type: 'confirm', + message: c.cyanBright( + 'You need to manually configure the connected app. 
Follow the MANUAL INSTRUCTIONS above, then continue here' + ), + description: 'Confirm when you have completed the manual Connected App configuration steps', }); } } else { // Tell infos to install manually - uxLog(commandThis, c.yellow("Now you can configure the sfdx connected app")); + uxLog("action", commandThis, c.cyan('Now you can configure the SF CLI connected app')); + uxLog( + "log", + commandThis, + c.grey( + `Follow instructions here: ${c.bold( + 'https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_auth_connected_app.htm' + )}` + ) + ); uxLog( + "log", commandThis, - `Follow instructions here: ${c.bold( - "https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_auth_connected_app.htm", - )}`, + c.grey( + `Use ${c.green(crtFile)} as certificate on Connected App configuration page, ${c.bold( + `then delete ${crtFile} for security` + )}` + ) ); uxLog( + "log", commandThis, - `Use ${c.green(crtFile)} as certificate on Connected App configuration page, ${c.bold(`then delete ${crtFile} for security`)}`, + c.grey( + `- configure CI variable ${c.green( + `SFDX_CLIENT_ID_${branchName.toUpperCase()}` + )} with value of ConsumerKey on Connected App configuration page` + ) ); uxLog( + "log", commandThis, - `- configure CI variable ${c.green(`SFDX_CLIENT_ID_${branchName.toUpperCase()}`)} with value of ConsumerKey on Connected App configuration page`, + c.grey( + `- configure CI variable ${c.green(`SFDX_CLIENT_KEY_${branchName.toUpperCase()}`)} with value ${c.green( + encryptionKey + )} key` + ) ); - uxLog(commandThis, `- configure CI variable ${c.green(`SFDX_CLIENT_KEY_${branchName.toUpperCase()}`)} with value ${c.green(encryptionKey)} key`); } } export async function isMonitoringJob() { - if (process.env.SFDX_HARDIS_MONITORING === "true") { + if (process.env.SFDX_HARDIS_MONITORING === 'true') { return true; } if (!isCI) { return false; } - const repoName = await git().revparse("--show-toplevel"); - if (isCI && 
repoName.includes("monitoring")) { + const repoName = await git().revparse('--show-toplevel'); + if (isCI && repoName.includes('monitoring')) { return true; } return false; } export function getNested(nestedObj, pathArr) { - return pathArr.reduce((obj, key) => (obj && obj[key] !== "undefined" ? obj[key] : undefined), nestedObj); + return pathArr.reduce((obj, key) => (obj && obj[key] !== 'undefined' ? obj[key] : undefined), nestedObj); } const ansiPattern = [ - "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", - "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))", -].join("|"); -const ansiRegex = new RegExp(ansiPattern, "g"); + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))', +].join('|'); +const ansiRegex = new RegExp(ansiPattern, 'g'); export function stripAnsi(str: string) { - return str.replace(ansiRegex, ""); + if (typeof str !== 'string') { + uxLog("warning", this, c.yellow('Warning: stripAnsi expects a string')); + return ''; + } + return str.replace(ansiRegex, ''); +} + +export function findJsonInString(inputString: string) { + // Regular expression to match a JSON object + const jsonMatch = stripAnsi(inputString).match(/\{[\s\S]*\}|\[[\s\S]*\]/); + if (jsonMatch) { + try { + const jsonObject = JSON.parse(jsonMatch[0]); // Extract and parse JSON + return jsonObject; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + } catch (err) { + return null; + } + } + return null; +} + +export function replaceJsonInString(inputString: string, jsonObject: any): string { + // Regular expression to match a JSON object + const jsonMatch = stripAnsi(inputString).match(/\{[\s\S]*\}|\[[\s\S]*\]/); + if (jsonMatch) { + try { + const jsonString = JSON.stringify(jsonObject, null, 2); + return stripAnsi(inputString).replace(jsonMatch[0], 
jsonString); + } catch (err: any) { + uxLog("warning", this, c.yellow('Warning: unable to replace JSON in string:' + err.message)); + return inputString; + } + } + uxLog("warning", this, c.yellow('Warning: unable to find json to replace in string')); + return inputString; +} + +// Ugly hack but no choice +// It happens that in case of huge logs, process.exit triggers a blocking error. +// Remove them, as anyway we want to stop the process. +export function killBoringExitHandlers() { + const listeners = process.listeners('exit'); + for (const listener of listeners) { + if (listener.toString().includes("function onExit ()")) { + process.removeListener('exit', listener); + } + } +} + +export async function isDockerRunning(): Promise { + try { + await exec("docker info"); + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + catch (e) { + return false; + } + return true; +} + +export function sortCrossPlatform(arr: any[]) { + return arr.sort((a, b) => { + // Normalize to string in case elements are not strings + const strA = String(a).normalize('NFD'); + const strB = String(b).normalize('NFD'); + + // 1. Base comparison: case-insensitive, accent-insensitive + const baseCompare = strA.localeCompare(strB, 'en', { sensitivity: 'base' }); + if (baseCompare !== 0) return baseCompare; + + // 2. 
Tie-breaker: uppercase before lowercase + const isAUpper = strA[0] === strA[0].toUpperCase(); + const isBUpper = strB[0] === strB[0].toUpperCase(); + + if (isAUpper && !isBUpper) return -1; + if (!isAUpper && isBUpper) return 1; + + return 0; + }); +} + +export function getExecutionContext(): "web" | "local" { + const env = process.env; + // GitHub Codespaces / github.dev + if (env.CODESPACES || env.GITHUB_CODESPACE_TOKEN) { + return "web"; + } + // Salesforce Code Builder (commonly sets Salesforce-specific vars) + if (env.CODE_BUILDER_URI) { + return "web"; + } + // Check for containerized/cloud workspaces + if (env.CLOUD_SHELL || env.CLOUD_ENV) { + return "web"; + } + // Default: assume local + return "local"; } diff --git a/src/common/utils/limitUtils.ts b/src/common/utils/limitUtils.ts new file mode 100644 index 000000000..4fabeb435 --- /dev/null +++ b/src/common/utils/limitUtils.ts @@ -0,0 +1,221 @@ +// External Libraries and Node.js Modules +import c from 'chalk'; + +// Salesforce Specific Libraries +import { Connection, SfError } from '@salesforce/core'; + +// Project Specific Utilities +import { uxLog } from './index.js'; + +// Optimized API Limits Management System +export class ApiLimitsManager { + private conn: Connection; + private commandThis: any; + + // Caching system + private cachedLimits: any = null; + private lastRefreshTime: number = 0; + private cacheDuration: number = 5 * 60 * 1000; // 5 minutes + + // Local tracking counters + private localRestApiCalls: number = 0; + private localBulkApiCalls: number = 0; + + // Base limits from Salesforce + private baseRestApiUsed: number = 0; + private baseRestApiLimit: number = 0; + private baseBulkApiUsed: number = 0; + private baseBulkApiLimit: number = 0; + + // Thresholds for API management + private readonly WARNING_THRESHOLD = 70; // Force refresh at 70% + private readonly DANGER_THRESHOLD = 80; // Stop operations at 80% + + constructor(conn: Connection, commandThis: any) { + this.conn = conn; + 
this.commandThis = commandThis; + } + + // Initialize the limits manager with initial API limits data + async initialize(): Promise { + await this.refreshLimits(); // Initial refresh + } + + // Refresh limits from Salesforce + private async refreshLimits(): Promise { + const now = Date.now(); + + try { + uxLog("log", this.commandThis, c.grey(`Refreshing API limits from Salesforce...`)); + + // Fetch fresh limits from Salesforce + this.cachedLimits = await this.conn.limits(); + + if (!this.cachedLimits) { + throw new SfError("Unable to retrieve API limit information from Salesforce org."); + } + + // Extract REST API limits + if (!this.cachedLimits.DailyApiRequests) { + throw new SfError("DailyApiRequests limit not available from Salesforce org."); + } + + this.baseRestApiUsed = this.cachedLimits.DailyApiRequests.Max - this.cachedLimits.DailyApiRequests.Remaining; + this.baseRestApiLimit = this.cachedLimits.DailyApiRequests.Max; + + // Extract Bulk API v2 limits + if (!this.cachedLimits.DailyBulkV2QueryJobs) { + throw new SfError("DailyBulkV2QueryJobs limit not available from Salesforce org."); + } + + this.baseBulkApiUsed = this.cachedLimits.DailyBulkV2QueryJobs.Max - this.cachedLimits.DailyBulkV2QueryJobs.Remaining; + this.baseBulkApiLimit = this.cachedLimits.DailyBulkV2QueryJobs.Max; + + // Reset local counters on fresh data + this.localRestApiCalls = 0; + this.localBulkApiCalls = 0; + this.lastRefreshTime = now; + + uxLog("success", this.commandThis, + `API Limits refreshed - REST: ${this.baseRestApiUsed}/${this.baseRestApiLimit}, Bulk: ${this.baseBulkApiUsed}/${this.baseBulkApiLimit}` + ); + + } catch (error: any) { + if (error instanceof SfError) throw error; + throw new SfError(`Failed to refresh API limits: ${error?.message || 'Unknown error'}`); + } + } + + // Track API call and check if we need to wait or refresh + async trackApiCall(apiType: 'REST' | 'BULK'): Promise { + // Increment local counter + if (apiType === 'REST') { + this.localRestApiCalls++; + 
} else { + this.localBulkApiCalls++; + } + + // Check if cache has expired (5 minutes) + const now = Date.now(); + const cacheAge = now - this.lastRefreshTime; + const cacheExpired = cacheAge >= this.cacheDuration; + + if (cacheExpired) { + await this.refreshLimits(); // Use smart caching (no force) + } + + // Calculate current usage after potential refresh + const currentRestUsage = this.baseRestApiUsed + this.localRestApiCalls; + const currentBulkUsage = this.baseBulkApiUsed + this.localBulkApiCalls; + const restPercent = (currentRestUsage / this.baseRestApiLimit) * 100; + const bulkPercent = (currentBulkUsage / this.baseBulkApiLimit) * 100; + + // Check if we need to wait due to danger threshold + if (apiType === 'REST' && restPercent >= this.DANGER_THRESHOLD) { + await this.waitForLimitReset('REST', restPercent); + } + + if (apiType === 'BULK' && bulkPercent >= this.DANGER_THRESHOLD) { + await this.waitForLimitReset('BULK', bulkPercent); + } + } // Wait for API limits to reset + private async waitForLimitReset(apiType: 'REST' | 'BULK', currentPercent: number): Promise { + const WAIT_INTERVAL = 300; // 5 minutes + const MAX_CYCLES = 12; // 1 hour max + + uxLog("warning", this.commandThis, + c.yellow(`${apiType} API at ${currentPercent.toFixed(1)}%. Waiting for limits to reset...`) + ); + + for (let cycle = 0; cycle < MAX_CYCLES; cycle++) { + uxLog("action", this.commandThis, + c.cyan(`Waiting ${WAIT_INTERVAL}s for ${apiType} API reset (${cycle + 1}/${MAX_CYCLES})...`) + ); + + // Wait in 1-second intervals + for (let i = 0; i < WAIT_INTERVAL; i++) { + await new Promise(resolve => setTimeout(resolve, 1000)); + } + + // Check if limits have reset + await this.refreshLimits(); + + const currentUsage = apiType === 'REST' + ? this.baseRestApiUsed + this.localRestApiCalls + : this.baseBulkApiUsed + this.localBulkApiCalls; + const limit = apiType === 'REST' ? 
this.baseRestApiLimit : this.baseBulkApiLimit; + const percent = (currentUsage / limit) * 100; + + if (percent < this.WARNING_THRESHOLD) { + uxLog("success", this.commandThis, + c.green(`${apiType} API usage dropped to ${percent.toFixed(1)}%. Resuming operations.`) + ); + return; + } + } + + throw new SfError(`${apiType} API limits did not reset after ${MAX_CYCLES * WAIT_INTERVAL / 60} minutes.`); + } + + // Get current API usage status for display + getUsageStatus(): { rest: number; bulk: number; message: string } { + const currentRestUsage = this.baseRestApiUsed + this.localRestApiCalls; + const currentBulkUsage = this.baseBulkApiUsed + this.localBulkApiCalls; + + const restPercent = (currentRestUsage / this.baseRestApiLimit) * 100; + const bulkPercent = (currentBulkUsage / this.baseBulkApiLimit) * 100; + + return { + rest: restPercent, + bulk: bulkPercent, + message: `[REST: ${restPercent.toFixed(1)}% | Bulk: ${bulkPercent.toFixed(1)}%]` + }; + } + + // Get current usage for API consumption estimation + getCurrentUsage(): { + restUsed: number; + restLimit: number; + bulkUsed: number; + bulkLimit: number; + restRemaining: number; + bulkRemaining: number; + } { + const currentRestUsage = this.baseRestApiUsed + this.localRestApiCalls; + const currentBulkUsage = this.baseBulkApiUsed + this.localBulkApiCalls; + + return { + restUsed: currentRestUsage, + restLimit: this.baseRestApiLimit, + bulkUsed: currentBulkUsage, + bulkLimit: this.baseBulkApiLimit, + restRemaining: this.baseRestApiLimit - currentRestUsage, + bulkRemaining: this.baseBulkApiLimit - currentBulkUsage + }; + } + + // Get final usage for reporting (forces a fresh refresh) + async getFinalUsage(): Promise<{ + restUsed: number; + restLimit: number; + restRemaining: number; + bulkUsed: number; + bulkLimit: number; + bulkRemaining: number; + }> { + await this.refreshLimits(); // Get fresh data + // Try to get fresh limits from Salesforce + const currentLimits = this.cachedLimits; + const restUsed = 
currentLimits.DailyApiRequests.Max - currentLimits.DailyApiRequests.Remaining; + const bulkUsed = currentLimits.DailyBulkV2QueryJobs.Max - currentLimits.DailyBulkV2QueryJobs.Remaining; + + return { + restUsed: restUsed, + restLimit: currentLimits.DailyApiRequests.Max, + restRemaining: currentLimits.DailyApiRequests.Remaining, + bulkUsed: bulkUsed, + bulkLimit: currentLimits.DailyBulkV2QueryJobs.Max, + bulkRemaining: currentLimits.DailyBulkV2QueryJobs.Remaining + }; + } +} \ No newline at end of file diff --git a/src/common/utils/markdownUtils.ts b/src/common/utils/markdownUtils.ts new file mode 100644 index 000000000..88f07bbed --- /dev/null +++ b/src/common/utils/markdownUtils.ts @@ -0,0 +1,47 @@ +import c from "chalk" +import { uxLog } from "./index.js"; +import { mdToPdf } from 'md-to-pdf'; + +export async function generatePdfFileFromMarkdown(markdownFile: string): Promise { + try { + const outputPdfFile = markdownFile.replace('.md', '.pdf'); + await mdToPdf({ path: markdownFile }, { + dest: outputPdfFile, + css: `img { + max-width: 50%; + max-height: 20%; + display: block; + margin: 0 auto; + }`, + stylesheet_encoding: 'utf-8' + }); + uxLog("success", this, c.green(`PDF file generated from ${markdownFile} documentation: ${c.bold(outputPdfFile)}`)); + return outputPdfFile; + } catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating PDF file from ${markdownFile} documentation with CLI: ${e.message}`) + "\n" + c.grey(e.stack)); + return false; + } +} + +// Add a new line before each start of list of items starting by "-" +// If the previous line is already empty, do nothing +// Example before: +// Some line +// - item 1 +// - item 2 +// Example after: +// Some line +// +// - item 1 +// - item 2 +export function formatMarkdownForMkDocs(markdown: string): string { + const lines = markdown.split("\n"); + const formattedLines = lines.map((line, index) => { + if (line.trim().startsWith("-") && (index === 0 || lines[index - 1].trim() !== "")) { + return 
"\n" + line; + } + return line; + }); + const formattedMarkdown = formattedLines.join("\n"); + return formattedMarkdown; +} \ No newline at end of file diff --git a/src/common/utils/mermaidUtils.ts b/src/common/utils/mermaidUtils.ts new file mode 100644 index 000000000..9d67c0dd0 --- /dev/null +++ b/src/common/utils/mermaidUtils.ts @@ -0,0 +1,740 @@ +import c from "chalk" +import fs from 'fs-extra'; +import * as Diff from "diff"; +import * as path from "path"; +import which from "which"; +import { execCommand, git, isDockerRunning, uxLog } from "./index.js"; +import { parseFlow } from "./flowVisualiser/flowParser.js"; +import { getReportDirectory } from "../../config/index.js"; +import moment from "moment"; +import { SfError } from "@salesforce/core"; +import { PACKAGE_ROOT_DIR } from "../../settings.js"; +import { AiProvider } from "../aiProvider/index.js"; +import { UtilsAi } from "../aiProvider/utils.js"; +import { generatePdfFileFromMarkdown } from "../utils/markdownUtils.js"; +import { DocBuilderFlow } from "../docBuilder/docBuilderFlow.js"; +import { includeFromFile } from "../docBuilder/docUtils.js"; + +let IS_MERMAID_AVAILABLE: boolean | null = null; +export async function isMermaidAvailable() { + if (IS_MERMAID_AVAILABLE !== null) { + return IS_MERMAID_AVAILABLE; + } + const isMmdAvailable = await which("mmdc", { nothrow: true }); + IS_MERMAID_AVAILABLE = isMmdAvailable !== null + if (IS_MERMAID_AVAILABLE === false) { + uxLog("warning", this, c.yellow("MermaidJs is not available. To improve performances, please install it by running `npm install @mermaid-js/mermaid-cli --global`")); + } + return IS_MERMAID_AVAILABLE; +} + +let IS_DOCKER_AVAILABLE: boolean | null = null; +export async function isDockerAvailable() { + if (IS_DOCKER_AVAILABLE !== null) { + return IS_DOCKER_AVAILABLE; + } + IS_DOCKER_AVAILABLE = await isDockerRunning(); + if (!IS_DOCKER_AVAILABLE) { + uxLog("warning", this, c.yellow("Docker daemon is not available. 
If you have issues running npm package @mermaid-js/mermaid-cli, please install Docker and start it")); + } + return IS_DOCKER_AVAILABLE; +} + +export async function generateFlowMarkdownFile(flowName: string, flowXml: string, outputFlowMdFile: string, options: { collapsedDetails: boolean, describeWithAi: boolean, flowDependencies: any } = { collapsedDetails: true, describeWithAi: true, flowDependencies: {} }): Promise { + try { + const flowDocGenResult = await parseFlow(flowXml, 'mermaid', { outputAsMarkdown: true, collapsedDetails: options.collapsedDetails }); + let flowMarkdownDoc = flowDocGenResult.uml; + if (options.describeWithAi) { + const docBuilder = new DocBuilderFlow(flowName, flowXml, ""); + docBuilder.markdownDoc = flowMarkdownDoc; + flowMarkdownDoc = await docBuilder.completeDocWithAiDescription(); + } + + // Add link to history flow doc + const historyFlowDoc = path.join("docs", "flows", flowName + "-history.md"); + if (fs.existsSync(historyFlowDoc)) { + const historyLink = `[(_View History_)](${flowName + "-history.md"})`; + if (flowMarkdownDoc.includes("## Flow Diagram") && !flowMarkdownDoc.includes(historyLink)) { + flowMarkdownDoc = flowMarkdownDoc.replace("## Flow Diagram", `## Flow Diagram ${historyLink}`); + } + } + + // Add flow dependencies + const dependencies: string[] = []; + for (const mainFlow of Object.keys(options.flowDependencies)) { + if (options.flowDependencies[mainFlow].includes(flowName)) { + dependencies.push(mainFlow); + } + } + if (dependencies.length > 0) { + flowMarkdownDoc += `\n\n## Dependencies\n\n${dependencies.map(dep => `- [${dep}](${dep}.md)`).join("\n")}\n`; + } + + await fs.writeFile(outputFlowMdFile, flowMarkdownDoc); + uxLog("log", this, c.grey(`Written ${flowName} documentation in ${outputFlowMdFile}`)); + return true; + } catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating Flow ${flowName} documentation: ${e.message}`) + "\n" + c.grey(e.stack)); + return false; + } +} + +export async function 
generateMarkdownFileWithMermaid(outputFlowMdFileIn: string, outputFlowMdFileOut: string, mermaidModes: string[] | null = null, withPdf = false): Promise { + await fs.ensureDir(path.dirname(outputFlowMdFileIn)); + await fs.ensureDir(path.dirname(outputFlowMdFileOut)); + if (withPdf) { + // Force the usage of mermaid CLI so the mermaid code is converted to SVG + mermaidModes = ["cli"]; + } else if (process.env.MERMAID_MODES) { + mermaidModes = process.env.MERMAID_MODES.split(","); + } + else if (mermaidModes === null) { + mermaidModes = ["mermaid", "cli", "docker"]; + } + if (mermaidModes.includes("mermaid")) { + return true; + } + const isDockerAvlbl = await isDockerAvailable(); + if (isDockerAvlbl && (!(globalThis.mermaidUnavailableTools || []).includes("docker")) && mermaidModes.includes("docker")) { + const dockerSuccess = await generateMarkdownFileWithMermaidDocker(outputFlowMdFileIn, outputFlowMdFileOut); + if (dockerSuccess) { + return true; + } + } + if ((!(globalThis.mermaidUnavailableTools || []).includes("cli")) && mermaidModes.includes("cli")) { + const mmCliSuccess = await generateMarkdownFileWithMermaidCli(outputFlowMdFileIn, outputFlowMdFileOut); + if (mmCliSuccess) { + if (withPdf) { + const pdfGenerated = await generatePdfFileFromMarkdown(outputFlowMdFileOut); + if (!pdfGenerated) { return false; } + + const fileName = path.basename(pdfGenerated).replace(".pdf", ""); + uxLog("log", this, c.grey(`Written ${fileName} PDF documentation in ${pdfGenerated}`)); + } + return true; + } + } + if ((globalThis.mermaidUnavailableTools || []).includes("cli") && (globalThis.mermaidUnavailableTools || []).includes("docker")) { + uxLog("warning", this, c.yellow("Either mermaid-cli or docker is required to work to generate mermaidJs Graphs. 
Please install/fix one of them if you want to generate SVG diagrams.")); + } + return false; +} + +export async function generateMarkdownFileWithMermaidDocker(outputFlowMdFileIn: string, outputFlowMdFileOut: string): Promise { + const fileDir = path.resolve(path.dirname(outputFlowMdFileIn)); + const fileName = path.basename(outputFlowMdFileIn); + const fileOut = path.basename(outputFlowMdFileOut); + const dockerCommand = `docker run --rm -v "${fileDir}:/data" ghcr.io/mermaid-js/mermaid-cli/mermaid-cli -i "${fileName}" -o "${fileOut}"`; + try { + await execCommand(dockerCommand, this, { output: false, fail: true, debug: false }); + return true; + } catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating mermaidJs Graphs from ${outputFlowMdFileIn} documentation with Docker: ${e.message}`) + "\n" + c.grey(e.stack)); + if (JSON.stringify(e).includes("Cannot connect to the Docker daemon") || JSON.stringify(e).includes("daemon is not running")) { + globalThis.mermaidUnavailableTools = (globalThis.mermaidUnavailableTools || []).concat("docker"); + uxLog("warning", this, c.yellow("[Mermaid] Docker unavailable: do not try again")); + } + return false; + } +} + +export async function generateMarkdownFileWithMermaidCli(outputFlowMdFileIn: string, outputFlowMdFileOut: string): Promise { + // Try with NPM package + const isMmdAvailable = await isMermaidAvailable(); + const puppeteerConfigPath = path.join(PACKAGE_ROOT_DIR, 'defaults', 'puppeteer-config.json'); + const mermaidCmd = `${!isMmdAvailable ? 
'npx --yes -p @mermaid-js/mermaid-cli ' : ''}mmdc -i "${outputFlowMdFileIn}" -o "${outputFlowMdFileOut}" --puppeteerConfigFile "${puppeteerConfigPath}"`; + try { + await execCommand(mermaidCmd, this, { output: false, fail: true, debug: false }); + return true; + } catch (e: any) { + uxLog("warning", this, c.yellow(`Error generating mermaidJs Graphs from ${outputFlowMdFileIn} documentation with CLI: ${e.message}`) + "\n" + c.grey(e.stack)); + if (JSON.stringify(e).includes("timed out")) { + globalThis.mermaidUnavailableTools = (globalThis.mermaidUnavailableTools || []).concat("cli"); + uxLog("warning", this, c.yellow("[Mermaid] CLI unavailable: do not try again")); + } + return false; + } +} + +export function getMermaidExtraClasses() { + const added = 'fill:green,color:white,stroke-width:4px,text-decoration:none,max-height:100px'; + const removed = 'fill:red,color:white,stroke-width:4px,text-decoration:none,max-height:100px'; + const changed = 'fill:orange,color:white,stroke-width:4px,text-decoration:none,max-height:100px'; + + const addedClasses = [ + 'actionCallsAdded', + 'assignmentsAdded', + 'collectionProcessorsAdded', + 'customErrorsAdded', + 'decisionsAdded', + 'loopsAdded', + 'recordCreatesAdded', + 'recordDeletesAdded', + 'recordLookupsAdded', + 'recordUpdatesAdded', + 'screensAdded', + 'subflowsAdded', + 'startClassAdded', + 'transformsAdded' + ]; + + const removedClasses = [ + 'actionCallsRemoved', + 'assignmentsRemoved', + 'collectionProcessorsRemoved', + 'customErrorsRemoved', + 'decisionsRemoved', + 'loopsRemoved', + 'recordCreatesRemoved', + 'recordDeletesRemoved', + 'recordLookupsRemoved', + 'recordUpdatesRemoved', + 'screensRemoved', + 'subflowsRemoved', + 'startClassRemoved', + 'transformsRemoved' + ]; + + const changedClasses = [ + 'actionCallsChanged', + 'assignmentsChanged', + 'collectionProcessorsChanged', + 'customErrorsChanged', + 'decisionsChanged', + 'loopsChanged', + 'recordCreatesChanged', + 'recordDeletesChanged', + 
'recordLookupsChanged', + 'recordUpdatesChanged', + 'screensChanged', + 'subflowsChanged', + 'startClassChanged', + 'transformsChanged' + ]; + + const formatClasses = (classList, style) => + classList.map(className => `classDef ${className} ${style}`).join('\n'); + + return ` +${formatClasses(addedClasses, added)} + +${formatClasses(removedClasses, removed)} + +${formatClasses(changedClasses, changed)} + `; +} + +export async function generateFlowVisualGitDiff(flowFile, commitBefore: string, commitAfter: string, + options: { mermaidMd: boolean, svgMd: boolean, pngMd: boolean, debug: boolean } = { mermaidMd: false, svgMd: true, pngMd: false, debug: false }) { + const result: any = { outputDiffMdFile: "", hasFlowDiffs: false }; + const { mermaidMdBefore, flowXmlBefore } = await getFlowXmlBefore(commitBefore, flowFile); + const { mermaidMdAfter, flowXmlAfter } = await getFlowXmlAfter(commitAfter, flowFile); + const flowLabel = path.basename(flowFile, ".flow-meta.xml"); + + const reportDir = await getReportDirectory(); + await fs.ensureDir(path.join(reportDir, "flow-diff")); + const diffMdFile = path.join(reportDir, 'flow-diff', `${flowLabel}_${moment().format("YYYYMMDD-hhmmss")}.md`); + + if (options.debug) { + uxLog("log", this, c.grey("FLOW DOC BEFORE:\n" + mermaidMdBefore) + "\n"); + await fs.writeFile(diffMdFile.replace(".md", ".mermaid-before.md"), mermaidMdBefore); + uxLog("log", this, c.grey("FLOW DOC AFTER:\n" + mermaidMdAfter) + "\n"); + await fs.writeFile(diffMdFile.replace(".md", ".mermaid-after.md"), mermaidMdAfter); + } + + const flowDiffs = Diff.diffLines(mermaidMdBefore, mermaidMdAfter); + result.hasFlowDiffs = flowDiffs.some((line) => (line.added || line.removed) && line.value.trim() !== ""); + result.diffLines = flowDiffs.filter(line => line.added || line.removed); + + const mixedLines: any[] = []; + for (const line of flowDiffs) { + if (line.added) { + mixedLines.push(...line.value.split(/\r?\n/).map(lineSplit => { return ["added", lineSplit] })); + 
} + else if (line.removed) { + mixedLines.push(...line.value.split(/\r?\n/).map(lineSplit => { return ["removed", lineSplit] })); + } + else { + mixedLines.push(...line.value.split(/\r?\n/).map(lineSplit => { return ["unchanged", lineSplit] })); + } + } + // uxLog("other", this, JSON.stringify(mixedLines, null, 2)); + const compareMdLines: string[] = []; + const linkLines: string[] = []; + buildFinalCompareMarkdown(mixedLines, compareMdLines, false, false, linkLines); + + let diffMarkdown = compareMdLines.join("\n"); + + if (result.hasFlowDiffs === true && flowXmlAfter !== "" && flowXmlBefore !== "") { + const flowDiffKey = `${flowLabel}-${commitBefore}-${commitAfter}`; + diffMarkdown = await completeWithDiffAiDescription(diffMarkdown, flowXmlAfter, flowXmlBefore, flowDiffKey) + } + + // Write markdown with diff in a file + await fs.writeFile(diffMdFile, diffMarkdown); + if (options.mermaidMd) { + await fs.copyFile(diffMdFile, diffMdFile.replace(".md", ".mermaid.md")); + } + result.outputDiffMdFile = diffMdFile; + if (!options.svgMd && !options.pngMd) { + return result; + } + if (options.svgMd) { + // Generate final markdown with mermaid SVG + const finalRes = await generateMarkdownFileWithMermaid(diffMdFile, diffMdFile, ["cli", "docker"]); + if (finalRes) { + uxLog("success", this, c.green(`Successfully generated visual git diff for flow: ${diffMdFile}`)); + } + } + else if (options.pngMd) { + // General final markdown with mermaid PNG + const pngFile = path.join(path.dirname(diffMdFile), path.basename(diffMdFile, ".md") + ".png"); + const pngRes = await generateMarkdownFileWithMermaid(diffMdFile, pngFile, ["cli", "docker"]); + if (pngRes) { + let mdWithMermaid = fs.readFileSync(diffMdFile, "utf8"); + mdWithMermaid = mdWithMermaid.replace( + /```mermaid\n([\s\S]*?)\n```/g, + `![Diagram as PNG](./${path.basename(pngFile).replace(".png", "-1.png")})`); + await fs.writeFile(diffMdFile, mdWithMermaid); + } + } + return result; +} + +async function 
getFlowXmlAfter(commitAfter: string, flowFile: any) { + try { + const flowXmlAfter = await git().show([`${commitAfter}:${flowFile}`]); + const mermaidMdAfter = await buildMermaidMarkdown(flowXmlAfter, flowFile); + return { mermaidMdAfter, flowXmlAfter }; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + catch (err: any) { + return { mermaidMdAfter: "", flowXmlAfter: "" }; + } +} + +async function getFlowXmlBefore(commitBefore: string, flowFile: any) { + try { + const flowXmlBefore = await git().show([`${commitBefore}:${flowFile}`]); + const mermaidMdBefore = await buildMermaidMarkdown(flowXmlBefore, flowFile); + return { mermaidMdBefore, flowXmlBefore }; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + catch (err: any) { + return { mermaidMdBefore: "", flowXmlBefore: "" }; + } +} + +function buildFinalCompareMarkdown(mixedLines: any[], compareMdLines, isMermaid, isTableStarted, linkLines) { + if (mixedLines.length === 0) { + return; + } + // Take line to process + const [status, currentLine] = mixedLines.shift(); + // Update mermaid state + if (isMermaid === false && currentLine.includes("```mermaid")) { + isMermaid = true; + } else if (isMermaid === true && currentLine.includes("```")) { + compareMdLines.push(...getMermaidExtraClasses().split("\n")); + // Build link positions + let pos = 0; + const positions = { + added: [], + removed: [], + unchanged: [] + } + for (const linkType of linkLines) { + positions[linkType].push(pos); + pos++; + } + // Build added and removed links styles + if (positions.added.length > 0) { + compareMdLines.push("linkStyle " + positions.added.join(",") + " stroke:#00ff00,stroke-width:4px,color:green;"); + } + if (positions.removed.length > 0) { + compareMdLines.push("linkStyle " + positions.removed.join(",") + " stroke:#ff0000,stroke-width:4px,color:red;"); + } + isMermaid = false + } + let styledLine = currentLine; + // Remove next diff line if not relevant + if (styledLine.startsWith("|") && 
mixedLines.length > 1 && mixedLines[0][1] === '' && mixedLines[1][1].startsWith("|") && !mixedLines[1][1].startsWith("|Condition Id|") && !mixedLines[1][1].startsWith("|Filter Id|")) { + mixedLines.shift(); + } + // Skip table block if there are no updated lines within + if (styledLine.startsWith("## ") && !styledLine.startsWith("## Flow Diagram")) { + let updatedInBlock = false; + let nextBlockPos = 0; + for (const nextLine of mixedLines) { + if (nextLine[1].startsWith("## ") || nextLine[1].includes("_Documentation") || nextLine[1].startsWith("___")) { + break; + } + if (nextLine[0] === "removed" || nextLine[0] === "added") { + updatedInBlock = true; + } + nextBlockPos++; + } + if (!updatedInBlock) { + const mixedLinesStartingFromNextBlock = mixedLines.slice(nextBlockPos); + // Continue processing next lines + buildFinalCompareMarkdown(mixedLinesStartingFromNextBlock, compareMdLines, isMermaid, false, linkLines); + return; + } + } + /* jscpd:ignore-start */ + // Skip node block if there are no updated lines within + else if (styledLine.startsWith("### ")) { + let updatedInBlock = false; + let nextBlockPos = 0; + for (const nextLine of mixedLines) { + if (nextLine[1].startsWith("### ") || nextLine[1].startsWith("## ") || nextLine[1].includes("_Documentation") || nextLine[1].startsWith("___")) { + break; + } + if (nextLine[0] === "removed" || nextLine[0] === "added") { + updatedInBlock = true; + } + nextBlockPos++; + } + if (!updatedInBlock) { + const mixedLinesStartingFromNextBlock = mixedLines.slice(nextBlockPos); + // Continue processing next lines + buildFinalCompareMarkdown(mixedLinesStartingFromNextBlock, compareMdLines, isMermaid, false, linkLines); + return; + } + } + else if (styledLine.startsWith("#### ")) { + let updatedInBlock = false; + let nextBlockPos = 0; + for (const nextLine of mixedLines) { + if (nextLine[1].startsWith("#### ") || nextLine[1].startsWith("### ") || nextLine[1].startsWith("## ") || nextLine[1].includes("_Documentation") || 
nextLine[1].startsWith("___")) { + break; + } + if (nextLine[0] === "removed" || nextLine[0] === "added") { + updatedInBlock = true; + } + nextBlockPos++; + } + if (!updatedInBlock) { + const mixedLinesStartingFromNextBlock = mixedLines.slice(nextBlockPos); + // Continue processing next lines + buildFinalCompareMarkdown(mixedLinesStartingFromNextBlock, compareMdLines, isMermaid, false, linkLines); + return; + } + } + /* jscpd:ignore-end */ + // Skip table lines that have not been updated + /* + else if (!isMermaid && styledLine.startsWith("|") && isTableStarted === false) { + isTableStarted = true; + const tableFilteredLines: any[] = []; + let endTablePos = 0; + for (const nextLine of mixedLines) { + if ((!nextLine[1].startsWith("|") || nextLine[1].includes("Condition Id") || nextLine[1].includes("Filter Id")) && nextLine[1] !== "") { + break; + } + if ((nextLine[0] === "removed" || nextLine[0] === "added" || endTablePos === 0) && nextLine[1] !== "") { + tableFilteredLines.push(nextLine); + } + endTablePos++; + } + if (tableFilteredLines.length < 2) { + // Empty table + const mixedLinesStartingFromEndOfTable = mixedLines.slice(endTablePos); + buildFinalCompareMarkdown(mixedLinesStartingFromEndOfTable, compareMdLines, isMermaid, false, linkLines); + } + else { + compareMdLines.push(styledLine); + const mixedLinesStartingFromEndOfTable = mixedLines.slice(endTablePos); + const newMixedLines = [...tableFilteredLines, ...[["unchanged", ""]], ...mixedLinesStartingFromEndOfTable]; + // Continue processing next lines + buildFinalCompareMarkdown(newMixedLines, compareMdLines, isMermaid, true, linkLines); + } + return; + } + */ + + // Tables lines + if (!isMermaid && status === "removed" && styledLine.startsWith("|") && !styledLine.startsWith("|:-")) { + styledLine = "|🟥" + styledLine.split("|").filter(e => e !== "").map((col: string) => `${col}`).join("|") + "|"; + } + else if (!isMermaid && status === "added" && styledLine.startsWith("|") && !styledLine.startsWith("|:-")) 
{ + styledLine = "|🟩" + styledLine.split("|").filter(e => e !== "").map((col: string) => `${col}`).join("|") + "|"; + } + // Normal lines header 3 + else if (!isMermaid && status === "removed" && styledLine.startsWith("#### ")) { + styledLine = `#### 🟥${styledLine.replace("#### ", "")}`; + } + else if (!isMermaid && status === "added" && styledLine.startsWith("#### ")) { + styledLine = `#### 🟩${styledLine.replace("#### ", "")}`; + } + // Normal lines header 2 + else if (!isMermaid && status === "removed" && styledLine.startsWith("### ")) { + styledLine = `### 🟥${styledLine.replace("### ", "")}`; + } + else if (!isMermaid && status === "added" && styledLine.startsWith("### ")) { + styledLine = `### 🟩${styledLine.replace("### ", "")}`; + } + // Normal lines header 3 + else if (!isMermaid && status === "removed" && styledLine.startsWith("## ")) { + styledLine = `## 🟥${styledLine.replace("## ", "")}`; + } + else if (!isMermaid && status === "added" && styledLine.startsWith("## ")) { + styledLine = `## 🟩${styledLine.replace("## ", "")}`; + } + // Normal lines + else if (!isMermaid && status === "removed" && styledLine !== "" && !styledLine.includes('```') && !styledLine.startsWith("|:-") && !styledLine.startsWith("___")) { + styledLine = `🟥${styledLine}`; + } + else if (!isMermaid && status === "added" && styledLine !== "" && !styledLine.includes('```') && !styledLine.startsWith("|:-") && !styledLine.startsWith("___")) { + styledLine = `🟩${styledLine}`; + } + // Boxes lines + else if (isMermaid === true && status === "removed" && currentLine.split(":::").length === 2) { + styledLine = styledLine + "Removed" + if (styledLine.split('"').length === 3) { + const splits = styledLine.split('"'); + styledLine = splits[0] + '"' + splits[1] + '"' + splits[2] + } + } + else if (isMermaid === true && status === "added" && currentLine.split(":::").length === 2) { + styledLine = styledLine + "Added" + if (styledLine.split('"').length === 3) { + const splits = styledLine.split('"'); 
+ styledLine = splits[0] + '"' + splits[1] + '"' + splits[2] + } + } + else if (isMermaid === true && currentLine.includes(":::")) { + // Detect if link line does not change, but its content did + const splits = currentLine.split(/[[({]/); + if (splits.length > 1) { + const boxName = splits[0]; + const changed = mixedLines.filter(([lineStatus, line]) => { return line.startsWith(`click ${boxName}`) && ["added", "removed"].includes(lineStatus) }).length; + if (changed > 0) { + styledLine = styledLine + "Changed" + if (styledLine.split('"').length === 3) { + const splits = styledLine.split('"'); + styledLine = splits[0] + '"' + splits[1] + '"' + splits[2] + } + // Remove "removed" line from mixedLines + const removedNodePos = mixedLines.findIndex(([lineStatus, line]) => { return line.startsWith(`click ${boxName}`) && lineStatus === "removed" }); + if (removedNodePos !== -1) { + mixedLines.splice(removedNodePos, 1); + } + } + } + } + // Long Link lines + else if (isMermaid === true && status === "removed" && currentLine.includes('-. Fault .->')) { + styledLine = styledLine.replace('-. Fault .->', '-. 🟥Fault .->') //+ ":::removedLink" + linkLines.push("removed"); + } + else if (isMermaid === true && status === "added" && currentLine.includes('-. Fault .->')) { + styledLine = styledLine.replace('-. Fault .->', '-. 
🟩Fault .->') // + ":::addedLink" + linkLines.push("added"); + } + /* jscpd:ignore-start */ + // Long Link lines + else if (isMermaid === true && status === "removed" && currentLine.includes('--->')) { + styledLine = styledLine.replace("--->", "-.->");//+ ":::removedLink" + linkLines.push("removed"); + if (styledLine.split("|").length === 3) { + const splits = styledLine.split("|"); + styledLine = splits[0] + '|"🟥' + removeQuotes(splits[1]) + '"|' + splits[2] + } + } + else if (isMermaid === true && status === "added" && currentLine.includes('--->')) { + styledLine = styledLine.replace("--->", "===>"); // + ":::addedLink" + linkLines.push("added"); + if (styledLine.split("|").length === 3) { + const splits = styledLine.split("|"); + styledLine = splits[0] + '|"🟩' + removeQuotes(splits[1]) + '"|' + splits[2] + } + } + // Link lines + else if (isMermaid === true && status === "removed" && currentLine.includes('-->')) { + styledLine = styledLine.replace("-->", "-.->") // + ":::removedLink" + linkLines.push("removed"); + if (styledLine.split("|").length === 3) { + const splits = styledLine.split("|"); + styledLine = splits[0] + '|"🟥' + removeQuotes(splits[1]) + '"|' + splits[2] + } + } + else if (isMermaid === true && status === "added" && currentLine.includes('-->')) { + styledLine = styledLine.replace("-->", "==>") // + ":::addedLink" + linkLines.push("added"); + if (styledLine.split("|").length === 3) { + const splits = styledLine.split("|"); + styledLine = splits[0] + '|"🟩' + removeQuotes(splits[1]) + '"|' + splits[2] + } + } + else if (isMermaid === true && !["added", "removed"].includes(status) && + (currentLine.includes('-->') || currentLine.includes('-. 
Fault .->')) + ) { + linkLines.push("unchanged"); + } + /* jscpd:ignore-end */ + compareMdLines.push(styledLine); + // Continue processing next lines + buildFinalCompareMarkdown(mixedLines, compareMdLines, isMermaid, (styledLine.startsWith("|") && isTableStarted), linkLines); +} + +async function buildMermaidMarkdown(flowXml, flowFile) { + try { + const flowDocGenResult = await parseFlow(flowXml, 'mermaid', { outputAsMarkdown: true }); + return flowDocGenResult.uml; + } catch (err: any) { + throw new SfError(`Unable to build Graph for flow ${flowFile}: ${err.message}`) + } +} + +function removeQuotes(str: string) { + if (str.startsWith('"')) { + str = str.slice(1); + } + if (str.endsWith('"')) { + str = str.slice(0, -1) + } + return str; +} + +export async function generateHistoryDiffMarkdown(flowFile: string, debugMode: boolean) { + await fs.ensureDir(path.join("docs", "flows")); + const diffMdFile = path.join("docs", "flows", path.basename(flowFile).replace(".flow-meta.xml", "-history.md")); + // Compute for all states + const fileHistory = await git().log({ file: flowFile }); + const flowLabel = path.basename(flowFile, ".flow-meta.xml"); + uxLog("log", this, c.grey(`Generating ${flowLabel} markdown diff between ${fileHistory.all.length} Flow states...`)); + const diffMdFiles: any[] = []; + for (let i = 0; i < fileHistory.all.length; i++) { + const commitAfter = fileHistory.all[i]; + // Initial state + if (i === fileHistory.all.length - 1) { + const flowXml = await git().show([`${fileHistory.all[i].hash}:${flowFile}`]); + const reportDir = await getReportDirectory(); + await fs.ensureDir(path.join(reportDir, "flow-diff")); + const diffMdFileTmp = path.join(reportDir, 'flow-diff', `${flowLabel}_${moment().format("YYYYMMDD-hhmmss")}.md`); + const genRes = await generateFlowMarkdownFile(flowLabel, flowXml, diffMdFileTmp, { collapsedDetails: false, describeWithAi: false, flowDependencies: {} }); + if (!genRes) { + throw new Error(`Error generating markdown file for 
flow ${flowFile}`); + } + diffMdFiles.push({ + initialVersion: true, + commitAfter: commitAfter, + markdown: fs.readFileSync(diffMdFileTmp, "utf8") + }); + } + else { + const commitBefore = fileHistory.all[i + 1]; + const genDiffRes = await generateFlowVisualGitDiff(flowFile, commitBefore.hash, commitAfter.hash, { svgMd: false, mermaidMd: true, pngMd: false, debug: debugMode }); + if (genDiffRes.hasFlowDiffs && fs.existsSync(genDiffRes.outputDiffMdFile)) { + diffMdFiles.push({ + commitBefore: commitBefore, + commitAfter: commitAfter, + markdown: fs.readFileSync(genDiffRes.outputDiffMdFile, "utf8") + }); + } + else { + uxLog("warning", this, c.yellow(`No real flow diff has been found between ${commitBefore.hash} and ${commitAfter.hash}`)); + } + } + } + // Set all the results in a single tabbed markdown + uxLog("log", this, c.grey(`Aggregating results in summary tabbed file ${diffMdFile}...`)); + let finalMd = `# ${flowLabel} history\n\n`; + finalMd += "\n\n" + for (const diffMdFile of diffMdFiles) { + finalMd += `=== "${moment(diffMdFile.commitAfter.date).format("ll")}` + (diffMdFile.initialVersion ? 
" (Initial)" : "") + `"\n\n`; + finalMd += ` _${moment(diffMdFile.commitAfter.date).format("ll")}, by ${diffMdFile.commitAfter.author_name} in commit ${diffMdFile.commitAfter.message}_\n\n`; + // Remove title and add indentation for tabs to be displayed + finalMd += diffMdFile.markdown.split("\n").filter(line => !line.startsWith("# ")).map(line => ` ${line}`).join("\n"); + finalMd += "\n\n"; + } + await fs.writeFile(diffMdFile, finalMd); + if (debugMode) { + await fs.copyFile(diffMdFile, diffMdFile.replace(".md", ".mermaid.md")); + } + const genSvgRes = await generateMarkdownFileWithMermaid(diffMdFile, diffMdFile); + if (!genSvgRes) { + throw new Error("Error generating mermaid markdown file"); + } + + // Fix indentation for mermaid SVG links + const diffMarkdown = await fs.readFile(diffMdFile, "utf8"); + const diffMarkdownFixed = diffMarkdown.split("\n").map(line => { + if (line.startsWith("![diagram]")) { + return ` ${line}`; + } + return line; + }).join("\n"); + await fs.writeFile(diffMdFile, diffMarkdownFixed); + + // Add link to main flow doc + const mainFlowDoc = path.join("docs", "flows", path.basename(flowFile).replace(".flow-meta.xml", ".md")); + if (fs.existsSync(mainFlowDoc)) { + const mainFlowDocContent = await fs.readFile(mainFlowDoc, "utf8"); + const mainFlowDocLink = `[(_View History_)](${path.basename(flowFile).replace(".flow-meta.xml", "-history.md")})`; + if (mainFlowDocContent.includes("## Flow Diagram") && !mainFlowDocContent.includes(mainFlowDocLink)) { + const updatedFlowDocContent = mainFlowDocContent.replace("## Flow Diagram", `## Flow Diagram ${mainFlowDocLink}`); + await fs.writeFile(mainFlowDoc, updatedFlowDocContent); + } + } + + uxLog("success", this, c.green(`Markdown diff between ${fileHistory.all.length} Flow states generated in ${diffMdFile}`)); + return diffMdFile; +} + +export function removeMermaidLinks(messageBody: string) { + let result = messageBody + ""; + if (result.includes("```mermaid")) { + let withinMermaid = false; + 
result = result + .split("\n") + .filter((line) => { + // Toggle mermaid flag on/off + if (line.includes("```mermaid")) { + withinMermaid = true; + } + else if (line.includes("```") && withinMermaid === true) { + withinMermaid = false; + } + // Filter if click line for better display + if (line.startsWith("click") && withinMermaid === true) { + return false; + } + return true; + }) + .join("\n"); + } + return result; +} + +/* jscpd:ignore-start */ +async function completeWithDiffAiDescription(flowMarkdownDoc: string, flowXmlNew: string, flowXmlPrevious: string, diffKey: string): Promise { + const flowXmlNewStripped = await new DocBuilderFlow("", flowXmlNew, "").stripXmlForAi(); + const flowXmlPreviousStripped = await new DocBuilderFlow("", flowXmlPrevious, "").stripXmlForAi(); + const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_FLOW_DIFF", [flowXmlNewStripped, flowXmlPreviousStripped], diffKey); + if (aiCache.success) { + uxLog("log", this, c.grey("Used AI cache for diff description (set IGNORE_AI_CACHE=true to force call to AI)")); + const replaceText = `## AI-Generated Differences Summary\n\n${includeFromFile(aiCache.aiCacheDirFile, aiCache.cacheText || "")}`; + return flowMarkdownDoc.replace("", replaceText); + } + if (AiProvider.isAiAvailable()) { + // Invoke AI Service + const prompt = AiProvider.buildPrompt("PROMPT_DESCRIBE_FLOW_DIFF", { "FLOW_XML_NEW": flowXmlNewStripped, "FLOW_XML_PREVIOUS": flowXmlPreviousStripped }); + const aiResponse = await AiProvider.promptAi(prompt, "PROMPT_DESCRIBE_FLOW_DIFF"); + // Replace description in markdown + if (aiResponse?.success) { + let responseText = aiResponse.promptResponse || "No AI description available"; + if (responseText.startsWith("##")) { + responseText = responseText.split("\n").slice(1).join("\n"); + } + await UtilsAi.writeAiCache("PROMPT_DESCRIBE_FLOW_DIFF", [flowXmlNewStripped, flowXmlPreviousStripped], diffKey, responseText); + const replaceText = `## AI-Generated Differences 
Summary\n\n${includeFromFile(aiCache.aiCacheDirFile, responseText || "")}`; + const flowMarkdownDocUpdated = flowMarkdownDoc.replace("", replaceText); + return flowMarkdownDocUpdated; + } + } + return flowMarkdownDoc; +} +/* jscpd:ignore-end */ + diff --git a/src/common/utils/notifUtils.ts b/src/common/utils/notifUtils.ts index d4420a8e3..12d4ecf5a 100644 --- a/src/common/utils/notifUtils.ts +++ b/src/common/utils/notifUtils.ts @@ -3,9 +3,9 @@ This class is deprecated and kept for backward compatibility Use NotifProvider class instead :) */ -import { getCurrentGitBranch } from "."; -import { GitProvider } from "../gitProvider"; -import { NotifSeverity, UtilsNotifs } from "../notifProvider"; +import { getCurrentGitBranch } from "./index.js"; +import { GitProvider } from "../gitProvider/index.js"; +import { NotifSeverity, UtilsNotifs } from "../notifProvider/index.js"; /** * @description This function retrieves the job URL from the GitProvider and creates a notification button if the job URL exists. @@ -15,7 +15,7 @@ import { NotifSeverity, UtilsNotifs } from "../notifProvider"; * @returns {Promise<{ text: string; url: string }[]>} - A Promise that resolves to an array of notification buttons. */ export async function getNotificationButtons(): Promise<{ text: string; url: string }[]> { - const notifButtons = []; + const notifButtons: any[] = []; const jobUrl = await GitProvider.getJobUrl(); if (jobUrl) { notifButtons.push({ text: "View Job", url: jobUrl }); @@ -32,7 +32,7 @@ export async function getNotificationButtons(): Promise<{ text: string; url: str * @returns {Promise} - A Promise that resolves to a markdown string for the current Git branch. */ export async function getBranchMarkdown(type = "slack"): Promise { - const currentGitBranch = await getCurrentGitBranch(); + const currentGitBranch = await getCurrentGitBranch() || ""; let branchMd = type === "jira" ? 
`{ "label": "${currentGitBranch}"}` diff --git a/src/common/utils/orgConfigUtils.ts b/src/common/utils/orgConfigUtils.ts index bb0ce51f8..e14fb27ab 100644 --- a/src/common/utils/orgConfigUtils.ts +++ b/src/common/utils/orgConfigUtils.ts @@ -1,17 +1,22 @@ -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { glob } from "glob"; -import * as puppeteer from "puppeteer"; -import * as yaml from "js-yaml"; -import { uxLog } from "."; +import c from 'chalk'; +import fs from 'fs-extra'; +import { glob } from 'glob'; +import puppeteer, { Browser } from 'puppeteer-core'; +import sortArray from 'sort-array'; +import * as chromeLauncher from 'chrome-launcher'; +import * as yaml from 'js-yaml'; +import { uxLog } from './index.js'; +import { Connection, SfError } from '@salesforce/core'; +import { DescribeSObjectResult } from '@jsforce/jsforce-node'; +import { GLOB_IGNORE_PATTERNS } from './projectUtils.js'; const listViewRegex = /objects\/(.*)\/listViews\/(.*)\.listView-meta\.xml/gi; export async function restoreListViewMine(listViewStrings: Array, conn: any, options: any = { debug: false }) { - const listViewItems = []; + const listViewItems: any[] = []; for (const listViewStr of listViewStrings) { // Format Object:ListViewName - const splits = listViewStr.split(":"); + const splits = listViewStr.split(':'); if (splits.length === 2) { listViewItems.push({ object: splits[0], listViewName: splits[1] }); } else { @@ -20,12 +25,13 @@ export async function restoreListViewMine(listViewStrings: Array, conn: const m = listViewRegex.exec(listViewStr); if (!m) { uxLog( + "error", this, c.red( - `Unable to find list view object and name from ${listViewStr}. Use format ${c.bold("Object:ListViewName")} , or ${c.bold( - ".../objects/OBJECT/listViews/LISTVIEWNAME.listview-meta.xml", - )}`, - ), + `Unable to find list view object and name from ${listViewStr}. 
Use format ${c.bold( + 'Object:ListViewName' + )} , or ${c.bold('.../objects/OBJECT/listViews/LISTVIEWNAME.listview-meta.xml')}` + ) ); continue; } @@ -37,19 +43,31 @@ export async function restoreListViewMine(listViewStrings: Array, conn: const instanceUrl = conn.instanceUrl; const loginUrl = `${instanceUrl}/secur/frontdoor.jsp?sid=${conn.accessToken}`; + // Get chrome/chromium executable path using the utility function + const chromeExecutablePath = getChromeExecutablePath(); + // Start puppeteer - const browser = await puppeteer.launch({ - args: ["--no-sandbox", "--disable-setuid-sandbox"], - headless: !(options.debug === true), - }); + let browser: Browser; + try { + browser = await puppeteer.launch({ + args: ['--no-sandbox', '--disable-setuid-sandbox'], + headless: !(options.debug === true), + executablePath: chromeExecutablePath + }); + } catch (e: any) { + uxLog("error", this, c.red("List View with Mine has not been restored: Error while trying to launch puppeteer (Browser simulator)")); + uxLog("error", this, c.red(e.message)); + uxLog("error", this, c.red("You might need to set variable PUPPETEER_EXECUTABLE_PATH with the target of a Chrome/Chromium path. 
example: /usr/bin/chromium-browser")); + return { error: e }; + } const page = await browser.newPage(); // Process login page - await page.goto(loginUrl, { waitUntil: ["domcontentloaded", "networkidle0"] }); + await page.goto(loginUrl, { waitUntil: ['domcontentloaded', 'networkidle0'] }); - const success = []; - const failed = []; - const unnecessary = []; + const success: any[] = []; + const failed: any[] = []; + const unnecessary: any[] = []; // Restore list views with Mine option for (const listView of listViewItems) { @@ -67,40 +85,68 @@ export async function restoreListViewMine(listViewStrings: Array, conn: await navigationPromise; // Open ListView settings - const filterButton = await page.waitForSelector(".filterButton"); - await filterButton.click(); + const filterButton = await page.waitForSelector('.filterButton'); + if (filterButton) { + await filterButton.click(); + } else { + throw new SfError('Puppeteer: .filterButton not found'); + } // Open Filter by owner popup - const filterByOwnerButtons = await page.waitForXPath("//div[contains(text(), 'Filter by Owner')]"); - await filterByOwnerButtons.click(); + const filterByOwnerButtons = await page.waitForSelector("xpath///div[contains(text(), 'Filter by Owner')]"); + if (filterByOwnerButtons) { + await filterByOwnerButtons.click(); + } else { + throw new SfError('Puppeteer: .filterByOwnerButtons not found'); + } // Select Mine value const mineValue = await page.waitForSelector('input[value="mine"]'); - const mineValueClickableLabel = await mineValue.$x("following-sibling::*"); - await mineValueClickableLabel[0].click(); + if (mineValue) { + const mineValueClickableLabel = await mineValue.$('following-sibling::*'); + if (mineValueClickableLabel) { + await mineValueClickableLabel[0].click(); + } + } else { + throw new SfError('Puppeteer: input[value="mine"] not found'); + } // Click done - const doneButtons = await page.waitForXPath("//span[contains(text(), 'Done')]"); - await doneButtons.click(); + const 
doneButtons = await page.waitForSelector("xpath///span[contains(text(), 'Done')]"); + if (doneButtons) { + await doneButtons.click(); + } else { + throw new SfError('Puppeteer: Done button not found'); + } // Save try { - const saveButton = await page.waitForSelector(".saveButton", { timeout: 3000 }); - await saveButton.click(); + const saveButton = await page.waitForSelector('.saveButton', { timeout: 3000 }); + if (saveButton) { + await saveButton.click(); + } else { + throw new SfError('Puppeteer: .saveButton not found'); + } } catch { unnecessary.push(`${objectName}:${listViewName}`); - uxLog(this, c.yellow(`Unable to hit save button, but it's probably because ${objectName}.${listViewName} was already set to "Mine"`)); + uxLog( + "warning", + this, + c.yellow( + `Unable to hit save button, but it's probably because ${objectName}.${listViewName} was already set to "Mine"` + ) + ); continue; } // Confirmed saved toast - await page.waitForXPath("//span[contains(text(), 'List view updated.')]"); + await page.waitForSelector("xpath///span[contains(text(), 'List view updated.')]"); success.push(`${objectName}:${listViewName}`); - uxLog(this, c.green(`Successfully set ${objectName}.${listViewName} as "Mine"`)); + uxLog("success", this, c.green(`Successfully set ${objectName}.${listViewName} as "Mine"`)); } catch (e) { // Unexpected puppeteer error failed.push(`${objectName}:${listViewName}`); - uxLog(this, c.red(`Puppeteer error while processing ${objectName}:${listViewName}: ${e.message}`)); + uxLog("error", this, c.red(`Puppeteer error while processing ${objectName}:${listViewName}: ${(e as Error).message}`)); } } // Close puppeteer browser @@ -110,11 +156,11 @@ export async function restoreListViewMine(listViewStrings: Array, conn: // List all yml files in config/branches and build list of major orgs from them export async function listMajorOrgs() { - const majorOrgs = []; - const branchConfigPattern = "**/config/branches/.sfdx-hardis.*.yml"; - const configFiles = 
await glob(branchConfigPattern); + const majorOrgs: any[] = []; + const branchConfigPattern = '**/config/branches/.sfdx-hardis.*.yml'; + const configFiles = await glob(branchConfigPattern, { ignore: GLOB_IGNORE_PATTERNS }); for (const configFile of configFiles) { - const props = yaml.load(fs.readFileSync(configFile, "utf-8")) || {}; + const props = (yaml.load(fs.readFileSync(configFile, 'utf-8')) || {}) as any; listViewRegex.lastIndex = 0; const branchNameRegex = /\.sfdx-hardis\.(.*)\.yml/gi; const m = branchNameRegex.exec(configFile); @@ -123,5 +169,133 @@ export async function listMajorOrgs() { } majorOrgs.push(props); } - return majorOrgs; + // Clumsy sorting but not other way :/ + const majorOrgsSorted: any = []; + // Main + for (const majorOrg of majorOrgs) { + if (isProduction(majorOrg?.branchName || "")) { + majorOrg.level = majorOrg.level || 100; + majorOrgsSorted.push(majorOrg); + } + } + // Preprod + for (const majorOrg of majorOrgs) { + if (isPreprod(majorOrg?.branchName || "")) { + majorOrg.level = majorOrg.level || 90; + majorOrgsSorted.push(majorOrg); + } + } + // uat run + for (const majorOrg of majorOrgs) { + if (isUatRun(majorOrg?.branchName || "")) { + majorOrg.level = majorOrg.level || 80; + majorOrgsSorted.push(majorOrg); + } + } + // uat + for (const majorOrg of majorOrgs) { + if (isUat(majorOrg?.branchName || "")) { + majorOrg.level = majorOrg.level || 70; + majorOrgsSorted.push(majorOrg); + } + } + // integration + for (const majorOrg of majorOrgs) { + if (isIntegration(majorOrg?.branchName || "")) { + majorOrg.level = majorOrg.level || 50; + majorOrgsSorted.push(majorOrg); + } + } + // Add remaining major branches + for (const majorOrg of sortArray(majorOrgs, { by: ['branchName'], order: ['asc'] }) as any[]) { + if (majorOrgsSorted.filter(org => org.branchName === majorOrg.branchName).length === 0) { + majorOrg.level = majorOrg.level || 40; + majorOrgsSorted.push(majorOrg); + } + } + const completedMajorOrgs = majorOrgsSorted.map((majorOrg: 
any) => { + if (majorOrg?.mergeTargets?.length > 0) { + return majorOrg; + } + majorOrg.mergeTargets = guessMatchingMergeTargets(majorOrg.branchName, majorOrgs); + return majorOrg; + }); + return completedMajorOrgs; } + +function guessMatchingMergeTargets(branchName: string, majorOrgs: any[]): string[] { + if (isProduction(branchName)) { + return []; + } + else if (isPreprod(branchName)) { + return majorOrgs.filter(org => isProduction(org.branchName)).map(org => org.branchName); + } + else if (isUat(branchName)) { + return majorOrgs.filter(org => isPreprod(org.branchName)).map(org => org.branchName); + } + else if (isUatRun(branchName)) { + return majorOrgs.filter(org => isPreprod(org.branchName)).map(org => org.branchName); + } + else if (isIntegration(branchName)) { + return majorOrgs.filter(org => isUat(org.branchName)).map(org => org.branchName); + } + uxLog("warning", this, c.yellow(`Unable to guess merge targets for ${branchName}. +Please set them manually in config/branches/.sfdx-hardis.${branchName}.yml +Example: +mergeTargets: + - preprod +`)); + return []; +} + +export function isProduction(branchName) { + return branchName.toLowerCase().startsWith("prod") || branchName.toLowerCase().startsWith("main"); +} + +export function isPreprod(branchName) { + return branchName.toLowerCase().startsWith("preprod") || branchName.toLowerCase().startsWith("staging"); +} + +export function isUat(branchName) { + return (branchName.toLowerCase().startsWith("uat") || branchName.toLowerCase().startsWith("recette")) && !branchName.toLowerCase().includes("run"); +} + +export function isIntegration(branchName) { + return branchName.toLowerCase().startsWith("integ"); +} + +export function isUatRun(branchName) { + return (branchName.toLowerCase().startsWith("uat") || branchName.toLowerCase().startsWith("recette")) && branchName.toLowerCase().includes("run"); +} + +export async function checkSfdxHardisTraceAvailable(conn: Connection) { + let traceObject: DescribeSObjectResult; + 
try { + traceObject = await conn.sobject("SfdxHardisTrace__c").describe(); + } catch (e: any) { + throw new SfError("You need a Custom Setting of type List (activate through Schema Settings), named SfdxHardisTrace__c, with Type__c and Key__c fields (both string, length 80)\n" + e.message); + } + const traceObjectFields = traceObject.fields; + if (traceObjectFields.filter(field => field.name === "Type__c").length === 0) { + throw new SfError("You need a field Type__c (string, length 80) on SfdxHardisTrace__c in target org"); + } + if (traceObjectFields.filter(field => field.name === "Key__c").length === 0) { + throw new SfError("You need a field Key__c (string, length 80) on SfdxHardisTrace__c in target org"); + } +} + +/** + * Get the Chrome/Chromium executable path for Puppeteer + * This is used by various commands that need browser automation + * @returns string - Path to Chrome executable, or empty string if not found + */ +export function getChromeExecutablePath(): string { + let chromeExecutablePath = process.env?.PUPPETEER_EXECUTABLE_PATH || ""; + if (chromeExecutablePath === "" || !fs.existsSync(chromeExecutablePath)) { + const chromePaths = chromeLauncher.Launcher.getInstallations(); + if (chromePaths && chromePaths.length > 0) { + chromeExecutablePath = chromePaths[0]; + } + } + return chromeExecutablePath; +} \ No newline at end of file diff --git a/src/common/utils/orgUtils.ts b/src/common/utils/orgUtils.ts index fcf7a1504..9993eed4d 100644 --- a/src/common/utils/orgUtils.ts +++ b/src/common/utils/orgUtils.ts @@ -1,26 +1,28 @@ -import { MetadataUtils } from "../metadata-utils"; -import { prompts } from "./prompts"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { createTempDir, elapseEnd, elapseStart, execCommand, execSfdxJson, isCI, uxLog } from "."; -import { WebSocketClient } from "../websocketClient"; -import { getConfig, setConfig } from "../../config"; -import * as EmailValidator from 
"email-validator"; -import * as sortArray from "sort-array"; -import { Connection, SfdxError } from "@salesforce/core"; -import { importData } from "./dataUtils"; -import { soqlQuery } from "./apiUtils"; -import { isSfdxProject } from "./projectUtils"; -import { deployMetadatas, forceSourceDeploy, forceSourcePush } from "./deployUtils"; -import { PACKAGE_ROOT_DIR } from "../../settings"; -import { clearCache } from "../cache"; +import { MetadataUtils } from '../metadata-utils/index.js'; +import { prompts } from './prompts.js'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { createTempDir, elapseEnd, elapseStart, execCommand, execSfdxJson, getExecutionContext, isCI, uxLog } from './index.js'; +import { WebSocketClient } from '../websocketClient.js'; +import { getConfig, setConfig } from '../../config/index.js'; +import * as EmailValidator from 'email-validator'; +import sortArray from 'sort-array'; +import { AuthInfo, Connection, SfError } from '@salesforce/core'; +import { importData } from './dataUtils.js'; +import { soqlQuery } from './apiUtils.js'; +import { isSfdxProject } from './projectUtils.js'; +import { deployMetadatas, smartDeploy, forceSourcePush } from './deployUtils.js'; +import { PACKAGE_ROOT_DIR } from '../../settings.js'; +import { clearCache } from '../cache/index.js'; +import { SfCommand } from '@salesforce/sf-plugins-core'; +import { authenticateUsingDeviceLogin } from './authUtils.js'; export async function listProfiles(conn: any) { if (conn in [null, undefined]) { return []; } - const profileRes = await conn.queryAll("SELECT Id,Name FROM Profile ORDER BY Name"); + const profileRes = await soqlQuery('SELECT Id,Name FROM Profile ORDER BY Name', conn); return profileRes.records; } @@ -33,9 +35,9 @@ export async function getRecordTypeId(recordTypeInfo: { sObjectType: string; dev } const recordTypeQueryRes = await soqlQuery( `SELECT Id FROM RecordType WHERE SobjectType='${recordTypeInfo.sObjectType}' AND` 
+ - ` DeveloperName='${recordTypeInfo.developerName}'` + - ` LIMIT 1`, - conn, + ` DeveloperName='${recordTypeInfo.developerName}'` + + ` LIMIT 1`, + conn ); if (recordTypeQueryRes.records[0].Id) { recordTypeIdCache[cacheKey] = recordTypeQueryRes.records[0].Id; @@ -47,123 +49,151 @@ export async function getRecordTypeId(recordTypeInfo: { sObjectType: string; dev // Prompt profile(s) for selection /* Example calls from command class: -const profiles = await promptProfiles(this.org.getConnection(),{multiselect: true, initialSelection: ["System Administrator","Administrateur Système"]}); -const profile = await promptProfiles(this.org.getConnection(),{multiselect: false, initialSelection: ["System Administrator","Administrateur Système"]}); +const profiles = await promptProfiles(flags['target-org'].getConnection(),{multiselect: true, initialSelection: ["System Administrator","Administrateur Système"]}); +const profile = await promptProfiles(flags['target-org'].getConnection(),{multiselect: false, initialSelection: ["System Administrator","Administrateur Système"]}); */ export async function promptProfiles( conn: Connection, options: any = { multiselect: false, initialSelection: [], - returnField: "Name", - message: "Please select profile(s)", + returnField: 'Name', + message: 'Please select profile(s)', allowSelectAll: true, - allowSelectAllErrorMessage: "You can not select all profiles", + allowSelectAllErrorMessage: 'You can not select all profiles', allowSelectMine: true, - allowSelectMineErrorMessage: "You can not select the profile your user is assigned to", - }, + allowSelectMineErrorMessage: 'You can not select the profile your user is assigned to', + } ) { const profiles = await listProfiles(conn); // Profiles returned by active connection if (profiles.length > 0) { const profilesSelection = await prompts({ - type: options.multiselect ? 
"multiselect" : "select", - message: options.message || "Please select profile(s)", - name: "value", + type: options.multiselect ? 'multiselect' : 'select', + message: options.message || 'Please select profile(s)', + description: 'Select one or more Salesforce profiles for the operation', + name: 'value', choices: profiles.map((profile: any) => { return { title: profile.Name, - value: options.returnField === "record" ? profile : options.returnField === "Id" ? profile.Id : profile.Name, + value: options.returnField === 'record' ? profile : options.returnField === 'Id' ? profile.Id : profile.Name, }; }), }); // Verify that all profiles are not selected if allowSelectAll === false if (options.allowSelectAll === false && profilesSelection.value.length === profiles.length) { - throw new SfdxError(options.allowSelectAllErrorMessage); + throw new SfError(options.allowSelectAllErrorMessage); } // Verify that current user profile is not selected if (options.allowSelectMine === false) { - if (!["record", "Id"].includes(options.returnField)) { - throw new SfdxError("You can not use option allowSelectMine:false if you don't use record or Id as return value"); + if (!['record', 'Id'].includes(options.returnField)) { + throw new SfError("You can not use option allowSelectMine:false if you don't use record or Id as return value"); } - const userRes = await soqlQuery(`SELECT ProfileId FROM User WHERE Id='${(await conn.identity()).user_id}' LIMIT 1`, conn); - const profileId = userRes.records[0]["ProfileId"]; - if (profilesSelection.value.filter((profileSelected) => profileSelected === profileId || profileSelected?.Id === profileId).length > 0) { - throw new SfdxError(options.allowSelectMineErrorMessage); + const userRes = await soqlQuery( + `SELECT ProfileId FROM User WHERE Id='${(await conn.identity()).user_id}' LIMIT 1`, + conn + ); + const profileId = userRes.records[0]['ProfileId']; + if ( + profilesSelection.value.filter( + (profileSelected) => profileSelected === profileId 
|| profileSelected?.Id === profileId + ).length > 0 + ) { + throw new SfError(options.allowSelectMineErrorMessage); } } return profilesSelection.value || null; } else { // Manual input of comma separated profiles const profilesSelection = await prompts({ - type: "text", - message: options.message || "Please input profile name", - name: "value", + type: 'text', + message: options.message || 'Please input profile name', + description: 'Enter the Salesforce profile name manually', + placeholder: 'Ex: System Administrator', + name: 'value', initial: options?.initialSelection[0] || null, }); - return options.multiselect ? profilesSelection.value.split(",") : profilesSelection.value; + return options.multiselect ? profilesSelection.value.split(',') : profilesSelection.value; } } export async function promptOrg( - commandThis: any, - options: any = { devHub: false, setDefault: true, scratch: false, devSandbox: false, promptMessage: null }, + commandThis: SfCommand, + options: any = { devHub: false, setDefault: true, scratch: false, devSandbox: false, promptMessage: null, quickOrgList: false, defaultOrgUsername: null, useCache: true } ) { // List all local orgs and request to user - const orgListResult = await MetadataUtils.listLocalOrgs(options.devSandbox === true ? "sandbox" : "any"); + // Access flags via commandThis, fallback to options if not present + const defaultOrgUsername = options.defaultOrgUsername || '' + const orgListResult = await MetadataUtils.listLocalOrgs(options.devSandbox === true ? 
'sandbox' : 'any', { quickOrgList: options.quickOrgList, useCache: options.useCache }); let orgList = [ - ...sortArray(orgListResult?.scratchOrgs || [], { by: ["devHubUsername", "username", "alias", "instanceUrl"], order: ["asc", "asc", "asc"] }), - ...sortArray(orgListResult?.nonScratchOrgs || [], { by: ["username", "alias", "instanceUrl"], order: ["asc", "asc", "asc"] }), { - username: "🌍 Connect to another org", + username: '🌍 Login to another org', otherOrg: true, - descriptionForUi: "Connect in Web Browser to a Sandbox, a Production Org, a Dev Org or a Scratch Org", + descriptionForUi: 'Connect in Web Browser to a Sandbox, a Production Org, a Dev Org or a Scratch Org', }, + ...sortArray(orgListResult?.scratchOrgs || [], { + by: ['instanceUrl', 'devHubUsername', 'username', 'alias'], + order: ['asc', 'asc', 'asc'], + }), + ...sortArray(orgListResult?.nonScratchOrgs || [], { + by: ['instanceUrl', 'username', 'alias',], + order: ['asc', 'asc', 'asc'], + }), { username: "😱 I already authenticated my org but I don't see it !", clearCache: true, - descriptionForUi: "It might be a sfdx-hardis cache issue, reset it and try again !", + descriptionForUi: 'It might be a sfdx-hardis cache issue, reset it and try again !', }, - { username: "❌ Cancel", cancel: true, descriptionForUi: "Get out of here :)" }, + { username: '❌ Cancel', cancel: true, descriptionForUi: 'Get out of here :)' }, ]; // Filter if we want to list only the scratch attached to current devhub if (options.scratch === true) { - const configGetRes = await execSfdxJson("sfdx config:get defaultdevhubusername", this, { + const configGetRes = await execSfdxJson('sf config get target-dev-hub', this, { output: false, fail: true, }); - const hubOrgUsername = configGetRes?.result[0]?.value || ""; - orgList = orgList.filter((org: any) => org.status === "Active" && org.devHubUsername === hubOrgUsername); + const hubOrgUsername = configGetRes?.result[0]?.value || ''; + orgList = orgList.filter((org: any) => org.status 
=== 'Active' && org.devHubUsername === hubOrgUsername); } + const defaultOrg = orgList.find((org: any) => org.username === defaultOrgUsername) || null; + // Prompt user + /* jscpd:ignore-start */ const orgResponse = await prompts({ - type: "select", - name: "org", - message: c.cyanBright(options.promptMessage || "Please select an org"), + type: 'select', + name: 'org', + message: c.cyanBright(options.promptMessage || 'Please select an org'), + description: 'Choose a Salesforce org from the list of authenticated orgs', + default: defaultOrg || '', choices: orgList.map((org: any) => { - const title = org.username || org.alias || org.instanceUrl; - const description = (title !== org.instanceUrl ? org.instanceUrl : "") + (org.devHubUsername ? ` (Hub: ${org.devHubUsername})` : "-"); + let title = org.instanceUrl || org.username || org.alias || "ERROR"; + if (org.alias && title !== org.alias) { + title += ` (${org.alias})`; + } + const description = `Connected with ${org.username || org.alias || 'unknown user'} ` + + (org.devHubUsername ? ` (Hub: ${org.devHubUsername})` : ''); return { - title: c.cyan(title), - description: org.descriptionForUi ? org.descriptionForUi : description || "-", + title: title.replace("https://", ""), + description: org.descriptionForUi ? org.descriptionForUi : description || '-', value: org, }; }), }); + /* jscpd:ignore-end */ let org = orgResponse.org; // Cancel if (org.cancel === true) { - uxLog(commandThis, c.cyan("Cancelled")); + uxLog("error", commandThis, c.red('Cancelled')); process.exit(0); } // Connect to new org if (org.otherOrg === true) { - await commandThis.config.runHook("auth", { + await commandThis.config.runHook('auth', { checkAuth: true, Command: commandThis, devHub: options.devHub === true, @@ -179,62 +209,148 @@ export async function promptOrg( } // Token is expired: login again to refresh it - if (org?.connectedStatus === "RefreshTokenAuthError") { - uxLog(this, c.yellow(`⚠️ Your authentication is expired. 
Please login again in the web browser`)); - const loginCommand = "sfdx auth:web:login" + ` --instanceurl ${org.instanceUrl}`; - const loginResult = await execSfdxJson(loginCommand, this, { fail: true, output: true }); - org = loginResult.result; + if (org?.connectedStatus === 'RefreshTokenAuthError' || org?.connectedStatus?.includes('expired')) { + uxLog("action", this, c.yellow(`⚠️ Your authentication is expired. Please login again in the web browser`)); + if (getExecutionContext() === "web") { + org = await authenticateUsingDeviceLogin(org.instanceUrl, org.username, null, {}, false, null); + } + else { + const loginCommand = 'sf org login web' + ` --instance-url ${org.instanceUrl}`; + const loginResult = await execSfdxJson(loginCommand, this, { fail: true, output: false }); + org = loginResult.result; + } } if (options.setDefault === true) { // Set default username - const setDefaultUsernameCommand = - `sfdx config:set ` + `${options.devHub ? "defaultdevhubusername" : "defaultusername"}=${org.username}` + (!isSfdxProject() ? " --global" : ""); - await execSfdxJson(setDefaultUsernameCommand, commandThis, { + const setDefaultOrgCommand = + `sf config set ` + + `${options.devHub ? 'target-dev-hub' : 'target-org'}=${org.username}` + + (!isSfdxProject() ? 
' --global' : ''); + await execSfdxJson(setDefaultOrgCommand, commandThis, { fail: true, output: false, }); // If devHub , set alias of project devHub from config file - const config = await getConfig("project"); + const config = await getConfig('project'); if (options.devHub && config.devHubAlias) { - const setAliasCommand = `sfdx alias:set ${config.devHubAlias}=${org.username}`; - await execSfdxJson(setAliasCommand, commandThis, { - fail: true, - output: false, - }); - } else { - // If not devHub, set MY_ORG as alias - const setAliasCommand = `sfdx alias:set MY_ORG=${org.username}`; + const setAliasCommand = `sf alias set ${config.devHubAlias}=${org.username}`; await execSfdxJson(setAliasCommand, commandThis, { fail: true, output: false, }); } - WebSocketClient.sendMessage({ event: "refreshStatus" }); + WebSocketClient.sendRefreshStatusMessage(); // Update local user .sfdx-hardis.yml file with response if scratch has been selected - if (org.username.includes("scratch")) { - await setConfig("user", { + if (org.username.includes('scratch')) { + await setConfig('user', { scratchOrgAlias: org.alias || null, scratchOrgUsername: org.username || org.alias, }); } else { - await setConfig("user", { - scratchOrgAlias: null, - scratchOrgUsername: null, - }); + const configUser = await getConfig('user'); + if (configUser.scratchOrgAlias || configUser.scratchOrgUsername) { + await setConfig('user', { + scratchOrgAlias: null, + scratchOrgUsername: null, + }); + } } } // uxLog(commandThis, c.gray(JSON.stringify(org, null, 2))); - uxLog(commandThis, c.cyan(`Org ${c.green(org.username)} - ${c.green(org.instanceUrl)}`)); + uxLog("log", commandThis, c.grey(`Selected Org ${c.green(org.username)} - ${c.green(org.instanceUrl)}`)); return orgResponse.org; } -export async function promptOrgUsernameDefault(commandThis: any, defaultOrg: string, options: any = { devHub: false, setDefault: true }) { +export async function promptOrgList(options: { promptMessage?: string } = {}) { + const 
orgListResult = await MetadataUtils.listLocalOrgs('any'); + const orgListSorted = sortArray(orgListResult?.nonScratchOrgs || [], { + by: ['instanceUrl', 'username', 'alias',], + order: ['asc', 'asc', 'asc'], + }); + // Prompt user + const orgResponse = await prompts({ + type: 'multiselect', + name: 'orgs', + message: c.cyanBright(options.promptMessage || 'Please select orgs'), + description: 'Choose multiple Salesforce orgs from the list of authenticated orgs', + choices: orgListSorted.map((org: any) => { + const title = org.instanceUrl || org.username || org.alias || "ERROR"; + const description = `Connected with ${org.username || org.alias || 'unknown user'} ` + + (org.devHubUsername ? ` (Hub: ${org.devHubUsername})` : ''); + return { + title: title, + description: org.descriptionForUi ? org.descriptionForUi : description || '-', + value: org, + }; + }), + }); + return orgResponse.orgs; +} + +export async function makeSureOrgIsConnected(targetOrg: string | any) { + // Get connected Status and instance URL + let connectedStatus; + let instanceUrl; + let orgResult: any; + if (typeof targetOrg !== 'string') { + instanceUrl = targetOrg.instanceUrl; + connectedStatus = targetOrg.connectedStatus; + targetOrg = targetOrg.username; + orgResult = targetOrg; + } + else { + const displayOrgCommand = `sf org display --target-org ${targetOrg}`; + const displayResult = await execSfdxJson(displayOrgCommand, this, { + fail: false, + output: false, + }); + connectedStatus = displayResult?.result?.connectedStatus || "error"; + instanceUrl = displayResult?.result?.instanceUrl || "error"; + orgResult = displayResult.result + } + // Org is connected + if (connectedStatus === "Connected") { + return orgResult; + } + // Authentication is necessary + if (connectedStatus?.includes("expired")) { + uxLog("action", this, c.yellow("Your auth token is expired, you need to authenticate again\n(Be patient after login, it can take a while 😑)")); + // Delete rotten authentication json file in 
case there has been a sandbox refresh + const homeSfdxDir = path.join(process.env.HOME || process.env.USERPROFILE || "~", '.sfdx'); + const authFile = path.join(homeSfdxDir, `${targetOrg}.json`); + if (fs.existsSync(authFile)) { + try { + await fs.unlink(authFile); + uxLog("log", this, c.cyan(`Deleted potentially rotten auth file ${c.green(authFile)}`)); + } catch (e: any) { + uxLog("warning", this, c.red(`Error while deleting potentially rotten auth file ${c.green(authFile)}: ${e.message}\nYou might need to delete it manually.`)); + } + } + if (getExecutionContext() === "web") { + orgResult = await authenticateUsingDeviceLogin(instanceUrl, targetOrg, null, {}, false, null); + return orgResult; + } + // Authenticate again + const loginCommand = 'sf org login web' + ` --instance-url ${instanceUrl}`; + const loginRes = await execSfdxJson(loginCommand, this, { fail: true, output: false }); + return loginRes.result; + } + // We shouldn't be here :) + uxLog("warning", this, c.yellow("What are we doing here ? 
Please declare an issue with the following text: " + instanceUrl + ":" + connectedStatus)); +} + +export async function promptOrgUsernameDefault( + commandThis: any, + defaultOrg: string, + options: any = { devHub: false, setDefault: true, message: "", quickOrgList: true } +) { const defaultOrgRes = await prompts({ - type: "confirm", - message: `Do you want to use org ${defaultOrg}`, + type: 'confirm', + message: options.message || `Do you want to use org ${defaultOrg} ?`, + description: 'Confirms whether to use the currently configured default org or select a different one', }); if (defaultOrgRes.value === true) { return defaultOrg; @@ -245,18 +361,20 @@ export async function promptOrgUsernameDefault(commandThis: any, defaultOrg: str } export async function promptUserEmail(promptMessage: string | null = null) { - const userConfig = await getConfig("user"); + const userConfig = await getConfig('user'); const promptResponse = await prompts({ - type: "text", - name: "value", - initial: userConfig.userEmail || "", - message: c.cyanBright(promptMessage || "Please input your email address (it will be stored locally for later use)"), + type: 'text', + name: 'value', + initial: userConfig.userEmail || '', + message: c.cyanBright(promptMessage || 'Please input your email address'), + description: 'Your email address will be stored locally and used for CI/CD operations', + placeholder: 'Ex: john.doe@company.com', validate: (value: string) => EmailValidator.validate(value), }); const userEmail = promptResponse.value; // Store email in user .sfdx-hardis.USERNAME.yml file for later reuse if (userConfig.userEmail !== userEmail) { - await setConfig("user", { + await setConfig('user', { userEmail: userEmail, }); } @@ -266,21 +384,27 @@ export async function promptUserEmail(promptMessage: string | null = null) { // Authenticate with SfdxUrlStore export async function authenticateWithSfdxUrlStore(org: any) { // Authenticate to scratch org to delete - const authFile = 
path.join(await createTempDir(), "sfdxScratchAuth.txt"); + const authFile = path.join(await createTempDir(), 'sfdxScratchAuth.txt'); const authFileContent = org.scratchOrgSfdxAuthUrl || (org.authFileJson ? JSON.stringify(org.authFileJson) : null); - await fs.writeFile(authFile, authFileContent, "utf8"); - const authCommand = `sfdx auth:sfdxurl:store -f ${authFile}`; + await fs.writeFile(authFile, authFileContent, 'utf8'); + const authCommand = `sf org login sfdx-url --sfdx-url-file ${authFile}`; await execCommand(authCommand, this, { fail: true, output: false }); } // Add package installation to project .sfdx-hardis.yml -export async function managePackageConfig(installedPackages, packagesToInstallCompleted) { - const config = await getConfig("project"); +export async function managePackageConfig(installedPackages, packagesToInstallCompleted, filterStandard = false) { + const config = await getConfig('project'); let projectPackages = config.installedPackages || []; let updated = false; + const promptPackagesToInstall: any[] = []; for (const installedPackage of installedPackages) { - const matchInstalled = packagesToInstallCompleted.filter((pckg) => pckg.SubscriberPackageId === installedPackage.SubscriberPackageId); - const matchLocal = projectPackages.filter((projectPackage) => installedPackage.SubscriberPackageId === projectPackage.SubscriberPackageId); + // Filter standard packages + const matchInstalled = packagesToInstallCompleted.filter( + (pckg) => pckg.SubscriberPackageId === installedPackage.SubscriberPackageId + ); + const matchLocal = projectPackages.filter( + (projectPackage) => installedPackage.SubscriberPackageId === projectPackage.SubscriberPackageId + ); // Upgrade version of already installed package if (matchInstalled.length > 0 && matchLocal.length > 0) { projectPackages = projectPackages.map((projectPackage) => { @@ -294,58 +418,104 @@ export async function managePackageConfig(installedPackages, packagesToInstallCo return projectPackage; }); 
uxLog( + "action", this, c.cyan( `Updated package ${c.green(installedPackage.SubscriberPackageName)} with version id ${c.green( - installedPackage.SubscriberPackageVersionId, - )}`, - ), + installedPackage.SubscriberPackageVersionId + )}` + ) ); updated = true; } else if (matchInstalled.length > 0 && matchLocal.length === 0) { - // Request user about automatic installation during scratch orgs and deployments - const installResponse = await prompts({ - type: "select", - name: "value", - message: c.cyanBright(`Please select the install configuration for ${c.bold(installedPackage.SubscriberPackageName)}`), - choices: [ - { - title: `Install automatically ${c.bold(installedPackage.SubscriberPackageName)} on scratch orgs only`, - value: "scratch", - }, - { - title: `Deploy automatically ${c.bold(installedPackage.SubscriberPackageName)} on integration/production orgs only`, - value: "deploy", - }, - { - title: `Both: Install & deploy automatically ${c.bold(installedPackage.SubscriberPackageName)}`, - value: "scratch-deploy", - }, - { - title: `Do not configure ${c.bold(installedPackage.SubscriberPackageName)} installation / deployment`, - value: "none", - }, - ], - }); - installedPackage.installOnScratchOrgs = installResponse.value.includes("scratch"); - installedPackage.installDuringDeployments = installResponse.value.includes("deploy"); - if (installResponse.value !== "none" && installResponse.value != null) { - projectPackages.push(installedPackage); - updated = true; + // Check if not filtered package + if ( + filterStandard && + [ + "License Management App", + "Sales Cloud", + "Sales Insights", + "Salesforce Chatter Dashboards 1.0", + "Salesforce Chatter Dashboards", + "Salesforce Connected Apps", + "Salesforce Mobile Apps", + "Salesforce.com CRM Dashboards", + "SalesforceA Connected Apps", + "Trail Tracker" + ].includes(installedPackage.SubscriberPackageName) + ) { + uxLog("action", this, c.cyan(`Skipped ${installedPackage.SubscriberPackageName} as it is a 
Salesforce standard package`)) + continue; } + + promptPackagesToInstall.push(installedPackage); + } + } + + const promptPackagesRes = await prompts({ + type: "multiselect", + name: 'value', + message: c.cyanBright('Please select packages to add to your project configuration'), + description: 'Select packages to add to your project configuration for automatic installation during scratch org creation and/or deployments', + choices: promptPackagesToInstall.map((pckg) => { + return { + title: `${pckg.SubscriberPackageName} (${pckg.SubscriberPackageVersionNumber})`, + value: pckg, + }; + }), + }); + const selectedPackages: any[] = promptPackagesRes.value || []; + + for (const installedPackage of selectedPackages) { + // Request user about automatic installation during scratch orgs and deployments + const installResponse = await prompts({ + type: 'select', + name: 'value', + message: c.cyanBright( + `Please select the install configuration for ${c.bold(installedPackage.SubscriberPackageName)}` + ), + description: 'Configure how this package should be automatically installed during CI/CD operations', + choices: [ + { + title: `Deploy automatically ${c.bold( + installedPackage.SubscriberPackageName + )} on integration/production orgs only`, + value: 'deploy', + }, + { + title: `Install automatically ${c.bold(installedPackage.SubscriberPackageName)} on scratch orgs only`, + value: 'scratch', + }, + { + title: `Both: Install & deploy automatically ${c.bold(installedPackage.SubscriberPackageName)}`, + value: 'scratch-deploy', + }, + { + title: `Do not configure ${c.bold(installedPackage.SubscriberPackageName)} installation / deployment`, + value: 'none', + }, + ], + }); + installedPackage.installOnScratchOrgs = installResponse.value.includes('scratch'); + installedPackage.installDuringDeployments = installResponse.value.includes('deploy'); + if (installResponse.value !== 'none' && installResponse.value != null) { + projectPackages.push(installedPackage); + updated = true; } 
} + if (updated) { - uxLog(this, "Updated package configuration in sfdx-hardis config"); - await setConfig("project", { installedPackages: projectPackages }); + uxLog("action", this, c.cyan('Updated package configuration in .sfdx-hardis.yml config file')); + const configFile = await setConfig('project', { installedPackages: projectPackages }); + WebSocketClient.sendReportFileMessage(`${configFile!}#installedPackages`, "Package config in .sfdx-hardis.yml", "report"); } } export async function installPackages(installedPackages: any[], orgAlias: string) { const packages = installedPackages || []; - elapseStart("Install all packages"); - await MetadataUtils.installPackagesOnOrg(packages, orgAlias, this, "scratch"); - elapseEnd("Install all packages"); + elapseStart('Install all packages'); + await MetadataUtils.installPackagesOnOrg(packages, orgAlias, this, 'scratch'); + elapseEnd('Install all packages'); } export async function initOrgMetadatas( @@ -354,53 +524,68 @@ export async function initOrgMetadatas( orgAlias: string, projectScratchDef: any, debugMode: boolean, - options: any = {}, + options: any = {} ) { // Push or deploy according to config (default: push) - if ((isCI && process.env.CI_SCRATCH_MODE === "deploy") || process.env.DEBUG_DEPLOY === "true") { - // if CI, use force:source:deploy to make sure package.xml is consistent - uxLog(this, c.cyan(`Deploying project sources to scratch org ${c.green(orgAlias)}...`)); + if ((isCI && process.env.CI_SCRATCH_MODE === 'deploy') || process.env.DEBUG_DEPLOY === 'true') { + // if CI, use sf project deploy start to make sure package.xml is consistent + uxLog("action", this, c.cyan(`Deploying project sources to org ${c.green(orgAlias)}...`)); const packageXmlFile = - process.env.PACKAGE_XML_TO_DEPLOY || configInfo.packageXmlToDeploy || fs.existsSync("./manifest/package.xml") - ? 
"./manifest/package.xml" - : "./config/package.xml"; - await forceSourceDeploy(packageXmlFile, false, "NoTestRun", debugMode, this, { + process.env.PACKAGE_XML_TO_DEPLOY || configInfo.packageXmlToDeploy || fs.existsSync('./manifest/package.xml') + ? './manifest/package.xml' + : './config/package.xml'; + await smartDeploy(packageXmlFile, false, 'NoTestRun', debugMode, this, { targetUsername: orgUsername, + conn: null, + testClasses: "" }); } else { // Use push for local scratch orgs - uxLog(this, c.cyan(`Pushing project sources to scratch org ${c.green(orgAlias)}... (You can see progress in Setup -> Deployment Status)`)); + uxLog( + "action", + this, + c.cyan( + `Pushing project sources to org ${c.green( + orgAlias + )}... (You can see progress in Setup -> Deployment Status)` + ) + ); // Suspend sharing calc if necessary - const deferSharingCalc = (projectScratchDef.features || []).includes("DeferSharingCalc"); + const deferSharingCalc = (projectScratchDef.features || []).includes('DeferSharingCalc'); if (deferSharingCalc) { // Deploy to permission set allowing to update SharingCalc await deployMetadatas({ - deployDir: path.join(path.join(PACKAGE_ROOT_DIR, "defaults/utils/deferSharingCalc", ".")), - testlevel: "NoTestRun", - soap: true, + deployDir: path.join(path.join(PACKAGE_ROOT_DIR, 'defaults/utils/deferSharingCalc', '.')), + testlevel: 'NoTestRun', }); // Assign to permission set allowing to update SharingCalc try { - const assignCommand = `sfdx force:user:permset:assign -n SfdxHardisDeferSharingRecalc -u ${orgUsername}`; + const assignCommand = `sf org assign permset --name SfdxHardisDeferSharingRecalc --target-org ${orgUsername}`; await execSfdxJson(assignCommand, this, { fail: false, // Do not fail in case permission set already exists output: false, debug: debugMode, }); - await execCommand("sfdx texei:sharingcalc:suspend", this, { + await execCommand('sf texei:sharingcalc:suspend', this, { fail: false, output: true, debug: debugMode, }); } catch (e) { - 
uxLog(self, c.yellow("Issue while assigning SfdxHardisDeferSharingRecalc PS and suspending Sharing Calc, but it's probably ok anyway")); - uxLog(self, c.grey(e.message)); + uxLog( + "warning", + this, + c.yellow( + "Issue while assigning SfdxHardisDeferSharingRecalc PS and suspending Sharing Calc, but it's probably ok anyway" + ) + ); + uxLog("log", this, c.grey((e as Error).message)); } } await forceSourcePush(orgAlias, this, debugMode, options); // Resume sharing calc if necessary if (deferSharingCalc) { - await execCommand("sfdx texei:sharingcalc:resume", this, { + await execCommand('sf texei:sharingcalc:resume', this, { fail: false, output: true, debug: debugMode, @@ -411,33 +596,40 @@ export async function initOrgMetadatas( // Assign permission sets to user export async function initPermissionSetAssignments(permSets: Array, orgUsername: string) { - uxLog(this, c.cyan("Assigning Permission Sets...")); + uxLog("action", this, c.cyan('Assigning Permission Sets...')); for (const permSet of permSets) { - uxLog(this, c.cyan(`Assigning ${c.bold(permSet.name || permSet)} to sandbox org user`)); - const assignCommand = `sfdx force:user:permset:assign -n ${permSet.name || permSet} -u ${orgUsername}`; + uxLog("action", this, c.cyan(`Assigning ${c.bold(permSet.name || permSet)} to org user ${orgUsername}`)); + const assignCommand = `sf org assign permset --name ${permSet.name || permSet} --target-org ${orgUsername}`; const assignResult = await execSfdxJson(assignCommand, this, { fail: false, output: false, }); - if (assignResult?.result?.failures?.length > 0 && !assignResult?.result?.failures[0].message.includes("Duplicate")) { - uxLog(this, c.red(`Error assigning to ${c.bold(permSet.name || permSet)}\n${assignResult?.result?.failures[0].message}`)); + if ( + assignResult?.result?.failures?.length > 0 && + !assignResult?.result?.failures[0].message.includes('Duplicate') + ) { + uxLog( + "error", + this, + c.red(`Error assigning to ${c.bold(permSet.name || 
permSet)}\n${assignResult?.result?.failures[0].message}`) + ); } } } // Run initialization apex scripts export async function initApexScripts(orgInitApexScripts: Array, orgAlias: string) { - uxLog(this, c.cyan("Running apex initialization scripts...")); + uxLog("action", this, c.cyan('Running apex initialization scripts...')); // Build list of apex scripts and check their existence const initApexScripts = orgInitApexScripts.map((scriptName: string) => { if (!fs.existsSync(scriptName)) { - throw new SfdxError(c.red(`[sfdx-hardis][ERROR] Unable to find script ${scriptName}`)); + throw new SfError(c.red(`[sfdx-hardis][ERROR] Unable to find script ${scriptName}`)); } return scriptName; }); // Process apex scripts for (const apexScript of initApexScripts) { - const apexScriptCommand = `sfdx force:apex:execute -f "${apexScript}" -u ${orgAlias}`; + const apexScriptCommand = `sf apex run --file "${apexScript}" --target-org ${orgAlias}`; await execCommand(apexScriptCommand, this, { fail: true, output: true, @@ -449,15 +641,21 @@ export async function initApexScripts(orgInitApexScripts: Array, orgAlias: export async function initOrgData(initDataFolder: string, orgUsername: string) { // Init folder (accounts, etc...) 
if (fs.existsSync(initDataFolder)) { - uxLog(this, c.cyan("Loading sandbox org initialization data...")); + uxLog("action", this, c.cyan('Loading sandbox org initialization data...')); await importData(initDataFolder, this, { targetUsername: orgUsername, }); } else { - uxLog(this, c.cyan(`No initialization data: Define a sfdmu workspace in ${initDataFolder} if you need data in your new sandbox orgs`)); + uxLog( + "action", + this, + c.cyan( + `No initialization data: Define a sfdmu workspace in ${initDataFolder} if you need data in your new sandbox orgs` + ) + ); } // Import data packages - const config = await getConfig("user"); + const config = await getConfig('user'); const dataPackages = config.dataPackages || []; for (const dataPackage of dataPackages) { if (dataPackage.importInSandboxOrgs === true) { @@ -465,13 +663,19 @@ export async function initOrgData(initDataFolder: string, orgUsername: string) { targetUsername: orgUsername, }); } else { - uxLog(this, c.grey(`Skipped import of ${dataPackage.dataPath} as importInSandboxOrgs is not defined to true in .sfdx-hardis.yml`)); + uxLog( + "log", + this, + c.grey( + `Skipped import of ${dataPackage.dataPath} as importInSandboxOrgs is not defined to true in .sfdx-hardis.yml` + ) + ); } } } export async function getOrgAliasUsername(alias: string) { - const aliasListRes = await execSfdxJson("sfdx alias:list", this, { + const aliasListRes = await execSfdxJson('sf alias list', this, { output: false, fail: false, }); @@ -482,10 +686,29 @@ export async function getOrgAliasUsername(alias: string) { return null; } +// Returns true if the org is a sandbox and not a scratch org +export async function isProductionOrg(targetUsername: string, options: any) { + // Use jsforce connection is applicable + if (options?.conn?.username && options.conn.username === targetUsername) { + const orgRes = await soqlQuery('SELECT IsSandbox FROM Organization LIMIT 1', options.conn); + return orgRes.records[0].IsSandbox === false; + } + // Use 
SF Cli command + const orgQuery = `sf data query --query "SELECT IsSandbox FROM Organization LIMIT 1"` + + (targetUsername ? ` --target-org ${targetUsername}` : ""); + const orgQueryRes = await execSfdxJson(orgQuery, this, { + output: false, + debug: options.debugMode || false, + fail: true, + }); + const orgRes = orgQueryRes?.result?.records || orgQueryRes.records || []; + return orgRes[0].IsSandbox === false; +} + // Returns true if the org is a sandbox and not a scratch org export async function isSandbox(options: any) { if (options.conn) { - const orgRes = await soqlQuery("SELECT IsSandbox,TrialExpirationDate FROM Organization LIMIT 1", options.conn); + const orgRes = await soqlQuery('SELECT IsSandbox,TrialExpirationDate FROM Organization LIMIT 1', options.conn); return orgRes.records[0].IsSandbox === true && orgRes.records[0].TrialExpirationDate == null; } else { return options?.scratch === false; @@ -495,9 +718,47 @@ export async function isSandbox(options: any) { // Returns true if the org is a scratch org and not a sandbox export async function isScratchOrg(options: any) { if (options.conn) { - const orgRes = await soqlQuery("SELECT IsSandbox,TrialExpirationDate FROM Organization LIMIT 1", options.conn); + const orgRes = await soqlQuery('SELECT IsSandbox,TrialExpirationDate FROM Organization LIMIT 1', options.conn); return orgRes.records[0].IsSandbox === true && orgRes.records[0].TrialExpirationDate !== null; } else { return options?.scratch === true; } } + +// Set global variables with connections +let tryTechnical = true; +export async function setConnectionVariables(conn, handleTechnical = false) { + if (conn) { + globalThis.jsForceConn = conn; + } + if (handleTechnical && tryTechnical && !(process.env?.SKIP_TECHNICAL_ORG === "true")) { + try { + const techOrgDisplayCommand = 'sf org display --target-org TECHNICAL_ORG --json --verbose'; + const orgInfoResult = await execSfdxJson(techOrgDisplayCommand, this, { + fail: false, + output: false, + debug: 
false, + }); + if (orgInfoResult.result && orgInfoResult.result.connectedStatus === 'Connected') { + const authInfo = await AuthInfo.create({ + username: orgInfoResult.result.username, + isDefaultUsername: false, + }); + const connTechnical = await Connection.create({ + authInfo: authInfo, + connectionOptions: { + instanceUrl: orgInfoResult.result.instanceUrl, + accessToken: orgInfoResult.result.accessToken + } + }); + const identity = await connTechnical.identity(); + uxLog("log", this, c.grey(`Connected to technical org ${c.green(identity.username)}`)); + globalThis.jsForceConnTechnical = connTechnical; + } + } catch (e) { + uxLog("warning", this, c.yellow(`Unable to connect to technical org: ${e}\nThat's ok, we'll use default org :)`)); + globalThis.jsForceConnTechnical = null; + } + } + tryTechnical = false; +} diff --git a/src/common/utils/poolUtils.ts b/src/common/utils/poolUtils.ts index d8fee3fa6..4b2498230 100644 --- a/src/common/utils/poolUtils.ts +++ b/src/common/utils/poolUtils.ts @@ -1,23 +1,20 @@ -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as moment from "moment"; -import * as os from "os"; -import * as path from "path"; -import { getConfig, setConfig } from "../../config"; -import { createTempDir, execSfdxJson, isCI, uxLog } from "."; -import { KeyValueProviderInterface } from "./keyValueUtils"; -import { KeyValueXyzProvider } from "../keyValueProviders/keyValueXyz"; -import { KvdbIoProvider } from "../keyValueProviders/kvdbIo"; -import { LocalTestProvider } from "../keyValueProviders/localtest"; -import { SfdxError } from "@salesforce/core"; -import { prompts } from "./prompts"; -import { RedisProvider } from "../keyValueProviders/redis"; -import { SalesforceProvider } from "../keyValueProviders/salesforce"; +import c from 'chalk'; +import fs from 'fs-extra'; +import moment from 'moment'; +import * as os from 'os'; +import * as path from 'path'; +import { getConfig, setConfig } from '../../config/index.js'; +import { 
createTempDir, execSfdxJson, isCI, uxLog } from './index.js'; +import { KeyValueProviderInterface } from './keyValueUtils.js'; +import { LocalTestProvider } from '../keyValueProviders/localtest.js'; +import { SfError } from '@salesforce/core'; +import { prompts } from './prompts.js'; +import { SalesforceProvider } from '../keyValueProviders/salesforce.js'; let keyValueProvider: KeyValueProviderInterface; export async function getPoolConfig() { - const config = await getConfig("branch"); + const config = await getConfig('branch'); return config.poolConfig || null; } @@ -39,9 +36,9 @@ export async function getPoolStorage(options: any = {}) { export async function setPoolStorage(value: any, options: any = {}) { const providerInitialized = await initializeProvider(options); if (providerInitialized) { - uxLog(this, "[pool] " + c.grey(`Updating poolstorage value...`)); + uxLog("other", this, '[pool] ' + c.grey(`Updating poolstorage value...`)); const valueSetRes = await keyValueProvider.setValue(null, value); - uxLog(this, "[pool] " + c.grey(`Updated poolstorage value`)); + uxLog("other", this, '[pool] ' + c.grey(`Updated poolstorage value`)); return valueSetRes; } return null; @@ -57,9 +54,9 @@ export async function updateActiveScratchOrg(scratchOrg: string, keyValues: any, } let updatingPool = false; -export async function addScratchOrgToPool(scratchOrg: any, options: any = { position: "last" }) { +export async function addScratchOrgToPool(scratchOrg: any, options: any = { position: 'last' }) { if (updatingPool === true) { - uxLog(this, c.grey("Already updating scratch org pool: try again in 2000 ms")); + uxLog("log", this, c.grey('Already updating scratch org pool: try again in 2000 ms')); await new Promise((resolve) => setTimeout(resolve, 2000)); return await addScratchOrgToPool(scratchOrg, options); } else { @@ -71,19 +68,21 @@ export async function addScratchOrgToPool(scratchOrg: any, options: any = { posi } // Write scratch org pool remote storage -async function 
executeAddScratchOrgToPool(scratchOrg: any, options: any = { position: "last" }) { +async function executeAddScratchOrgToPool(scratchOrg: any, options: any = { position: 'last' }) { const poolStorage = await getPoolStorage(options); // Valid scratch orgs if (scratchOrg.status === 0) { const scratchOrgs = poolStorage.scratchOrgs || []; - if (options.position === "first") { + if (options.position === 'first') { scratchOrgs.push(scratchOrg); } else { scratchOrgs.unshift(scratchOrg); } poolStorage.scratchOrgs = scratchOrgs; await setPoolStorage(poolStorage, options); - await updateActiveScratchOrg(scratchOrg, { Description: `Added to pool by ${os.userInfo().username} on ${moment().format("YYYYMMDD_hhmm")}` }); + await updateActiveScratchOrg(scratchOrg, { + Description: `Added to pool by ${os.userInfo().username} on ${moment().format('YYYYMMDD_hhmm')}`, + }); } else { // Store scratch creation errors /* @@ -92,7 +91,7 @@ async function executeAddScratchOrgToPool(scratchOrg: any, options: any = { posi poolStorage.scratchOrgErrors = scratchOrgErrors; await setPoolStorage(poolStorage, options); */ - uxLog(this, "[pool] " + c.red("Scratch org creation error: \n" + JSON.stringify(scratchOrg))); + uxLog("other", this, '[pool] ' + c.red('Scratch org creation error: \n' + JSON.stringify(scratchOrg))); } } @@ -102,7 +101,13 @@ export async function fetchScratchOrg(options: any) { const scratchOrg = await tryFetchScratchOrg(options); return scratchOrg; } catch (e) { - uxLog(this, c.yellow(`[pool] Unable to fetch scratch org from pool. That's sad because it's faster !\nError: ${e.message}`)); + uxLog( + "warning", + this, + c.yellow( + `[pool] Unable to fetch scratch org from pool. 
That's sad because it's faster !\nError: ${(e as Error).message}` + ) + ); return null; } } @@ -110,38 +115,51 @@ export async function fetchScratchOrg(options: any) { export async function tryFetchScratchOrg(options: any) { const poolStorage = await getPoolStorage(options); if (poolStorage === null) { - uxLog(this, "[pool] " + c.yellow("No valid scratch pool storage has been reachable. Consider fixing the scratch pool config and auth")); + uxLog( + "warning", + this, + '[pool] ' + + c.yellow('No valid scratch pool storage has been reachable. Consider fixing the scratch pool config and auth') + ); return null; } - uxLog(this, "[pool] " + c.cyan("Trying to fetch a scratch org from scratch orgs pool to improve performances")); + uxLog("other", this, '[pool] ' + c.cyan('Trying to fetch a scratch org from scratch orgs pool to improve performances')); const scratchOrgs: Array = poolStorage.scratchOrgs || []; if (scratchOrgs.length > 0) { const scratchOrg = scratchOrgs.shift(); - await updateActiveScratchOrg(scratchOrg, { Description: `Fetched by ${os.userInfo().username} on ${moment().format("YYYYMMDD_hhmm")}` }); + await updateActiveScratchOrg(scratchOrg, { + Description: `Fetched by ${os.userInfo().username} on ${moment().format('YYYYMMDD_hhmm')}`, + }); // Remove and save poolStorage.scratchOrgs = scratchOrgs; await setPoolStorage(poolStorage, options); // Authenticate to scratch org - uxLog(this, "[pool] " + c.cyan("Authenticating to scratch org from pool...")); + uxLog("action", this, '[pool] ' + c.cyan('Authenticating to scratch org from pool...')); const authTempDir = await createTempDir(); - const tmpAuthFile = path.join(authTempDir, "authFile.txt"); - const authFileContent = scratchOrg.scratchOrgSfdxAuthUrl || (scratchOrg.authFileJson ? JSON.stringify(scratchOrg.authFileJson) : null); + const tmpAuthFile = path.join(authTempDir, 'authFile.txt'); + const authFileContent = + scratchOrg.scratchOrgSfdxAuthUrl || (scratchOrg.authFileJson ? 
JSON.stringify(scratchOrg.authFileJson) : null); if (authFileContent == null) { uxLog( + "warning", this, - c.yellow(`[pool] Unable to authenticate to org ${scratchOrg.scratchOrgAlias}: ${scratchOrg.scratchOrgUsername} (missing sfdxAuthUrl)`), + c.yellow( + `[pool] Unable to authenticate to org ${scratchOrg.scratchOrgAlias}: ${scratchOrg.scratchOrgUsername} (missing sfdxAuthUrl)` + ) ); return null; } - await fs.writeFile(tmpAuthFile, authFileContent, "utf8"); - const authCommand = `sfdx auth:sfdxurl:store -f ${tmpAuthFile} --setdefaultusername --setalias ${scratchOrg.scratchOrgAlias}`; + await fs.writeFile(tmpAuthFile, authFileContent, 'utf8'); + const authCommand = `sf org login sfdx-url --sfdx-url-file ${tmpAuthFile} --set-default --alias ${scratchOrg.scratchOrgAlias}`; const authRes = await execSfdxJson(authCommand, this, { fail: false, output: false }); if (authRes.status !== 0) { uxLog( + "warning", this, c.yellow( - `[pool] Unable to authenticate to org ${scratchOrg.scratchOrgAlias}: ${scratchOrg.scratchOrgUsername}\n${c.grey(JSON.stringify(authRes))}`, - ), + `[pool] Unable to authenticate to org ${scratchOrg.scratchOrgAlias}: ${scratchOrg.scratchOrgUsername + }\n${c.grey(JSON.stringify(authRes))}` + ) ); return null; } @@ -149,29 +167,35 @@ export async function tryFetchScratchOrg(options: any) { await fs.unlink(tmpAuthFile); // Store sfdxAuthUrl for next step if we are in CI if (isCI) { - await setConfig("user", { sfdxAuthUrl: authFileContent }); + await setConfig('user', { sfdxAuthUrl: authFileContent }); } // Display org URL - const openRes = await execSfdxJson(`sf org open --url-only -u ${scratchOrg.scratchOrgAlias}`, this, { fail: false, output: false }); - uxLog(this, c.cyan(`Open scratch org with url: ${c.green(openRes?.result?.url)}`)); + const openRes = await execSfdxJson(`sf org open --url-only --target-org ${scratchOrg.scratchOrgAlias}`, this, { + fail: false, + output: false, + }); + uxLog("action", this, c.cyan(`Open scratch org with url: 
${c.green(openRes?.result?.url)}`)); // Return scratch org - await updateActiveScratchOrg(scratchOrg, { Description: `Authenticated by ${os.userInfo().username} on ${moment().format("YYYYMMDD_hhmm")}` }); + await updateActiveScratchOrg(scratchOrg, { + Description: `Authenticated by ${os.userInfo().username} on ${moment().format('YYYYMMDD_hhmm')}`, + }); return scratchOrg; } uxLog( + "warning", this, - "[pool]" + - c.yellow( - `No scratch org available in scratch org pool. You may increase ${c.white("poolConfig.maxScratchOrgsNumber")} or schedule call to ${c.white( - "sfdx hardis:scratch:pool:refresh", - )} more often in CI`, - ), + '[pool]' + + c.yellow( + `No scratch org available in scratch org pool. You may increase ${c.white( + 'poolConfig.maxScratchOrgsNumber' + )} or schedule call to ${c.white('sf hardis:scratch:pool:refresh')} more often in CI` + ) ); return null; } export async function listKeyValueProviders(): Promise> { - return [SalesforceProvider, RedisProvider, KvdbIoProvider, KeyValueXyzProvider, LocalTestProvider].map((cls) => new cls()); + return [SalesforceProvider, LocalTestProvider].map((cls) => new cls()); } async function initializeProvider(options: any) { @@ -189,11 +213,12 @@ async function initializeProvider(options: any) { if (isCI) { throw e; } - uxLog(this, "[pool] " + c.grey("Provider initialization error: " + e.message)); + uxLog("other", this, '[pool] ' + c.grey('Provider initialization error: ' + (e as Error).message)); // If manual, let's ask the user if he/she has credentials to input const resp = await prompts({ - type: "confirm", - message: "Scratch org pool credentials are missing, do you want to configure them ?", + type: 'confirm', + message: 'Scratch org pool credentials are missing, do you want to configure them ?', + description: 'Set up authentication credentials required to access the scratch org pool service', }); if (resp.value === true) { await keyValueProvider.userAuthenticate(options); @@ -209,7 +234,7 @@ export async 
function instantiateProvider(storageService: string) { const providerClasses = await listKeyValueProviders(); const providerClassRes = providerClasses.filter((cls) => cls.name === storageService); if (providerClassRes.length === 0) { - throw new SfdxError(c.red("Unable to find class for storage provider " + storageService)); + throw new SfError(c.red('Unable to find class for storage provider ' + storageService)); } return providerClassRes[0]; } diff --git a/src/common/utils/profileUtils.ts b/src/common/utils/profileUtils.ts index ec98fe055..5073ceab2 100644 --- a/src/common/utils/profileUtils.ts +++ b/src/common/utils/profileUtils.ts @@ -1,8 +1,8 @@ -import * as c from "chalk"; +import c from "chalk"; import * as path from "path"; -import { uxLog } from "."; -import { getConfig } from "../../config"; -import { parseXmlFile, writeXmlFile } from "./xmlUtils"; +import { uxLog } from "./index.js"; +import { getConfig } from "../../config/index.js"; +import { parseXmlFile, writeXmlFile } from "./xmlUtils.js"; // Push sources to org // For some cases, push must be performed in 2 times: the first with all passing sources, and the second with updated sources requiring the first push @@ -21,7 +21,7 @@ export async function minimizeProfile(profileFile: string) { const config = await getConfig("branch"); const nodesToRemove = config.minimizeProfilesNodesToRemove || nodesToRemoveDefault; // Remove nodes - const removed = []; + const removed: any[] = []; for (const node of nodesToRemove) { if (profileXml.Profile[node]) { delete profileXml.Profile[node]; @@ -31,7 +31,7 @@ export async function minimizeProfile(profileFile: string) { // Keep only default values or false values const isAdmin = path.basename(profileFile) === "Admin.profile-meta.xml"; let updatedDefaults = false; - const partiallyRemoved = []; + const partiallyRemoved: any[] = []; const nodesHavingDefaultOrFalse = ["applicationVisibilities", "recordTypeVisibilities", "userPermissions"]; for (const node of 
nodesHavingDefaultOrFalse) { if (profileXml.Profile[node]) { @@ -78,7 +78,7 @@ export async function minimizeProfile(profileFile: string) { if (partiallyRemovedUnique.length > 0) { log += ` and partially removing sections ${c.bold(partiallyRemovedUnique.join(", "))}`; } - uxLog(this, c.yellow(log)); + uxLog("warning", this, c.yellow(log)); } return { removed: removed, updatedDefaults: updatedDefaults, updated: updated }; diff --git a/src/common/utils/projectUtils.ts b/src/common/utils/projectUtils.ts index 8a3c56833..fa41c7abb 100644 --- a/src/common/utils/projectUtils.ts +++ b/src/common/utils/projectUtils.ts @@ -1,30 +1,195 @@ -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { execCommand, uxLog } from "."; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { execCommand, sortCrossPlatform, uxLog } from './index.js'; +import { glob } from 'glob'; +import { parseXmlFile } from './xmlUtils.js'; +import { getApiVersion } from '../../config/index.js'; export const GLOB_IGNORE_PATTERNS = [ - "**/node_modules/**", - "**/.git/**", - "**/cache/**", - "**/.npm/**", - "**/logs/**", - "**/.sfdx/**", - "**/.sf/**", - "**/.vscode/**", + '**/node_modules/**', + '**/.git/**', + '**/cache/**', + '**/.npm/**', + '**/logs/**', + '**/.sfdx/**', + '**/.sf/**', + '**/.vscode/**', ]; export function isSfdxProject(cwd = process.cwd()) { - return fs.existsSync(path.join(cwd, "sfdx-project.json")); + return fs.existsSync(path.join(cwd, 'sfdx-project.json')); } export async function createBlankSfdxProject(cwd = process.cwd(), debug = false) { - uxLog(this, c.cyan("Creating blank SFDX project...")); - const projectCreateCommand = 'sfdx force:project:create --projectname "sfdx-hardis-blank-project"'; + uxLog("action", this, c.cyan('Creating blank SFDX project...')); + const projectCreateCommand = 'sf project generate --name "sfdx-hardis-blank-project"'; await execCommand(projectCreateCommand, this, { 
cwd: cwd, fail: true, debug: debug, output: true, }); + return path.join(cwd, "sfdx-hardis-blank-project"); } + +export async function listFlowFiles(packageDirs) { + const flowFiles: any[] = []; + const skippedFlows: string[] = []; + for (const packageDir of packageDirs || []) { + const flowMetadatas = await glob("**/*.flow-meta.xml", { cwd: packageDir.path, ignore: GLOB_IGNORE_PATTERNS }); + for (const flowMetadata of flowMetadatas) { + const flowFile = path.join(packageDir.path, flowMetadata).replace(/\\/g, '/'); + if (await isManagedFlow(flowFile)) { + skippedFlows.push(flowFile); + } + else { + flowFiles.push(flowFile) + } + } + } + if (skippedFlows.length > 0) { + uxLog("warning", this, c.yellow(`Skipped ${skippedFlows.length} managed flows:`)); + for (const skippedFlow of sortCrossPlatform(skippedFlows)) { + uxLog("warning", this, c.yellow(` ${skippedFlow}`)); + } + } + return sortCrossPlatform(flowFiles); +} + +export async function isManagedFlow(flowFile: string) { + const flowXml = await parseXmlFile(flowFile); + for (const flowNodeType of [ + 'start', + 'actionCalls', + 'assignments', + 'customErrors', + 'collectionProcessors', + 'decisions', + 'loops', + 'recordCreates', + 'recordDeletes', + 'recordLookups', + 'recordUpdates', + 'transforms', + 'screens', + 'subflows', + 'variables', + 'constants', + 'formulas']) { + if (flowXml?.Flow?.[flowNodeType] && flowXml?.Flow?.[flowNodeType]?.length > 0) { + return false; + } + } + return true; +} + +export async function listApexFiles(packageDirs) { + const apexFiles: any[] = []; + const skippedApex: string[] = []; + for (const packageDir of packageDirs || []) { + const apexMetadatas = await glob("**/*.{cls,trigger}", { cwd: packageDir.path, ignore: GLOB_IGNORE_PATTERNS }); + for (const apexMetadata of apexMetadatas) { + const apexFile = path.join(packageDir.path, apexMetadata).replace(/\\/g, '/'); + if (apexFile.includes('__')) { + skippedApex.push(apexFile); + } + else { + apexFiles.push(apexFile) + } + } + } 
+ if (skippedApex.length > 0) { + uxLog("warning", this, c.yellow(`Skipped ${skippedApex.length} managed Apex:`)); + for (const skippedApexItem of sortCrossPlatform(skippedApex)) { + uxLog("warning", this, c.yellow(` ${skippedApexItem}`)); + } + } + return apexFiles.sort(); +} + +export async function listPageFiles(packageDirs) { + const pageFiles: any[] = []; + const skippedPages: string[] = []; + for (const packageDir of packageDirs || []) { + const pageMetadatas = await glob("**/*.flexipage-meta.xml", { cwd: packageDir.path, ignore: GLOB_IGNORE_PATTERNS }); + for (const pageMetadata of pageMetadatas) { + const pageFile = path.join(packageDir.path, pageMetadata).replace(/\\/g, '/'); + if (pageFile.includes('__')) { + skippedPages.push(pageFile); + } + else { + pageFiles.push(pageFile) + } + } + } + if (skippedPages.length > 0) { + uxLog("warning", this, c.yellow(`Skipped ${skippedPages.length} managed Lightning Pages:`)); + for (const skippedPage of sortCrossPlatform(skippedPages)) { + uxLog("warning", this, c.yellow(` ${skippedPage}`)); + } + } + return pageFiles.sort(); +} + +export function returnApexType(apexCode: string) { + const apexContentlower = apexCode.toLowerCase(); + return apexContentlower.includes("@istest(seealldata=true)") ? "Test (See All Data)" : + apexContentlower.includes("@istest") ? "Test" : + apexContentlower.includes("@invocablemethod") ? "Invocable" : + apexContentlower.includes("@restresource") ? "REST" : + apexContentlower.includes("implements database.batchable") ? "Batch" : + apexContentlower.includes("implements batchable") ? "Batch" : + apexContentlower.includes("implements database.schedulable") ? "Schedulable" : + apexContentlower.includes("implements schedulable") ? "Schedulable" : + apexContentlower.includes("@auraenabled") ? "Lightning Controller" : + apexContentlower.includes("apexpages.standardcontroller") ? "Visualforce Controller" : + apexContentlower.includes("pagereference") ? 
"Visualforce Controller" : + apexContentlower.includes("triggerhandler") ? "Trigger Handler" : + apexContentlower.includes("new httprequest") ? "Callout" : + apexContentlower.includes("jsonparser parser") ? "JSON" : + apexContentlower.includes("public class soaprequest") ? "SOAP" : + "Class"; +} + +// Update only if found API version is inferior to the candidate API version (convert to number) +export async function updateSfdxProjectApiVersion() { + const candidateApiVersion: string = getApiVersion(); + // Handle sfdx-project.json file + const sfdxProjectFile = path.join(process.cwd(), 'sfdx-project.json'); + if (await fs.pathExists(sfdxProjectFile)) { + const sfdxProject = await fs.readJson(sfdxProjectFile); + if (sfdxProject?.sourceApiVersion) { + const currentApiVersionStr = sfdxProject.sourceApiVersion; + const currentApiVersion = parseFloat(currentApiVersionStr); + if (currentApiVersion < parseFloat(candidateApiVersion)) { + sfdxProject.sourceApiVersion = candidateApiVersion; + await fs.writeJson(sfdxProjectFile, sfdxProject, { spaces: 2 }); + uxLog("action", this, c.cyan(`Updated API version in sfdx-project.json from ${currentApiVersionStr} to ${candidateApiVersion}`)); + } + } + } + // Handle all .xml files found in manifest folder + const manifestDir = path.join(process.cwd(), 'manifest'); + if (fs.existsSync(manifestDir)) { + const manifestFiles = await glob('**/*.xml', { cwd: manifestDir }); + for (const manifestFile of manifestFiles) { + const fullPath = path.join(manifestDir, manifestFile); + if (fs.existsSync(fullPath)) { + const xmlContent = await fs.readFile(fullPath, 'utf-8'); + if (xmlContent.includes('version')) { + const regex = /(\d+\.\d+)<\/version>/; + const match = xmlContent.match(regex); + if (match && match[1]) { + const currentApiVersion = parseFloat(match[1]); + if (currentApiVersion < parseFloat(candidateApiVersion)) { + const updatedXmlContent = xmlContent.replace(regex, `${candidateApiVersion}`); + await fs.writeFile(fullPath, 
updatedXmlContent, 'utf-8'); + uxLog("action", this, c.cyan(`Updated API version in ${manifestFile} from ${match[1]} to ${candidateApiVersion}`)); + } + } + } + } + } + } +} \ No newline at end of file diff --git a/src/common/utils/prompts.ts b/src/common/utils/prompts.ts index 0469a7db4..7148c7311 100644 --- a/src/common/utils/prompts.ts +++ b/src/common/utils/prompts.ts @@ -1,12 +1,14 @@ -import * as c from "chalk"; +import c from "chalk"; // eslint-disable-next-line @typescript-eslint/no-var-requires import inquirer from "inquirer"; -import { SfdxError } from "@salesforce/core"; -import { isCI, uxLog } from "."; -import { WebSocketClient } from "../websocketClient"; +import { SfError } from "@salesforce/core"; +import { isCI, uxLog } from "./index.js"; +import { WebSocketClient } from "../websocketClient.js"; export interface PromptsQuestion { message: string; + description: string; + placeholder?: string; type: "select" | "multiselect" | "confirm" | "text" | "number"; name?: string; choices?: Array; @@ -19,10 +21,11 @@ export interface PromptsQuestion { // Centralized prompts function export async function prompts(options: PromptsQuestion | PromptsQuestion[]) { if (isCI) { - throw new SfdxError("Nothing should be prompted during CI !"); + uxLog("log", this, c.grey(JSON.stringify(options, null, 2))); + throw new SfError("Nothing should be prompted during CI !"); } const questionsRaw = Array.isArray(options) ? 
options : [options]; - const questionsReformatted = []; + const questionsReformatted: any = []; for (const question of questionsRaw) { if (!question.message.startsWith("🦙")) { question.message = "🦙 " + question.message; @@ -41,7 +44,8 @@ export async function prompts(options: PromptsQuestion | PromptsQuestion[]) { question.name = "value"; } // Add exit option when possible - if (question.type === "select") { + if (question.type === "select" && !WebSocketClient.isAliveWithLwcUI()) { + question.choices = question.choices || []; question.choices.push({ title: "⛔ Exit this script", value: "exitNow" }); } if (["select", "multiselect"].includes(question.type) && question.optionsPerPage == null) { @@ -54,13 +58,18 @@ export async function prompts(options: PromptsQuestion | PromptsQuestion[]) { if (WebSocketClient.isAlive()) { // Use UI prompt for (const question of questionsReformatted) { - uxLog(this, c.cyan(question.message) + c.white(" Look up in VsCode ⬆️")); + uxLog("action", this, c.cyan(question.message) + c.white(" Look up in VsCode ⬆️")); const [questionAnswer] = await WebSocketClient.sendPrompts([question]); answers = Object.assign(answers, questionAnswer); - if (JSON.stringify(answers).toLowerCase().includes("token")) { - uxLog(this, c.grey("Selection done but hidden in log because it contains sensitive information")); + checkStopPrompts(answers); + // Find the answer value (the value of the only property of questionAnswer) + const answerKey = Object.keys(questionAnswer)[0]; + const answerValue = questionAnswer[answerKey]; + const answerLabel = getAnswerLabel(answerValue, question.choices); + if (JSON.stringify(answerLabel).toLowerCase().includes("token")) { + uxLog("log", this, c.grey("Selection hidden because it contains sensitive information")); } else { - uxLog(this, c.grey(JSON.stringify(answers))); + uxLog("log", this, c.grey(answerLabel)); } } } else { @@ -68,17 +77,66 @@ export async function prompts(options: PromptsQuestion | PromptsQuestion[]) { 
answers = await terminalPrompts(questionsReformatted); } // Stop script if requested + checkStopPrompts(answers); + return answers; +} + +// Helper to get display label(s) for answer value(s) +function getAnswerLabel(answerValue: any, choices?: Array): string { + if (Array.isArray(answerValue)) { + if (choices && Array.isArray(choices) && choices.length > 0) { + return answerValue.map(val => findChoiceLabel(val, choices) ?? (typeof val === 'string' ? `- ${val}` : "- " + JSON.stringify(val))).join('\n'); + } else { + return answerValue.map(val => (typeof val === 'string' ? `- ${val}` : "- " + JSON.stringify(val))).join('\n'); + } + } + const label = findChoiceLabel(answerValue, choices); + if (label) return label; + return typeof answerValue === 'string' ? answerValue : JSON.stringify(answerValue); +} + +// Helper to find the label for a value in choices +function findChoiceLabel(val: any, choices?: Array): string | undefined { + if (!choices || !Array.isArray(choices) || choices.length === 0) return undefined; + const found = choices.find(choice => { + if (typeof choice.value === "object" && typeof val === "object") { + try { + return JSON.stringify(choice.value) === JSON.stringify(val); + } catch { + return false; + } + } + return choice.value === val; + }); + return found && found.title ? 
found.title : undefined; +} + +// Stop script if user requested it +function checkStopPrompts(answers: any) { + if (typeof answers !== "object" || answers === null) { + stopPrompt(); + } + if (Object.keys(answers).length === 0) { + stopPrompt(); + } for (const answer of Object.keys(answers)) { if (answers[answer] === "exitNow") { - uxLog(this, "Script stopped by user request"); - process.exit(0); + stopPrompt(); } } - return answers; +} + +function stopPrompt() { + uxLog("error", this, c.red("Script terminated at user request")); + // Send close client message with aborted status if WebSocket is alive + if (WebSocketClient.isAlive()) { + WebSocketClient.sendCloseClientMessage("aborted"); + } + process.exit(0); } async function terminalPrompts(questions: PromptsQuestion[]) { - const inquirerQuestions = []; + const inquirerQuestions: any = []; for (const question of questions) { const inquirerQuestion: any = { name: question.name, @@ -104,9 +162,9 @@ async function terminalPrompts(questions: PromptsQuestion[]) { inquirerQuestions.push(inquirerQuestion); } try { - const answers = await inquirer.prompt(inquirerQuestions); + const answers = await (inquirer as any).prompt(inquirerQuestions); return answers; } catch (e) { - throw new SfdxError("Error while prompting: " + e.message); + throw new SfError("Error while prompting: " + (e as Error).message); } } diff --git a/src/common/utils/refresh/connectedAppUtils.ts b/src/common/utils/refresh/connectedAppUtils.ts new file mode 100644 index 000000000..5b1ad91a0 --- /dev/null +++ b/src/common/utils/refresh/connectedAppUtils.ts @@ -0,0 +1,533 @@ +import fs from 'fs-extra'; +import * as path from 'path'; +import c from 'chalk'; +import { glob } from 'glob'; +import { execCommand, createTempDir, uxLog } from '../index.js'; +import { writeXmlFile } from '../xmlUtils.js'; +import { getApiVersion } from '../../../config/index.js'; +import { SfCommand } from '@salesforce/sf-plugins-core'; +import { prompts } from '../prompts.js'; 
+import { GLOB_IGNORE_PATTERNS } from '../projectUtils.js'; + +export interface RefreshSandboxConfig { + connectedApps?: string[]; +} +// Define interface for Connected App metadata +export interface ConnectedApp { + fullName: string; + fileName: string; + type: string; + consumerKey?: string; + consumerSecret?: string; +} + +export function generateConnectedAppPackageXml(connectedApps: ConnectedApp[]): any { + return { + Package: { + $: { + xmlns: 'http://soap.sforce.com/2006/04/metadata' + }, + types: [ + { + members: connectedApps.map(app => app.fullName), + name: ['ConnectedApp'] + } + ], + version: [getApiVersion()] + } + }; +} + +export function generateEmptyPackageXml(): any { + return { + Package: { + $: { + xmlns: 'http://soap.sforce.com/2006/04/metadata' + }, + version: [getApiVersion()] + } + }; +} + +export async function createConnectedAppManifest( + connectedApps: ConnectedApp[], + command: SfCommand +): Promise<{ manifestPath: string; tmpDir: string }> { + // Create a temporary directory for the manifest + const tmpDir = await createTempDir(); + const manifestPath = path.join(tmpDir, 'connected-apps-manifest.xml'); + + // Generate and write the package.xml content + const packageXml = generateConnectedAppPackageXml(connectedApps); + await writeXmlFile(manifestPath, packageXml); + + // Display the XML content for the manifest + const manifestContent = await fs.readFile(manifestPath, 'utf8'); + uxLog("log", command, c.cyan(`Package.xml manifest for ${connectedApps.length} Connected App(s):\n${manifestContent}`)); + + return { manifestPath, tmpDir }; +} + +export async function withConnectedAppIgnoreHandling( + operationFn: (backupInfo: { + forceignorePath: string; + originalContent: string; + tempBackupPath: string + } | null) => Promise, + command: SfCommand +): Promise { + // Temporarily modify .forceignore to allow Connected App operations + const backupInfo = await disableConnectedAppIgnore(command); + + try { + // Perform the operation + return 
await operationFn(backupInfo); + } finally { + // Always restore .forceignore + await restoreConnectedAppIgnore(backupInfo, command); + } +} + +export async function createDestructiveChangesManifest( + connectedApps: ConnectedApp[], + command: SfCommand +): Promise<{ destructiveChangesPath: string; packageXmlPath: string; tmpDir: string }> { + // Create a temporary directory for the manifest + const tmpDir = await createTempDir(); + const destructiveChangesPath = path.join(tmpDir, 'destructiveChanges.xml'); + const packageXmlPath = path.join(tmpDir, 'package.xml'); + + // Generate destructiveChanges.xml using the Connected App Package XML generator + const destructiveChangesXml = generateConnectedAppPackageXml(connectedApps); + + // Generate empty package.xml required for deployment + const packageXml = generateEmptyPackageXml(); + + await writeXmlFile(destructiveChangesPath, destructiveChangesXml); + await writeXmlFile(packageXmlPath, packageXml); + + // Display the XML content for destructive changes + const destructiveXmlContent = await fs.readFile(destructiveChangesPath, 'utf8'); + uxLog("log", command, c.cyan(`Destructive Changes XML for deleting ${connectedApps.length} Connected App(s):\n${destructiveXmlContent}`)); + + return { destructiveChangesPath, packageXmlPath, tmpDir }; +} + +export async function deleteConnectedApps( + orgUsername: string | undefined, + connectedApps: ConnectedApp[], + command: SfCommand, + saveProjectPath: string +): Promise { + await withConnectedAppValidation(orgUsername, connectedApps, command, 'delete', async () => { + if (!orgUsername) return; // This should never happen due to validation, but TypeScript needs it + + // Use withConnectedAppIgnoreHandling to handle .forceignore modifications + await withConnectedAppIgnoreHandling(async () => { + // Create destructive changes manifests + const { destructiveChangesPath, packageXmlPath, tmpDir } = + await createDestructiveChangesManifest(connectedApps, command); + + // Deploy the 
destructive changes + uxLog("log", command, c.grey(`Deploying destructive changes to delete ${connectedApps.length} Connected App(s) from org...`)); + try { + await execCommand( + `sf project deploy start --manifest ${packageXmlPath} --post-destructive-changes ${destructiveChangesPath} --target-org ${orgUsername} --ignore-warnings --ignore-conflicts --json`, + command, + { output: true, fail: true, cwd: saveProjectPath } + ); + } catch (deleteError: any) { + throw new Error(`Failed to delete Connected Apps: ${deleteError.message || String(deleteError)}`); + } + + // Clean up + await fs.remove(tmpDir); + uxLog("log", command, c.grey('Removed temporary deployment files')); + }, command); + }); +} + +export async function disableConnectedAppIgnore(command: SfCommand): Promise<{ + forceignorePath: string; + originalContent: string; + tempBackupPath: string +} | null> { + const forceignorePath = path.join(process.cwd(), '.forceignore'); + + // Check if .forceignore exists + if (!await fs.pathExists(forceignorePath)) { + uxLog("log", command, c.grey('No .forceignore file found, no modification needed')); + return null; + } + + // Create backup + const tempBackupPath = path.join(process.cwd(), '.forceignore.backup'); + const originalContent = await fs.readFile(forceignorePath, 'utf8'); + await fs.writeFile(tempBackupPath, originalContent); + + // Read content and remove lines that would ignore Connected Apps + const lines = originalContent.split('\n'); + const filteredLines = lines.filter(line => { + const trimmedLine = line.trim(); + return !( + trimmedLine.includes('connectedApp') || + trimmedLine.includes('ConnectedApp') || + trimmedLine.includes('connectedApps') + ); + }); + + // Check if any lines were filtered out + if (lines.length === filteredLines.length) { + uxLog("log", command, c.grey('No Connected App ignore patterns found in .forceignore')); + return { forceignorePath, originalContent, tempBackupPath }; + } + + // Write modified .forceignore + await 
fs.writeFile(forceignorePath, filteredLines.join('\n')); + uxLog("warning", command, c.cyan('Temporarily modified .forceignore to allow Connected App metadata operations')); + + return { forceignorePath, originalContent, tempBackupPath }; +} + +export async function restoreConnectedAppIgnore( + backupInfo: { + forceignorePath: string; + originalContent: string; + tempBackupPath: string + } | null, + command: SfCommand +): Promise { + if (!backupInfo) return; + + try { + // Restore original .forceignore if backup exists + if (await fs.pathExists(backupInfo.tempBackupPath)) { + await fs.writeFile(backupInfo.forceignorePath, backupInfo.originalContent); + await fs.remove(backupInfo.tempBackupPath); + uxLog("log", command, c.grey('Restored original .forceignore file')); + } + } catch (error) { + uxLog("warning", command, c.yellow(`Error restoring .forceignore: ${error}`)); + } +} + +export async function retrieveConnectedApps( + orgUsername: string | undefined, + connectedApps: ConnectedApp[], + command: SfCommand, + saveProjectPath: string +): Promise { + await withConnectedAppValidation(orgUsername, connectedApps, command, 'retrieve', async () => { + if (!orgUsername) return; // This should never happen due to validation, but TypeScript needs it + + await performConnectedAppOperationWithManifest( + orgUsername, + connectedApps, + command, + 'retrieve', + async (manifestPath, orgUsername, command) => { + await execCommand( + `sf project retrieve start --manifest ${manifestPath} --target-org ${orgUsername} --ignore-conflicts --json`, + command, + { output: true, fail: true, cwd: saveProjectPath } + ); + } + ); + }); +} + +export async function deployConnectedApps( + orgUsername: string | undefined, + connectedApps: ConnectedApp[], + command: SfCommand, + saveProjectPath: string +): Promise { + await withConnectedAppValidation(orgUsername, connectedApps, command, 'deploy', async () => { + if (!orgUsername) return; // This should never happen due to validation, but 
TypeScript needs it + + await performConnectedAppOperationWithManifest( + orgUsername, + connectedApps, + command, + 'deploy', + async (manifestPath, orgUsername, command) => { + await execCommand( + `sf project deploy start --manifest ${manifestPath} --target-org ${orgUsername} --ignore-warnings --json`, + command, + { output: true, fail: true, cwd: saveProjectPath } + ); + } + ); + }); +} + +export function toConnectedAppFormat(apps: Array<{ fullName: string; fileName?: string; filePath?: string; }>): ConnectedApp[] { + return apps.map(app => { + return { + fullName: app.fullName, + fileName: app.fileName || app.fullName || (app.filePath ? path.basename(app.filePath, '.connectedApp-meta.xml') : app.fullName), + type: 'ConnectedApp' + }; + }); +} + +export function validateConnectedApps( + requestedApps: string[], + availableApps: string[], + command: SfCommand, + context: 'org' | 'project' +): { missingApps: string[], validApps: string[] } { + // Case-insensitive matching for app names + const missingApps = requestedApps.filter(name => + !availableApps.some(availableName => + availableName.toLowerCase() === name.toLowerCase() + ) + ); + + if (missingApps.length > 0) { + const errorMsg = `The following Connected App(s) could not be found in the ${context}: ${missingApps.join(', ')}`; + uxLog("error", command, c.red(errorMsg)); + + if (availableApps.length > 0) { + uxLog("warning", command, c.yellow(`Available Connected Apps in the ${context}:`)); + availableApps.forEach(name => { + uxLog("log", command, c.grey(` - ${name}`)); + }); + + // Suggest similar names to help the user + missingApps.forEach(missingApp => { + const similarNames = availableApps + .filter(name => + name.toLowerCase().includes(missingApp.toLowerCase()) || + missingApp.toLowerCase().includes(name.toLowerCase()) + ) + .slice(0, 3); + + if (similarNames.length > 0) { + uxLog("warning", command, c.yellow(`Did you mean one of these instead of "${missingApp}"?`)); + similarNames.forEach(name => { + 
uxLog("log", command, c.grey(` - ${name}`)); + }); + } + }); + } else { + uxLog("warning", command, c.yellow(`No Connected Apps were found in the ${context}.`)); + } + + uxLog("warning", command, c.yellow('Please check the app name(s) and try again.')); + throw new Error(errorMsg); + } + + // Return the list of valid apps + const validApps = requestedApps.filter(name => + availableApps.some(availableName => + availableName.toLowerCase() === name.toLowerCase() + ) + ); + + return { missingApps, validApps }; +} + +export function validateConnectedAppParams( + orgUsername: string | undefined, + connectedApps: Array +): void { + if (!orgUsername) { + throw new Error('Organization username is required'); + } + if (!connectedApps || connectedApps.length === 0) { + throw new Error('No Connected Apps specified'); + } +} + +export async function promptForConnectedAppSelection( + connectedApps: T[], + initialSelection: string[] = [], + promptMessage: string +): Promise { + // Create choices for the prompt + const choices = connectedApps.map(app => { + return { title: app.fullName, value: app.fullName }; + }); + + // Prompt user for selection + const promptResponse = await prompts({ + type: 'multiselect', + name: 'selectedApps', + message: promptMessage, + description: 'Select Connected Apps to process', + choices: choices, + initial: initialSelection, + }); + + if (!promptResponse.selectedApps || promptResponse.selectedApps.length === 0) { + return []; + } + + // Filter apps based on selection + const selectedApps = connectedApps.filter(app => + promptResponse.selectedApps.includes(app.fullName) + ); + + return selectedApps; +} + +export async function findConnectedAppFile( + appName: string, + command: SfCommand, + saveProjectPath: string +): Promise { + uxLog("other", command, c.cyan(`Searching for Connected App: ${appName}`)); + try { + // First, try an exact case-sensitive match + const exactPattern = `**/${appName}.connectedApp-meta.xml`; + const exactMatches = await 
glob(exactPattern, { ignore: GLOB_IGNORE_PATTERNS, cwd: saveProjectPath }); + + if (exactMatches.length > 0) { + uxLog("success", command, c.green(`✓ Found Connected App: ${exactMatches[0]}`)); + return path.join(saveProjectPath, exactMatches[0]); + } + + // Try standard locations with possible name variations + const possiblePaths = [ + path.join(saveProjectPath, `force-app/main/default/connectedApps/${appName}.connectedApp-meta.xml`), + path.join(saveProjectPath, `force-app/main/default/connectedApps/${appName.replace(/\s/g, '_')}.connectedApp-meta.xml`), + path.join(saveProjectPath, `force-app/main/default/connectedApps/${appName.replace(/\s/g, '')}.connectedApp-meta.xml`) + ]; + + for (const potentialPath of possiblePaths) { + if (fs.existsSync(potentialPath)) { + uxLog("success", command, c.green(`✓ Found Connected App at standard path: ${potentialPath}`)); + return potentialPath; + } + } + + // If no exact match, try case-insensitive search by getting all ConnectedApp files + uxLog("warning", command, c.yellow(`No exact match found, trying case-insensitive search...`)); + const allConnectedAppFiles = await glob('**/*.connectedApp-meta.xml', { ignore: GLOB_IGNORE_PATTERNS, cwd: saveProjectPath }); + + if (allConnectedAppFiles.length === 0) { + uxLog("error", command, c.red(`No Connected App files found in the project.`)); + return null; + } + + // Find a case-insensitive match + const caseInsensitiveMatch = allConnectedAppFiles.find(file => { + const baseName = path.basename(file, '.connectedApp-meta.xml'); + return baseName.toLowerCase() === appName.toLowerCase() || + baseName.toLowerCase() === appName.toLowerCase().replace(/\s/g, '_') || + baseName.toLowerCase() === appName.toLowerCase().replace(/\s/g, ''); + }); + + if (caseInsensitiveMatch) { + uxLog("success", command, c.green(`✓ Found case-insensitive match: ${caseInsensitiveMatch}`)); + return path.join(saveProjectPath, caseInsensitiveMatch); + } + + // If still not found, list available Connected Apps 
+ uxLog("error", command, c.red(`✗ Could not find Connected App "${appName}"`)); + const allConnectedAppNames = allConnectedAppFiles.map(file => "- " + path.basename(file, '.connectedApp-meta.xml')).join('\n'); + uxLog("warning", command, c.yellow(`Available Connected Apps:\n${allConnectedAppNames}`)); + + return null; + } catch (error) { + uxLog("error", command, c.red(`Error searching for Connected App: ${error}`)); + return null; + } +} + +export async function selectConnectedAppsForProcessing( + connectedApps: T[], + initialSelection: string[] = [], + processAll: boolean, + nameFilter: string | undefined, + promptMessage: string, + command: SfCommand +): Promise { + + // If all flag or name is provided, use all connected apps from the list without prompting + if (processAll || nameFilter) { + const selectionReason = processAll ? 'all flag' : 'name filter'; + uxLog("action", command, c.cyan(`Processing ${connectedApps.length} Connected App(s) based on ${selectionReason}`)); + return connectedApps; + } + + // Otherwise, prompt for selection + return await promptForConnectedAppSelection( + connectedApps, + initialSelection, + promptMessage + ); +} + +export async function withConnectedAppValidation( + orgUsername: string | undefined, + connectedApps: ConnectedApp[], + command: SfCommand, + operationName: string, + operationFn: () => Promise +): Promise { + try { + validateConnectedAppParams(orgUsername, connectedApps); + } catch (error: any) { + uxLog("log", command, c.yellow(`Skipping ${operationName} operation: ${error.message}`)); + return; + } + + await operationFn(); +} + +export async function performConnectedAppOperationWithManifest( + orgUsername: string, + connectedApps: ConnectedApp[], + command: SfCommand, + operationName: 'retrieve' | 'deploy', + commandFn: (manifestPath: string, orgUsername: string, command: SfCommand) => Promise +): Promise { + // Use withConnectedAppIgnoreHandling to handle .forceignore modifications + await 
withConnectedAppIgnoreHandling(async () => { + // Create a manifest for the Connected Apps + const { manifestPath, tmpDir } = await createConnectedAppManifest(connectedApps, command); + + // Execute the operation using the manifest + uxLog("log", command, c.cyan(`${operationName === 'retrieve' ? 'Retrieving' : 'Deploying'} ${connectedApps.length} Connected App(s) ${operationName === 'retrieve' ? 'from' : 'to'} org...`)); + + try { + await commandFn(manifestPath, orgUsername, command); + + // Wait a moment to ensure files are written to disk (especially for retrieve operations) + if (operationName === 'retrieve') { + uxLog("log", command, c.grey('Waiting for files to be written to disk...')); + await new Promise(resolve => setTimeout(resolve, 1000)); + } + } catch (error: any) { + throw new Error(`Failed to ${operationName} Connected Apps: ${error.message || String(error)}`); + } + + // Clean up + await fs.remove(tmpDir); + uxLog("log", command, c.grey('Removed temporary manifest file')); + }, command); +} + +export function createConnectedAppSuccessResponse( + message: string, + processedApps: string[], + additionalData: Record = {} +): { success: true; message: string; connectedAppsProcessed: string[] } & Record { + return { + success: true, + message, + connectedAppsProcessed: processedApps, + ...additionalData + }; +} + +export function handleConnectedAppError( + error: any, + command: SfCommand +): { success: false; error: string } { + const errorMessage = error.message || JSON.stringify(error); + uxLog("error", command, c.red(`Error: ${errorMessage}`)); + return { success: false, error: errorMessage }; +} diff --git a/src/common/utils/rulesBuilderUtil.ts b/src/common/utils/rulesBuilderUtil.ts new file mode 100644 index 000000000..97296d07a --- /dev/null +++ b/src/common/utils/rulesBuilderUtil.ts @@ -0,0 +1,115 @@ +export class RulesBuilderUtil { + + public globalRuleTableLines: string[] = []; + + public async buildInitialMarkDownLinesForRules(ruleGlobal: any) 
{ + + this.globalRuleTableLines = [ + `## ${ruleGlobal.fullName} Rules`, + "| Order | Criteria | Assigned To | Assigned To Type | Email |", + "| :--: | :------------- | :--: | :--: | :--: |", + ]; + + if (ruleGlobal.ruleEntry) { + if (!Array.isArray(ruleGlobal.ruleEntry)) { + ruleGlobal.ruleEntry = [ruleGlobal.ruleEntry]; + } + let orderNum: number = 1; + for (const rule of ruleGlobal.ruleEntry) { + const globalCriteria = rule?.criteriaItems ? this.formatCriteria(rule?.criteriaItems, rule?.booleanFilter) : rule?.formula ? JSON.stringify(rule.formula) : "None"; + this.globalRuleTableLines.push(`| ${orderNum} | ${globalCriteria} | ${rule.assignedTo} | ${rule.assignedToType} | ${(!!rule.template)} |`); + orderNum++; + } + } + } + + public async buildInitialMarkDownLinesFoAutoResponseRules(autoresponseRule: any) { + + this.globalRuleTableLines = [ + `## ${autoresponseRule.fullName} Rules`, + "| Order | Criteria | Sender Email | Sender Name | Reply To |", + "| :--: | :------------- | :--: | :--: | :--: |", + ]; + + if (autoresponseRule.ruleEntry) { + if (!Array.isArray(autoresponseRule.ruleEntry)) { + autoresponseRule.ruleEntry = [autoresponseRule.ruleEntry]; + } + let order: number = 1; + for (const rule of autoresponseRule.ruleEntry) { + const autoResponseCriteria = rule?.criteriaItems ? this.formatCriteria(rule?.criteriaItems, rule?.booleanFilter) : rule?.formula ? 
JSON.stringify(rule.formula) : "None"; + this.globalRuleTableLines.push(`| ${order} | ${autoResponseCriteria} | ${rule.senderEmail} | ${rule.senderName} | ${rule.replyTo || "None"} |`); + order++; + } + } + } + + public async buildInitialMarkDownLinesForEscalationRules(ruleGlobal: any) { + + this.globalRuleTableLines = [ + `## ${ruleGlobal.fullName} Rules`, + "| Order | Criteria | Actions |", + "| :--: | :------------- | :------------- |", + ]; + + if (ruleGlobal.ruleEntry) { + if (!Array.isArray(ruleGlobal.ruleEntry)) { + ruleGlobal.ruleEntry = [ruleGlobal.ruleEntry]; + } + let order: number = 1; + for (const rule of ruleGlobal.ruleEntry) { + const criteria = rule?.criteriaItems ? this.formatCriteria(rule?.criteriaItems, rule?.booleanFilter) : rule?.formula ? JSON.stringify(rule.formula) : "None"; + const actions = rule?.escalationAction ? this.formatActions(rule?.escalationAction) : "None"; + this.globalRuleTableLines.push(`| ${order} | ${criteria} | ${actions} |`); + order++; + } + } + } + + formatActions(actionItems: any[]): string { + if (!actionItems || actionItems.length === 0) { + return "None"; + } else { + if (!Array.isArray(actionItems)) { + actionItems = [actionItems]; + } + return actionItems + .map((x => this.formatActionItem(x))) + .join(''); + } + } + + formatCriteria(criteriaItems: any[], booleanFilter: string): string { + if (!criteriaItems || criteriaItems.length === 0) { + return 'None'; + } else { + if (!booleanFilter) { + if (!Array.isArray(criteriaItems)) { + criteriaItems = [criteriaItems]; + } + return criteriaItems + .map((x => this.formatCriteriaItem(x))) + .join(' AND '); + } else { + + let booleanResult: string = booleanFilter; + for (let i = 1; i <= criteriaItems.length; i++) { + booleanResult = booleanResult.replace(i.toString(), this.formatCriteriaItem(criteriaItems[i - 1])); + } + return booleanResult; + } + } + } + + formatCriteriaItem(ci: any): string { + return '(**' + + ci.field.split('.')[0] + '**: ' + + 
ci.field.substring(ci.field.indexOf('.') + 1) + ' _' + + ci.operation + '_ ' + + (ci.value ? String(ci.value).replaceAll(",", ", ") : "' '") + ')
'; + } + + formatActionItem(ai: any): string { + return '
**Mins to escalations**: ' + ai.minutesToEscalation + '
**Assign To**: ' + ai.assignedTo + '
**Notify**: ' + (ai.notifyTo ?? 'None') + '
'; + } +} diff --git a/src/common/utils/workaroundUtils.ts b/src/common/utils/workaroundUtils.ts index dd024b94a..0b62612a8 100644 --- a/src/common/utils/workaroundUtils.ts +++ b/src/common/utils/workaroundUtils.ts @@ -1,15 +1,16 @@ -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { createTempDir, uxLog } from "."; -import { glob } from "glob"; -import { parseXmlFile, writeXmlFile } from "./xmlUtils"; -import { isScratchOrg } from "./orgUtils"; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import { createTempDir, uxLog } from './index.js'; +import { glob } from 'glob'; +import { parseXmlFile, writeXmlFile } from './xmlUtils.js'; +import { isScratchOrg } from './orgUtils.js'; +import { GLOB_IGNORE_PATTERNS } from './projectUtils.js'; // Update files for special cases export async function arrangeFilesBefore(commandThis: any, options: any = {}) { const tempDir = await createTempDir(); - const arrangedFiles = []; + const arrangedFiles: any[] = []; if ((await isScratchOrg(options)) === true) { const arrangedLookupFields = await removeLookupFilters(tempDir, commandThis, options); arrangedFiles.push(...arrangedLookupFields); @@ -19,9 +20,9 @@ export async function arrangeFilesBefore(commandThis: any, options: any = {}) { // Remove lookup filters because they aren't pushed well export async function removeLookupFilters(tempDir: string, commandThis: any, options: any = {}) { - const arrangedFiles = []; - const findFieldsPattern = (options.rootFolder || ".") + `/**/objects/**/fields/**.field-meta.xml`; - const matchingFieldFiles = await glob(findFieldsPattern, { cwd: process.cwd() }); + const arrangedFiles: any = []; + const findFieldsPattern = (options.rootFolder || '.') + `/**/objects/**/fields/**.field-meta.xml`; + const matchingFieldFiles = await glob(findFieldsPattern, { cwd: process.cwd(), ignore: GLOB_IGNORE_PATTERNS }); for (const fieldFile of matchingFieldFiles) { // skip if 
managed field if ((path.basename(fieldFile).match(/__/g) || []).length === 2) { @@ -35,7 +36,7 @@ export async function removeLookupFilters(tempDir: string, commandThis: any, opt delete fieldXml.CustomField.lookupFilter; await writeXmlFile(fieldFile, fieldXml); arrangedFiles.push({ file: fieldFile, backupFile: backupFile }); - uxLog(commandThis, c.grey(`Removed lookup filter from field ${fieldFile}`)); + uxLog("log", commandThis, c.grey(`Removed lookup filter from field ${fieldFile}`)); } } return arrangedFiles; @@ -45,6 +46,6 @@ export async function removeLookupFilters(tempDir: string, commandThis: any, opt export async function restoreArrangedFiles(arrangedFiles: any[], commandThis: any) { for (const arrangedFile of arrangedFiles) { await fs.copyFile(arrangedFile.backupFile, arrangedFile.file); - uxLog(commandThis, c.grey(`Restored file ${arrangedFile.file}`)); + uxLog("log", commandThis, c.grey(`Restored file ${arrangedFile.file}`)); } } diff --git a/src/common/utils/wrapUtils.ts b/src/common/utils/wrapUtils.ts index edc7cc879..017ac43fc 100644 --- a/src/common/utils/wrapUtils.ts +++ b/src/common/utils/wrapUtils.ts @@ -1,9 +1,10 @@ -import { SfdxCommand } from "@salesforce/command"; -import * as c from "chalk"; -import { execCommand, uxLog } from "."; -import { analyzeDeployErrorLogs } from "./deployTips"; +import { SfCommand } from "@salesforce/sf-plugins-core"; +import c from "chalk"; +import { execCommand, uxLog } from "./index.js"; +import { analyzeDeployErrorLogs } from "./deployTips.js"; +import { generateApexCoverageOutputFile } from "./deployUtils.js"; -export async function wrapSfdxCoreCommand(commandBase: string, argv: string[], commandThis: SfdxCommand, debug = false): Promise { +export async function wrapSfdxCoreCommand(commandBase: string, argv: string[], commandThis: SfCommand, debug = false): Promise { const endArgs = [...argv].map((arg) => { // Add quotes to avoid problems if arguments contain spaces if (!arg.startsWith("-") && 
!arg.startsWith(`"`) && !arg.startsWith(`'`)) { @@ -43,21 +44,27 @@ export async function wrapSfdxCoreCommand(commandBase: string, argv: string[], c debug: debug, fail: true, }); + process.exitCode = 0; + await generateApexCoverageOutputFile(); } catch (e) { + await generateApexCoverageOutputFile(); // Add deployment tips in error logs - const { errLog } = await analyzeDeployErrorLogs(e.stdout + e.stderr, true, { check: endArgs.includes("--checkonly") }); - uxLog(commandThis, c.red(c.bold("Sadly there has been error(s)"))); + const { errLog } = await analyzeDeployErrorLogs((e as any).stdout + (e as any).stderr, true, { check: endArgs.includes("--checkonly") }); + uxLog("error", commandThis, c.red(c.bold("Sadly there has been error(s)"))); if (process.env?.SFDX_HARDIS_DEPLOY_ERR_COLORS === "false") { - uxLog(this, "\n" + errLog); + uxLog("other", this, "\n" + errLog); } else { - uxLog(this, c.red("\n" + errLog)); + uxLog("error", this, c.red("\n" + errLog)); } deployRes = errLog; - if (e.code) { - process.exitCode = e.code; + if ((e as any).code) { + process.exitCode = (e as any).code; } else { process.exitCode = 1; } } + if (typeof deployRes === 'object') { + deployRes.stdout = JSON.stringify(deployRes); + } return { outputstring: deployRes }; } diff --git a/src/common/utils/xmlUtils.ts b/src/common/utils/xmlUtils.ts index d70c1bb24..7f27806c4 100644 --- a/src/common/utils/xmlUtils.ts +++ b/src/common/utils/xmlUtils.ts @@ -1,27 +1,31 @@ // XML Utils functions -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import * as fs from "fs-extra"; -import * as path from "path"; -import * as util from "util"; -import * as xml2js from "xml2js"; -import { uxLog } from "."; -import { CONSTANTS } from "../../config"; +import { SfError } from '@salesforce/core'; +import c from 'chalk'; +import fs from 'fs-extra'; +import * as path from 'path'; +import * as util from 'util'; +import * as xml2js from 'xml2js'; +import { sortCrossPlatform, uxLog } from 
'./index.js'; +import { getApiVersion } from '../../config/index.js'; export async function parseXmlFile(xmlFile: string) { - const packageXmlString = await fs.readFile(xmlFile, "utf8"); - const parsedXml = await xml2js.parseStringPromise(packageXmlString); - return parsedXml; + const packageXmlString = await fs.readFile(xmlFile, 'utf8'); + try { + const parsedXml = await xml2js.parseStringPromise(packageXmlString); + return parsedXml; + } catch (e: any) { + throw new SfError(`Error parsing ${xmlFile}: ${e.message}`); + } } export async function writeXmlFile(xmlFile: string, xmlObject: any) { const builder = new xml2js.Builder({ renderOpts: { pretty: true, - indent: process.env.SFDX_XML_INDENT || " ", - newline: "\n", + indent: process.env.SFDX_XML_INDENT || ' ', + newline: '\n', }, - xmldec: { version: "1.0", encoding: "UTF-8" }, + xmldec: { version: '1.0', encoding: 'UTF-8' }, }); const updatedFileContent = builder.buildObject(xmlObject); await fs.ensureDir(path.dirname(xmlFile)); @@ -36,7 +40,7 @@ export async function writeXmlFileFormatted(xmlFile: string, xmlString: string) export async function parsePackageXmlFile(packageXmlFile: string) { const targetOrgPackage = await parseXmlFile(packageXmlFile); const targetOrgContent: any = {}; - for (const type of targetOrgPackage.Package.types || []) { + for (const type of targetOrgPackage?.Package?.types || []) { const mdType = type.name[0]; const members = type.members || []; targetOrgContent[mdType] = members; @@ -44,8 +48,17 @@ export async function parsePackageXmlFile(packageXmlFile: string) { return targetOrgContent; } +export async function countPackageXmlItems(packageXmlFile: string): Promise { + const packageXmlParsed = await parsePackageXmlFile(packageXmlFile); + let counter = 0; + for (const type of Object.keys(packageXmlParsed)) { + counter += packageXmlParsed[type].length || 0; + } + return counter; +} + export async function writePackageXmlFile(packageXmlFile: string, packageXmlObject: any) { - let 
packageXmlContent = { Package: { types: [], version: [CONSTANTS.API_VERSION] } }; + let packageXmlContent: any = { Package: { types: [], version: [getApiVersion()] } }; if (fs.existsSync(packageXmlFile)) { packageXmlContent = await parseXmlFile(packageXmlFile); } @@ -62,17 +75,28 @@ export async function writePackageXmlFile(packageXmlFile: string, packageXmlObje // Check if a package.xml is empty export async function isPackageXmlEmpty( packageXmlFile: string, - options: { ignoreStandaloneParentItems: boolean } = { ignoreStandaloneParentItems: false }, + options: { ignoreStandaloneParentItems: boolean } = { ignoreStandaloneParentItems: false } ) { const packageXmlContent = await parseXmlFile(packageXmlFile); - if (packageXmlContent && packageXmlContent.Package && packageXmlContent.Package.types && packageXmlContent.Package.types.length > 0) { + if ( + packageXmlContent && + packageXmlContent.Package && + packageXmlContent.Package.types && + packageXmlContent.Package.types.length > 0 + ) { if (options.ignoreStandaloneParentItems === true) { // Check if only contains SharingRules without SharingOwnerRule - if (packageXmlContent.Package.types.length === 1 && packageXmlContent.Package.types[0].name[0] === "SharingRules") { + if ( + packageXmlContent.Package.types.length === 1 && + packageXmlContent.Package.types[0].name[0] === 'SharingRules' + ) { return true; } // Check if only contains SharingOwnerRule without SharingRules - if (packageXmlContent.Package.types.length === 1 && packageXmlContent.Package.types[0].name[0] === "SharingOwnerRule") { + if ( + packageXmlContent.Package.types.length === 1 && + packageXmlContent.Package.types[0].name[0] === 'SharingOwnerRule' + ) { return true; } } @@ -82,40 +106,84 @@ export async function isPackageXmlEmpty( return true; } +// Read package.xml files and build concatenated list of items +export async function appendPackageXmlFilesContent(packageXmlFileList: string[], outputXmlFile: string) { + uxLog("log", this, 
c.grey(`Appending ${packageXmlFileList.join(',')} into ${outputXmlFile}...`)); + let firstPackageXmlContent: any = null; + let allPackageXmlFilesTypes = {}; + // loop on packageXml files + for (const packageXmlFile of packageXmlFileList) { + const result: any = await parseXmlFile(packageXmlFile); + if (firstPackageXmlContent == null) { + firstPackageXmlContent = result; + } + let packageXmlMetadatasTypeLs: any[]; + // Get metadata types in current loop packageXml + try { + packageXmlMetadatasTypeLs = result.Package.types || []; + } catch { + throw new SfError('Unable to find Package XML element in ' + packageXmlFile); + } + // Add metadata members in concatenation list of items & store doublings + for (const typePkg of packageXmlMetadatasTypeLs) { + if (typePkg.name == null) { + continue; + } + const nameKey = typePkg.name[0]; + if (allPackageXmlFilesTypes[nameKey] != null && typePkg.members != null) { + sortCrossPlatform(allPackageXmlFilesTypes[nameKey] = Array.from( + new Set(allPackageXmlFilesTypes[nameKey].concat(typePkg.members)) + )); + } else if (typePkg.members != null) { + sortCrossPlatform(allPackageXmlFilesTypes[nameKey] = Array.from(new Set(typePkg.members))); + } + } + } + // Sort result + allPackageXmlFilesTypes = sortObject(allPackageXmlFilesTypes); + // Write output file + const appendTypesXml: any[] = []; + for (const packageXmlType of Object.keys(allPackageXmlFilesTypes)) { + appendTypesXml.push({ members: allPackageXmlFilesTypes[packageXmlType], name: packageXmlType }); + } + firstPackageXmlContent.Package.types = appendTypesXml; + await writeXmlFile(outputXmlFile, firstPackageXmlContent); +} + // Read package.xml files and remove the content of the export async function removePackageXmlFilesContent( packageXmlFile: string, removePackageXmlFile: string, - { outputXmlFile = null, logFlag = false, removedOnly = false, keepEmptyTypes = false }, + { outputXmlFile = '', logFlag = false, removedOnly = false, keepEmptyTypes = false } ) { // Read 
package.xml file to update const parsedPackageXml: any = await parseXmlFile(packageXmlFile); if (logFlag) { - uxLog(this, `Parsed ${packageXmlFile} :\n` + util.inspect(parsedPackageXml, false, null)); + uxLog("other", this, `Parsed ${packageXmlFile} :\n` + util.inspect(parsedPackageXml, false, null)); } let packageXmlMetadatasTypeLs: any; // get metadata types in parse result try { packageXmlMetadatasTypeLs = parsedPackageXml.Package.types || []; } catch { - throw new SfdxError("Unable to parse package Xml file " + packageXmlFile); + throw new SfError('Unable to parse package Xml file ' + packageXmlFile); } // Read package.xml file to use for filtering first file const parsedPackageXmlRemove: any = await parseXmlFile(removePackageXmlFile); if (logFlag) { - uxLog(this, c.grey(`Parsed ${removePackageXmlFile} :\n` + util.inspect(parsedPackageXmlRemove, false, null))); + uxLog("log", this, c.grey(`Parsed ${removePackageXmlFile} :\n` + util.inspect(parsedPackageXmlRemove, false, null))); } let packageXmlRemoveMetadatasTypeLs: any; // get metadata types in parse result try { packageXmlRemoveMetadatasTypeLs = parsedPackageXmlRemove.Package.types || []; } catch { - throw new SfdxError("Unable to parse package Xml file " + removePackageXmlFile); + throw new SfError('Unable to parse package Xml file ' + removePackageXmlFile); } // Filter main package.xml file - const processedTypes = []; + const processedTypes: any[] = []; for (const removeType of packageXmlRemoveMetadatasTypeLs) { const removeTypeName = removeType.name[0] || null; if (removeTypeName) { @@ -132,18 +200,36 @@ export async function removePackageXmlFilesContent( const type = types[0]; let typeMembers = type.members || []; // Manage * case contained in target - if (removedOnly === true && typeMembers.includes("*")) { + if (removedOnly === true && typeMembers.includes('*')) { typeMembers = removeTypeMembers; - uxLog(this, c.grey(c.italic(`Found wildcard * on type ${c.bold(type.name)}, kept items: 
${typeMembers.length}`))); + uxLog("log", this, c.grey(c.italic(`Found wildcard * on type ${c.bold(type.name)}, kept items: ${typeMembers.length}`))); } // Manage * case contained in source - else if (removeTypeMembers[0] && removeTypeMembers[0] === "*") { + else if (removeTypeMembers[0] && removeTypeMembers[0] === '*') { typeMembers = typeMembers.filter(() => checkRemove(false, removedOnly)); - uxLog(this, c.grey(c.italic(`Found wildcard * on type ${c.bold(type.name)} which have all been ${removedOnly ? "kept" : "removed"}`))); + uxLog( + "log", + this, + c.grey( + c.italic( + `Found wildcard * on type ${c.bold(type.name)} which have all been ${removedOnly ? 'kept' : 'removed'}` + ) + ) + ); } else { // Filter members - typeMembers = typeMembers.filter((member: string) => checkRemove(!removeTypeMembers.includes(member), removedOnly)); - uxLog(this, c.grey(c.italic(`Found type ${c.bold(type.name)}, ${typeMembers.length} items have been ${removedOnly ? "removed" : "kept"}`))); + typeMembers = typeMembers.filter((member: string) => + checkRemove(!removeTypeMembers.includes(member), removedOnly) + ); + uxLog( + "log", + this, + c.grey( + c.italic( + `Found type ${c.bold(type.name)}, ${typeMembers.length} items have been ${removedOnly ? 
'removed' : 'kept'}` + ) + ) + ); } if (typeMembers.length > 0 || keepEmptyTypes === true) { // Update members for type @@ -163,12 +249,14 @@ export async function removePackageXmlFilesContent( // If removedOnly mode, remove types which were not present in removePackageXml if (removedOnly) { - packageXmlMetadatasTypeLs = packageXmlMetadatasTypeLs.filter((type1: any) => processedTypes.includes(type1.name[0])); + packageXmlMetadatasTypeLs = packageXmlMetadatasTypeLs.filter((type1: any) => + processedTypes.includes(type1.name[0]) + ); } // display in logs if requested if (logFlag) { - uxLog(this, "Package.xml remove results :\n" + util.inspect(packageXmlMetadatasTypeLs, false, null)); + uxLog("other", this, 'Package.xml remove results :\n' + util.inspect(packageXmlMetadatasTypeLs, false, null)); } // Write in output file if required @@ -176,15 +264,103 @@ export async function removePackageXmlFilesContent( parsedPackageXml.Package.types = packageXmlMetadatasTypeLs; await writeXmlFile(outputXmlFile, parsedPackageXml); if (logFlag) { - uxLog(this, "Generated package.xml file: " + outputXmlFile); + uxLog("other", this, 'Generated package.xml file: ' + outputXmlFile); } } return packageXmlMetadatasTypeLs; } +export function sortObject(o) { + return Object.keys(o) + .sort() + .reduce((r, k) => ((r[k] = o[k]), r), {}); +} + function checkRemove(boolRes, removedOnly = false) { if (removedOnly === true) { return !boolRes; } return boolRes; } + +export async function applyAllReplacementsDefinitions( + allMatchingSourceFiles: string[], + referenceStrings: string[], + replacementDefinitions: any[] +) { + uxLog("action", this, c.cyan(`Initializing replacements in files for ${referenceStrings.join(',')}...`)); + for (const ref of referenceStrings) { + for (const replacementDefinition of replacementDefinitions) { + replacementDefinition.refRegexes = replacementDefinition.refRegexes.map((refRegex) => { + refRegex.regex = refRegex.regex.replace(new RegExp(`{{REF}}`), ref); + return 
refRegex; + }); + await applyReplacementDefinition(replacementDefinition, allMatchingSourceFiles, ref); + } + } +} + +export async function applyReplacementDefinition( + replacementDefinition: any, + allMatchingSourceFiles: string[], + ref: string +) { + for (const sourceFile of allMatchingSourceFiles.filter((file) => + replacementDefinition.extensions.some((ext) => file.endsWith(ext)) + )) { + let fileText = await fs.readFile(sourceFile, 'utf8'); + let updated = false; + // Replacement in all text + if (replacementDefinition.replaceMode.includes('all')) { + for (const regexReplace of replacementDefinition.refRegexes) { + const updatedfileText = fileText.replace(new RegExp(regexReplace.regex, 'gm'), regexReplace.replace); + if (updatedfileText !== fileText) { + updated = true; + fileText = updatedfileText; + } + } + } + // Replacement by line + let fileLines = fileText.split(/\r?\n/); + if (replacementDefinition.replaceMode.includes('line')) { + const updatedFileLines = fileLines.map((line) => { + const trimLine = line.trim(); + if (trimLine.startsWith('/') || trimLine.startsWith(' ' + : line; + } + return line; + }); + fileLines = updatedFileLines; + } + // Apply updates on file + if (updated) { + const updatedFileText = fileLines.join('\n'); + await fs.writeFile(sourceFile, updatedFileText); + uxLog("log", this, c.grey(`- updated ${replacementDefinition.label}: ${sourceFile}`)); + } + } +} diff --git a/src/common/websocketClient.ts b/src/common/websocketClient.ts index fb9e0bc8b..70f208156 100644 --- a/src/common/websocketClient.ts +++ b/src/common/websocketClient.ts @@ -1,17 +1,30 @@ -import * as c from "chalk"; -import * as util from "util"; -import * as WebSocket from "ws"; -import { isCI, uxLog } from "./utils"; +import c from 'chalk'; +import * as util from 'util'; +import WebSocket from 'ws'; +import { isCI, uxLog } from './utils/index.js'; +import { SfError } from '@salesforce/core'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import { 
CONSTANTS } from '../config/index.js'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); let globalWs: WebSocketClient | null; let isWsOpen = false; +let userInput = null; const PORT = process.env.SFDX_HARDIS_WEBSOCKET_PORT || 2702; +// Define allowed log types and type alias outside the class +export const LOG_TYPES = ['log', 'action', 'warning', 'error', 'success', 'table', "other"] as const; +export type LogType = typeof LOG_TYPES[number]; + export class WebSocketClient { private ws: any; private wsContext: any; private promptResponse: any; + private isDead = false; + private isInitialized = false; constructor(context: any) { this.wsContext = context; @@ -20,15 +33,37 @@ export class WebSocketClient { this.ws = new WebSocket(wsHostPort); globalWs = this; // eslint-disable-line this.start(); + console.log("WS Client started"); } catch (err) { - uxLog(this, c.yellow("Warning: Unable to start WebSocket client on " + wsHostPort + "\n" + err.message)); + this.isDead = true; + uxLog( + "warning", + this, + c.yellow('Warning: Unable to start WebSocket client on ' + wsHostPort + '\n' + (err as Error).message) + ); } } + static async isInitialized(): Promise { + if (globalWs) { + let retries = 40; // Wait up to 10 seconds + while (!globalWs.isInitialized && retries > 0 && !globalWs.isDead) { + await new Promise((resolve) => setTimeout(resolve, 250)); + retries--; + } + return globalWs.isInitialized; + } + return false; + } + static isAlive(): boolean { return !isCI && globalWs != null && isWsOpen === true; } + static isAliveWithLwcUI(): boolean { + return this.isAlive() && userInput === 'ui-lwc'; + } + static sendMessage(data: any) { if (globalWs) { globalWs.sendMessageToServer(data); @@ -37,32 +72,204 @@ export class WebSocketClient { // Requests open file within VsCode if linked static requestOpenFile(file: string) { - WebSocketClient.sendMessage({ event: "openFile", file: file.replace(/\\/g, "/") }); + WebSocketClient.sendMessage({ event: 'openFile', 
file: file.replace(/\\/g, '/') }); + } + + // Send refresh status message + static sendRefreshStatusMessage() { + WebSocketClient.sendMessage({ event: 'refreshStatus' }); + } + + // Send refresh commands message + static sendRefreshCommandsMessage() { + WebSocketClient.sendMessage({ event: 'refreshCommands' }); + } + + // Send progress start message + static sendProgressStartMessage(title: string, totalSteps?: number) { + WebSocketClient.sendMessage({ + event: 'progressStart', + title: title || 'Progress', + totalSteps: totalSteps || 0 + }); + } + + // Send progress step message + static sendProgressStepMessage(step: number, totalSteps?: number) { + WebSocketClient.sendMessage({ + event: 'progressStep', + step: step, + totalSteps: totalSteps + }); + } + + // Send progress end message + static sendProgressEndMessage(totalSteps?: number) { + WebSocketClient.sendMessage({ + event: 'progressEnd', + totalSteps: totalSteps + }); + } + + // Send refresh plugins message + static sendRefreshPluginsMessage() { + WebSocketClient.sendMessage({ event: 'refreshPlugins' }); + } + + // Send command sub-command start message + static sendCommandSubCommandStartMessage(command: string, cwd: string, options: any) { + WebSocketClient.sendMessage({ + event: 'commandSubCommandStart', + data: { + command: command, + cwd: cwd, + options: options, + }, + }); + } + + // Send command sub-command end message + static sendCommandSubCommandEndMessage(command: string, cwd: string, options: any, success: boolean, result: any) { + WebSocketClient.sendMessage({ + event: 'commandSubCommandEnd', + data: { + command: command, + cwd: cwd, + options: options, + success: success, + result: result, + }, + }); + } + + // Send command log line message + static sendCommandLogLineMessage(message: string, logType?: LogType, isQuestion?: boolean) { + WebSocketClient.sendMessage({ + event: 'commandLogLine', + logType: logType, + message: message, + isQuestion: isQuestion, + }); + } + + // Send run SFDX Hardis 
command message + static sendRunSfdxHardisCommandMessage(sfdxHardisCommand: string) { + WebSocketClient.sendMessage({ + event: 'runSfdxHardisCommand', + sfdxHardisCommand: sfdxHardisCommand, + }); + } + + // Sends refresh pipeline message + static sendRefreshPipelineMessage() { + WebSocketClient.sendMessage({ event: 'refreshPipeline' }); + } + + // Sends info about downloadable report file + static sendReportFileMessage( + file: string, + title: string, + type: "actionCommand" | "actionUrl" | "report" | "docUrl" + ) { + WebSocketClient.sendMessage({ + event: 'reportFile', + file: file.replace(/\\/g, '/'), + title: title, + type: type + }); } static sendPrompts(prompts: any): Promise { - return globalWs.promptServer(prompts); + if (globalWs) { + return globalWs.promptServer(prompts); + } + throw new SfError('globalWs should be set in sendPrompts'); + } + + // Send close client message with status + static sendCloseClientMessage(status?: string, error: any = null) { + const message: any = { + event: 'closeClient', + context: globalWs?.wsContext, + status: status, + }; + if (error) { + message.error = { + type: error.type || 'unknown', + message: error.message || 'An error occurred', + stack: error.stack || '', + }; + } + WebSocketClient.sendMessage(message); } - start() { - this.ws.on("open", () => { + // Close the WebSocket connection externally + static closeClient(status?: string) { + if (globalWs) { + globalWs.dispose(status); + } + } + + private getCommandDocUrl(): string | undefined { + // Extract command from context to build documentation URL + if (this.wsContext?.command) { + const command = this.wsContext.command; + // Convert command format like "hardis:doc:flow2markdown" to URL path + const urlPath = command.replace(/:/g, '/'); + return `${CONSTANTS.DOC_URL_ROOT}/${urlPath}/`; + } + // Return undefined if no specific command + return undefined; + } + + async start() { + this.ws.on('open', async () => { isWsOpen = true; - this.ws.send( - JSON.stringify({ - 
event: "initClient", - context: this.wsContext, - }), - ); - // uxLog(this,c.grey('Initialized WebSocket connection with VsCode SFDX Hardis')); + const commandDocUrl = this.getCommandDocUrl(); + const message = { + event: 'initClient', + context: this.wsContext, + } as any; + if (commandDocUrl) { + message.commandDocUrl = commandDocUrl; + } + // Dynamically import command class and send static uiConfig if present + if (this.wsContext?.command) { + try { + // Convert command string to file path, e.g. hardis:cache:clear -> lib/commands/hardis/cache/clear.js + const commandParts = this.wsContext.command.split(':'); + const commandPath = path.resolve(__dirname, '../../lib/commands', ...commandParts) + '.js'; + const fileUrl = 'file://' + commandPath.replace(/\\/g, '/'); + const imported = await import(fileUrl); + const CommandClass = imported.default; + if (process.env.NO_NEW_COMMAND_TAB === "true") { + message.uiConfig = { hide: true }; + } + else if (CommandClass && CommandClass.uiConfig) { + message.uiConfig = CommandClass.uiConfig; + } + } catch (e) { + uxLog("warning", this, c.yellow(`Warning: Unable to import command class for ${this.wsContext.command}: ${e instanceof Error ? 
e.message : String(e)}`)); + } + } + // Add link to command log file + if (globalThis?.hardisLogFileStream?.path) { + const logFilePath = String(globalThis.hardisLogFileStream.path).replace(/\\/g, '/'); + message.commandLogFile = logFilePath; + } + this.ws.send(JSON.stringify(message)); + // uxLog("other", this,c.grey('Initialized WebSocket connection with VsCode SFDX Hardis')); }); - this.ws.on("message", (data: any) => { + this.ws.on('message', (data: any) => { this.receiveMessage(JSON.parse(data)); }); - this.ws.on("error", (err) => { + this.ws.on('error', (err) => { this.ws.terminate(); globalWs = null; + isWsOpen = false; + this.isDead = true; if (process.env.DEBUG) { console.error(err); } @@ -71,11 +278,25 @@ export class WebSocketClient { receiveMessage(data: any) { if (process.env.DEBUG) { - console.debug("websocket: received: %s", util.inspect(data)); + console.debug('websocket: received: %s', util.inspect(data)); } - if (data.event === "promptsResponse") { + if (data.event === 'ping') { + // Respond to ping messages to keep the connection alive + this.ws.send(JSON.stringify({ event: 'pong' })); + } + else if (data.event === 'promptsResponse') { this.promptResponse = data.promptsResponse; } + else if (data.event === 'userInput') { + userInput = data.userInput; + this.isInitialized = true; + } + else if (data.event === 'cancelCommand') { + if (this.wsContext?.command === data?.context?.command && this.wsContext.id === data?.context?.id) { + uxLog("error", this, c.red('Command cancelled by user')); + process.exit(1); + } + } } sendMessageToServer(data: any) { @@ -84,16 +305,16 @@ export class WebSocketClient { } promptServer(prompts: any): Promise { - this.sendMessageToServer({ event: "prompts", prompts: prompts }); + this.sendMessageToServer({ event: 'prompts', prompts: prompts }); this.promptResponse = null; let ok = false; return new Promise((resolve, reject) => { - let interval = null; - let timeout = null; + let interval: any = null; + let timeout: any 
= null; interval = setInterval(() => { if (this.promptResponse != null) { - clearInterval(interval); - clearTimeout(timeout); + clearInterval(interval as NodeJS.Timeout); + clearTimeout(timeout as NodeJS.Timeout); ok = true; resolve(this.promptResponse); } @@ -101,21 +322,18 @@ export class WebSocketClient { timeout = setTimeout(() => { if (ok === false) { clearInterval(interval); - reject("[sfdx-hardis] No response from UI WebSocket Server"); + reject('[sfdx-hardis] No response from UI WebSocket Server'); } }, 7200000); // 2h timeout }); } - dispose() { - this.ws.send( - JSON.stringify({ - event: "closeClient", - context: this.wsContext, - }), - ); + dispose(status?: string, error: any = null) { + WebSocketClient.sendCloseClientMessage(status, error); this.ws.terminate(); + this.isDead = true; + isWsOpen = false; globalWs = null; - // uxLog(this,c.grey('Closed WebSocket connection with VsCode SFDX Hardis')); + // uxLog("other", this,c.grey('Closed WebSocket connection with VsCode SFDX Hardis')); } } diff --git a/src/config/index.ts b/src/config/index.ts index b35ef748e..e1cd0af68 100644 --- a/src/config/index.ts +++ b/src/config/index.ts @@ -10,40 +10,83 @@ getConfig(layer) returns: - project if layer is project */ -import { SfdxError } from "@salesforce/core"; -import axios from "axios"; -import * as c from "chalk"; -import { cosmiconfig } from "cosmiconfig"; -import * as fs from "fs-extra"; -import * as yaml from "js-yaml"; -import * as os from "os"; -import * as path from "path"; -import { getCurrentGitBranch, isCI, isGitRepo, uxLog } from "../common/utils"; -import { prompts } from "../common/utils/prompts"; - -const moduleName = "sfdx-hardis"; -const projectConfigFiles = ["package.json", `.${moduleName}.yaml`, `.${moduleName}.yml`, `config/.${moduleName}.yaml`, `config/.${moduleName}.yml`]; +import { SfError } from '@salesforce/core'; +import axios from 'axios'; +import c from 'chalk'; +import { cosmiconfig } from 'cosmiconfig'; +import fs from 'fs-extra'; 
+import * as yaml from 'js-yaml'; +import * as os from 'os'; +import * as path from 'path'; +import { getCurrentGitBranch, isCI, isGitRepo, uxLog } from '../common/utils/index.js'; +import { prompts } from '../common/utils/prompts.js'; + +const moduleName = 'sfdx-hardis'; +const projectConfigFiles = [ + 'package.json', + `.${moduleName}.yaml`, + `.${moduleName}.yml`, + `config/.${moduleName}.yaml`, + `config/.${moduleName}.yml`, +]; const username = os.userInfo().username; const userConfigFiles = [`config/user/.${moduleName}.${username}.yaml`, `config/user/.${moduleName}.${username}.yml`]; const REMOTE_CONFIGS: any = {}; +export const getApiVersion = () => { + // globalThis.currentOrgApiVersion is set during authentication check (so not set if --skipauth option is used) + return process.env.SFDX_API_VERSION || globalThis.currentOrgApiVersion || '63.0'; +} + +export const CONSTANTS = { + DOC_URL_ROOT: "https://sfdx-hardis.cloudity.com", + WEBSITE_URL: "https://cloudity.com", + CONTACT_URL: "https://cloudity.com/#form", + NOT_IMPACTING_METADATA_TYPES: process.env.NOT_IMPACTING_METADATA_TYPES?.split(",") ?? 
[ + "Audience", + "AuraDefinitionBundle", + "Bot", + "BotVersion", + "ContentAsset", + "CustomObjectTranslation", + "CustomSite", + "CustomTab", + "Dashboard", + "ExperienceBundle", + "Flexipage", + "GlobalValueSetTranslation", + "Layout", + "LightningComponentBundle", + "NavigationMenu", + "ReportType", + "Report", + "SiteDotCom", + "StandardValueSetTranslation", + "StaticResource", + "Translations" + ] +}; + async function getBranchConfigFiles() { if (!isGitRepo()) { return []; } const gitBranchFormatted = process.env.CONFIG_BRANCH || (await getCurrentGitBranch({ formatted: true })); - const branchConfigFiles = [`config/branches/.${moduleName}.${gitBranchFormatted}.yaml`, `config/branches/.${moduleName}.${gitBranchFormatted}.yml`]; + const branchConfigFiles = [ + `config/branches/.${moduleName}.${gitBranchFormatted}.yaml`, + `config/branches/.${moduleName}.${gitBranchFormatted}.yml`, + ]; return branchConfigFiles; } -export const getConfig = async (layer = "user"): Promise => { +export const getConfig = async (layer: "project" | "branch" | "user" = 'user'): Promise => { const defaultConfig = await loadFromConfigFile(projectConfigFiles); - if (layer === "project") { + if (layer === 'project') { return defaultConfig; } let branchConfig = await loadFromConfigFile(await getBranchConfigFiles()); branchConfig = Object.assign(defaultConfig, branchConfig); - if (layer === "branch") { + if (layer === 'branch') { return branchConfig; } let userConfig = await loadFromConfigFile(userConfigFiles); @@ -52,20 +95,22 @@ export const getConfig = async (layer = "user"): Promise => { }; // Set data in configuration file -export const setConfig = async (layer: string, propValues: any): Promise => { - if (layer === "user" && (fs.readdirSync(process.cwd()).length === 0 || !isGitRepo())) { - if (process?.argv?.includes("--debug")) { - uxLog(this, c.grey("Skip update user config file because current directory is not a salesforce project")); +export const setConfig = async (layer: 
string, propValues: any): Promise => { + if (layer === 'user' && (fs.readdirSync(process.cwd()).length === 0 || !isGitRepo())) { + if (process?.argv?.includes('--debug')) { + uxLog("log", this, c.grey('Skip update user config file because current directory is not a salesforce project')); } return; } const configSearchPlaces = - layer === "project" ? projectConfigFiles : layer === "user" ? userConfigFiles : layer === "branch" ? await getBranchConfigFiles() : []; - await setInConfigFile(configSearchPlaces, propValues); -}; - -export const CONSTANTS = { - API_VERSION: process.env.SFDX_API_VERSION || "61.0", + layer === 'project' + ? projectConfigFiles + : layer === 'user' + ? userConfigFiles + : layer === 'branch' + ? await getBranchConfigFiles() + : []; + return await setInConfigFile(configSearchPlaces, propValues); }; // Load configuration from file @@ -87,7 +132,9 @@ async function loadFromRemoteConfigFile(url) { } const remoteConfigResp = await axios.get(url); if (remoteConfigResp.status !== 200) { - throw new SfdxError("[sfdx-hardis] Unable to read remote configuration file at " + url + "\n" + JSON.stringify(remoteConfigResp)); + throw new SfError( + '[sfdx-hardis] Unable to read remote configuration file at ' + url + '\n' + JSON.stringify(remoteConfigResp) + ); } const remoteConfig = yaml.load(remoteConfigResp.data); REMOTE_CONFIGS[url] = remoteConfig; @@ -95,33 +142,38 @@ async function loadFromRemoteConfigFile(url) { } // Update configuration file -export async function setInConfigFile(searchPlaces: string[], propValues: any, configFile: string = null) { - let explorer = null; - if (configFile == null) { +export async function setInConfigFile(searchPlaces: string[], propValues: any, configFile: string = '') { + let explorer; + if (configFile === '') { explorer = cosmiconfig(moduleName, { searchPlaces }); const configExplorer = await explorer.search(); configFile = configExplorer != null ? 
configExplorer.filepath : searchPlaces.slice(-1)[0]; } - let doc = {}; + let doc: any = {}; if (fs.existsSync(configFile)) { - doc = yaml.load(fs.readFileSync(configFile, "utf-8")); + doc = yaml.load(fs.readFileSync(configFile, 'utf-8')); } doc = Object.assign(doc, propValues); await fs.ensureDir(path.dirname(configFile)); await fs.writeFile(configFile, yaml.dump(doc)); - if (explorer != null) { + if (explorer) { explorer.clearCaches(); } if (!isCI) { - uxLog(this, c.magentaBright(`Updated config file ${c.bold(configFile)} with values: \n${JSON.stringify(propValues, null, 2)}`)); + uxLog( + "other", + this, + c.magentaBright(`Updated config file ${c.bold(configFile)} with values: \n${JSON.stringify(propValues, null, 2)}`) + ); } + return configFile; } // Check configuration of project so it works with sfdx-hardis export const checkConfig = async (options: any) => { // Skip hooks from other commands than hardis:scratch commands - const commandId = options?.Command?.id || options?.id || ""; - if (!commandId.startsWith("hardis")) { + const commandId = options?.Command?.id || options?.id || ''; + if (!commandId.startsWith('hardis')) { return; } @@ -134,19 +186,13 @@ export const checkConfig = async (options: any) => { options?.flags?.devhub === true || options.devHub === true) ) { - const configProject = await getConfig("project"); + const configProject = await getConfig('project'); let projectName = process.env.PROJECT_NAME || configProject.projectName; devHubAliasOk = (process.env.DEVHUB_ALIAS || configProject.devHubAlias) != null; // If not found, prompt user project name and store it in user config file if (projectName == null) { - const promptResponse = await prompts({ - type: "text", - name: "value", - message: c.cyanBright("Please input your project name without spaces or special characters (ex: MonClient)"), - validate: (value: string) => !value.match(/^[0-9a-z]+$/), // check only alphanumeric - }); - projectName = promptResponse.value; - await 
setConfig("project", { + projectName = promptForProjectName(); + await setConfig('project', { projectName, devHubAlias: `DevHub_${projectName}`, }); @@ -156,10 +202,10 @@ export const checkConfig = async (options: any) => { // Set DevHub username if not set if (devHubAliasOk === false && options.Command && options.Command.supportsDevhubUsername === true) { - const configProject = await getConfig("project"); + const configProject = await getConfig('project'); const devHubAlias = process.env.DEVHUB_ALIAS || configProject.devHubAlias; if (devHubAlias == null) { - await setConfig("project", { + await setConfig('project', { devHubAlias: `DevHub_${configProject.projectName}`, }); } @@ -167,8 +213,8 @@ export const checkConfig = async (options: any) => { }; export async function getReportDirectory() { - const configProject = await getConfig("project"); - const defaultReportDir = path.join(process.cwd(), "hardis-report"); + const configProject = await getConfig('project'); + const defaultReportDir = path.join(process.cwd(), 'hardis-report'); const reportDir = configProject.reportDirectory || defaultReportDir; await fs.ensureDir(reportDir); return reportDir; @@ -182,3 +228,36 @@ export function getEnvVar(envVarName: string) { } return varValue; } + +export async function promptForProjectName() { + const projectRes = await prompts({ + type: 'text', + name: 'projectName', + message: 'What is the name of your project ?', + description: 'Used to generate environment variables and configuration files for your Salesforce project', + placeholder: 'Ex: MyClient', + }); + const userProjectName = projectRes.projectName + ''; + let projectName = projectRes.projectName.toLowerCase().replace(' ', '_'); + // Make sure that projectName is compliant with the format of an environment variable + projectName = projectName.replace(/[^a-zA-Z0-9_]/g, '_').replace(/^[^a-zA-Z_]+/, ''); + if (projectName !== userProjectName) { + uxLog( + "warning", + this, + c.yellow( + `Project name has been 
changed to ${projectName} because it must be compliant with the format of an environment variable.` + ) + ); + const promptResp = await prompts({ + type: 'confirm', + message: `Are you ok with updated project name "${projectName}" ?`, + description: 'Confirms the use of the sanitized project name which must be compliant with environment variable format', + }); + if (promptResp.value === true) { + return projectName; + } + return promptForProjectName(); + } + return projectName; +} diff --git a/src/hooks/auth/auth.ts b/src/hooks/auth/auth.ts new file mode 100644 index 000000000..39d22fa96 --- /dev/null +++ b/src/hooks/auth/auth.ts @@ -0,0 +1,49 @@ +import { + getCurrentGitBranch, + isCI, +} from '../../common/utils/index.js'; +import c from "chalk" +import { checkConfig, getConfig } from '../../config/index.js'; +import { Hook } from '@oclif/core'; +import { authOrg } from '../../common/utils/authUtils.js'; + +const hook: Hook<'auth'> = async (options: any) => { + const commandId = options?.Command?.id || ''; + console.log(c.grey("Entering login Auth hook...")); + let configInfo = await getConfig('user'); + + // Manage authentication if DevHub is required but current user is disconnected + if ((options as any)?.devHub === true) { + console.log(c.grey("We'll try to authenticate to the DevHub")); + let devHubAlias = configInfo.devHubAlias || process.env.DEVHUB_ALIAS; + if (devHubAlias == null) { + await checkConfig(options); + configInfo = await getConfig('user'); + devHubAlias = configInfo.devHubAlias || 'DevHub'; + } + await authOrg(devHubAlias, options); + } + // Manage authentication if org is required but current user is disconnected + if ( + (options as any)?.checkAuth === true && + !((options as any)?.devHub === true) + ) { + const orgAlias = (options as any)?.alias + ? (options as any).alias + : process.env.ORG_ALIAS + ? process.env.ORG_ALIAS + : isCI && configInfo.scratchOrgAlias + ? 
configInfo.scratchOrgAlias + : isCI && (options as any)?.scratch && configInfo.sfdxAuthUrl + ? configInfo.sfdxAuthUrl + : isCI + ? await getCurrentGitBranch({ formatted: true }) + : commandId === 'hardis:auth:login' && configInfo.orgAlias + ? configInfo.orgAlias + : configInfo.scratchOrgAlias || ''; // Can be '' and it's ok if we're not in scratch org context + console.log(c.grey(`We'll try to authenticate to the org related to ${orgAlias !== configInfo.sfdxAuthUrl ? (orgAlias || "DEFAULT ORG") : "sfdxAuthUrl"}`)); + await authOrg(orgAlias, options); + } +}; + +export default hook; diff --git a/src/hooks/finally/notify.ts b/src/hooks/finally/notify.ts new file mode 100644 index 000000000..c8e1757b0 --- /dev/null +++ b/src/hooks/finally/notify.ts @@ -0,0 +1,43 @@ +import { Hook } from '@oclif/core'; + +// The use of this method is deprecated: use NotifProvider.sendNotification :) + +const hook: Hook<"finally"> = async (options) => { + // Skip hooks from other commands than hardis commands + const commandId = options?.Command?.id || ''; + if (!commandId.startsWith('hardis')) { + return; + } + + // Dynamic import to save perfs when other CLI commands are called + const c = (await import('chalk')).default; + const { elapseEnd, uxLog } = await import('../../common/utils/index.js'); + + if (globalThis.hardisLogFileStream) { + globalThis.hardisLogFileStream.end(); + globalThis.hardisLogFileStream = null; + } + + const status = options?.error ? 
'error' : 'success'; + const error = options?.error || null; + + // Close WebSocketClient if existing + if (globalThis.webSocketClient) { + try { + globalThis.webSocketClient.dispose(status, error); + } catch (e) { + if (options?.Command?.flags?.debug) { + uxLog("warning", this, c.yellow('Unable to close websocketClient.js') + '\n' + (e as Error).message); + } + } + globalThis.webSocketClient = null; + } + + const aiCounter = globalThis?.aiCallsNumber || 0; + if (aiCounter > 0) { + uxLog("log", this, c.grey(c.italic(`AI prompts API calls: ${aiCounter}`))); + } + elapseEnd(`${options?.Command?.id} execution time`); +}; + +export default hook; diff --git a/src/hooks/init/check-local-sfdx-hardis-files.ts b/src/hooks/init/check-local-sfdx-hardis-files.ts deleted file mode 100644 index 37fea8ec5..000000000 --- a/src/hooks/init/check-local-sfdx-hardis-files.ts +++ /dev/null @@ -1,170 +0,0 @@ -import * as c from "chalk"; -import * as fs from "fs-extra"; -import { isCI, isMonitoringJob, uxLog } from "../../common/utils"; -import { prompts } from "../../common/utils/prompts"; -import { getConfig, setConfig } from "../../config"; - -export const hook = async (options: any) => { - // Skip hooks from other commands than hardis:scratch commands - const commandId = options?.id || ""; - - // Disable this hook for now - if ((process.env?.AUTO_UPDATE || "false") !== "true") { - return; - } - - if (!process.argv.includes("--json")) { - await manageGitIgnoreForceIgnore(commandId); - } -}; - -async function manageGitIgnoreForceIgnore(commandId: string) { - if (!commandId.startsWith("hardis")) { - return; - } - const isMon = await isMonitoringJob(); - if (!commandId.includes("monitoring") && !isMon) { - if ( - commandId.startsWith("hardis:work:task:new") || - commandId.startsWith("hardis:doc") || - commandId.startsWith("hardis:scratch") || - commandId.startsWith("hardis:org") - ) { - return; - } - - if (!isCI && process.env.AUTO_UPDATE !== "true" && process.env.AUTO_UPDATE_CI_CONFIG 
!== "true") { - return; - } - } - const config = await getConfig("user"); - // Manage .gitignore - if (!config.skipUpdateGitIgnore === true) { - const gitIgnoreFile = "./.gitignore"; - if (fs.existsSync(gitIgnoreFile)) { - const gitIgnore = await fs.readFile(gitIgnoreFile, "utf-8"); - const gitIgnoreLines = gitIgnore - .replace("\r\n", "\n") - .split("\n") - .map((line) => line.trim()) - .filter((line) => line !== ""); - let updated = false; - for (const gitIgnoreMandatoryLine of await getHardisGitRepoIgnoreContent()) { - if (!gitIgnoreLines.includes(gitIgnoreMandatoryLine)) { - gitIgnoreLines.push(gitIgnoreMandatoryLine); - updated = true; - } - } - // Remove duplicates - const gitIgnoreLinesUnique = Array.from(new Set(gitIgnoreLines)); - // Propose user to apply updates - if ((updated || gitIgnoreLines.length !== gitIgnoreLinesUnique.length) && !isCI) { - const confirm = await prompts({ - type: "select", - name: "value", - initial: true, - message: c.cyanBright("Your .gitignore is deprecated, do you agree to upgrade it ? 
(If you hesitate, just trust us and accept)"), - choices: [ - { title: "Yes", value: "true" }, - { title: "No ", value: "false" }, - { title: "Never ask again ", value: "never" }, - ], - }); - if (confirm.value === "true" || isCI) { - await fs.writeFile(gitIgnoreFile, gitIgnoreLinesUnique.join("\n") + "\n", "utf-8"); - uxLog(this, c.cyan("[sfdx-hardis] Updated .gitignore")); - } - if (confirm.value === "never") { - await setConfig("project", { skipUpdateGitIgnore: true }); - } - } - } - } - - // Manage .forceignore - if (!config.skipUpdateForceIgnore === true) { - const forceIgnoreFile = "./.forceignore"; - if (fs.existsSync(forceIgnoreFile)) { - const forceIgnore = await fs.readFile(forceIgnoreFile, "utf-8"); - const forceIgnoreLines = forceIgnore - .replace("\r\n", "\n") - .split("\n") - .map((line) => line.trim()) - .filter((line) => line !== ""); - let updated = false; - for (const forceIgnoreMandatoryLine of await getHardisForceIgnoreContent()) { - if (!forceIgnoreLines.includes(forceIgnoreMandatoryLine)) { - forceIgnoreLines.push(forceIgnoreMandatoryLine); - updated = true; - } - } - // Remove duplicates - const forceIgnoreLinesUnique = Array.from(new Set(forceIgnoreLines)); - // Propose user to apply updates - /* jscpd:ignore-start */ - if ((updated || forceIgnoreLines.length !== forceIgnoreLinesUnique.length) && !isCI) { - const confirm = await prompts({ - type: "select", - name: "value", - initial: true, - message: c.cyanBright("Your .forceignore is deprecated, do you agree to upgrade it ?"), - choices: [ - { title: "Yes", value: "true" }, - { title: "No ", value: "false" }, - { title: "Never ask again ", value: "never" }, - ], - }); - /* jscpd:ignore-end */ - if (confirm.value === "true" || isCI) { - await fs.writeFile(forceIgnoreFile, forceIgnoreLinesUnique.join("\n") + "\n", "utf-8"); - uxLog(this, c.cyan("[sfdx-hardis] Updated .forceignore")); - } - if (confirm.value === "never") { - await setConfig("project", { skipUpdateForceIgnore: true }); - } - } 
- } - } -} - -async function getHardisGitRepoIgnoreContent() { - const gitIgnoreContent = [ - ".cache/", - "config/user/", - "hardis-report/", - "tmp/", - "**/__tests__/**", - // Metadatas to be ignored - "**/cleanDataServices/", - "**/siteDotComSites/*.site", - // SFDX Items to be ignored - "**/data/**/source/**", - "**/data/**/target/**", - "force-app/main/default/appMenus/AppSwitcher.appMenu-meta.xml", - ]; - return gitIgnoreContent; -} - -async function getHardisForceIgnoreContent() { - const forceIgnoreContent = [ - "**/appMenu/**", - "**/appSwitcher/**", - "**/appMenus/AppSwitcher.appMenu-meta.xml", - - "**/connectedApps/**", - "**/certs/**", - "**/profilePasswordPolicies/**", - - //"**/objectTranslations/**", - // "**/profiles/**", - // "**/settings/**", - - "**/jsconfig.json", - "**/.eslintrc.json", - - "**/__tests__/**", - "**/pubsub/**", - "**SfdxHardisDeferSharingRecalc**", - ]; - return forceIgnoreContent; -} diff --git a/src/hooks/init/check-upgrade.ts b/src/hooks/init/check-upgrade.ts index 4c6177532..dbfd619f1 100644 --- a/src/hooks/init/check-upgrade.ts +++ b/src/hooks/init/check-upgrade.ts @@ -1,36 +1,52 @@ -import * as c from "chalk"; -import * as readPkgUp from "read-pkg-up"; -import * as updateNotifier from "update-notifier"; -import * as semver from "semver"; +import { Hook } from '@oclif/core'; -export const hook = async (options: any) => { +const hook: Hook<'init'> = async (options) => { // Skip hooks from other commands than hardis commands - const commandId = options?.id || ""; - if (!commandId.startsWith("hardis")) { + const commandId = options?.id || ''; + if (!commandId.startsWith('hardis')) { return; } + // Dynamically import libraries to avoid loading them if not needed + const c = (await import('chalk')).default; + const { fileURLToPath } = await import('url'); + const path = await import('path'); + const semver = (await import('semver')).default; + const updateNotifier = (await import('update-notifier')).default; + const { 
readPackageUp } = await import('read-package-up'); + + const __filename = fileURLToPath(import.meta.url); + const __dirname = path.dirname(__filename); + // Check if an upgrade of sfdx-hardis is required // Use promise + then to not block plugin execution during that - const pkg = await readPkgUp({ cwd: __dirname }); + const pkg = await readPackageUp({ cwd: __dirname }); const notifier = updateNotifier({ - pkg: pkg.packageJson, - updateCheckInterval: 900, // check every 15 mn + pkg: pkg?.packageJson, + updateCheckInterval: 1000 * 60 * 60 * 6, // check every 6 hours }); if ( - notifier && - notifier.update && + notifier?.update && notifier.update.current !== notifier.update.latest && semver.compare(notifier.update.latest, notifier.update.current) === 1 ) { - console.warn(c.yellow("***********************************************************************************************************************")); console.warn( c.yellow( - `WARNING: You are using sfdx-hardis v${notifier.update.current}: Please upgrade to v${notifier.update.latest} by running ${c.green( - "sf plugins install sfdx-hardis", - )}`, - ), + '***********************************************************************************************************************' + ) + ); + console.warn( + c.yellow( + `WARNING: You are using sfdx-hardis v${notifier.update.current}: Please upgrade to v${notifier.update.latest + } by running ${c.green('sf plugins install sfdx-hardis')}` + ) + ); + console.warn( + c.yellow( + '***********************************************************************************************************************' + ) ); - console.warn(c.yellow("***********************************************************************************************************************")); } }; + +export default hook; diff --git a/src/hooks/init/log.ts b/src/hooks/init/log.ts index da6afbdec..72ae685a2 100644 --- a/src/hooks/init/log.ts +++ b/src/hooks/init/log.ts @@ -1,29 +1,38 @@ -import * as fs from "fs-extra"; 
-import * as path from "path"; -import * as os from "os"; -import { isCI } from "../../common/utils"; +import { Hook } from '@oclif/core'; -export const hook = async (options: any) => { - // Set argv as global as sf arch messes with it ! - globalThis.processArgv = [...options.argv]; +const hook: Hook<'init'> = async (options) => { // Skip hooks from other commands than hardis commands - const commandId = options?.id || "unknown"; - if (!commandId.startsWith("hardis")) { + const commandId = options?.id || 'unknown'; + if (!commandId.startsWith('hardis')) { return; } - if (process.env.SFDX_HARDIS_DEBUG_ENV === "true") { - console.log("ENV VARS:\n" + JSON.stringify(process.env, null, 2)); - process.env.SFDX_ENV = "development"; // So when there is an error, the stack is displayed + // Set argv as global as sf arch messes with it ! + globalThis.processArgv = [...options.argv]; + // Dynamically import libraries to improve perfs when other commands are called + const fs = (await import('fs-extra')).default; + const path = await import('path'); + const os = await import('os'); + const { isCI } = await import('../../common/utils/index.js'); + const dotenv = await import('dotenv'); + // Handle variables defined in .env file + dotenv.config(); + // Debug env variables + if (process.env.SFDX_HARDIS_DEBUG_ENV === 'true') { + console.log('ENV VARS:\n' + JSON.stringify(process.env, null, 2)); + process.env.SFDX_ENV = 'development'; // So when there is an error, the stack is displayed } if (!isCI) { // Initialize log file name (in the current directory if not empty) - const reportsDir = fs.readdirSync(process.cwd()).length === 0 ? path.join(os.tmpdir(), "hardis-report") : "./hardis-report"; + const reportsDir = + fs.readdirSync(process.cwd()).length === 0 ? 
path.join(os.tmpdir(), 'hardis-report') : './hardis-report'; await fs.ensureDir(reportsDir); - const commandsLogFolder = path.join(reportsDir, "commands"); + const commandsLogFolder = path.join(reportsDir, 'commands'); await fs.ensureDir(commandsLogFolder); - const logFileName = (new Date().toJSON().slice(0, 19) + "-" + commandId + ".log").replace(/:/g, "-"); + const logFileName = (new Date().toJSON().slice(0, 19) + '-' + commandId + '.log').replace(/:/g, '-'); const hardisLogFile = path.resolve(path.join(commandsLogFolder, logFileName)); - globalThis.hardisLogFileStream = fs.createWriteStream(hardisLogFile, { flags: "a" }); - globalThis.hardisLogFileStream.write(process.argv.join(" ")); + globalThis.hardisLogFileStream = fs.createWriteStream(hardisLogFile, { flags: 'a' }); + globalThis.hardisLogFileStream.write(commandId + ' ' + globalThis.processArgv.join(' ') + '\n'); } }; + +export default hook; diff --git a/src/hooks/init/start-ws-client.ts b/src/hooks/init/start-ws-client.ts index 5ca121af2..f8b1ef4e6 100644 --- a/src/hooks/init/start-ws-client.ts +++ b/src/hooks/init/start-ws-client.ts @@ -1,20 +1,26 @@ -import { isCI } from "../../common/utils"; -import { WebSocketClient } from "../../common/websocketClient"; +import { Hook } from '@oclif/core'; -export const hook = async (options: any) => { +const hook: Hook<'init'> = async (options) => { // Skip hooks from other commands than hardis commands - const commandId = options?.id || ""; - if (!commandId.startsWith("hardis")) { + const commandId = options?.id || ''; + if (!commandId.startsWith('hardis')) { return; } + // Dynamically import libraries to avoid loading it if not needed + const { isCI } = await import('../../common/utils/index.js'); + const { WebSocketClient } = await import('../../common/websocketClient.js'); + // Initialize WebSocketClient to communicate with VsCode SFDX Hardis extension if (!isCI) { const context: any = { command: commandId, id: process.pid }; - const websocketArgIndex = 
options?.argv?.indexOf("--websocket"); + const websocketArgIndex = options?.argv?.indexOf('--websocket'); if (websocketArgIndex || websocketArgIndex === 0) { context.websocketHostPort = options.argv[websocketArgIndex + 1]; } globalThis.webSocketClient = new WebSocketClient(context); + await WebSocketClient.isInitialized(); } }; + +export default hook; diff --git a/src/hooks/postrun/notify.ts b/src/hooks/postrun/notify.ts deleted file mode 100644 index 1bdc70480..000000000 --- a/src/hooks/postrun/notify.ts +++ /dev/null @@ -1,33 +0,0 @@ -import * as c from "chalk"; -import { elapseEnd, uxLog } from "../../common/utils"; - -// The use of this method is deprecated: use NotifProvider.sendNotification :) - -export const hook = async (options: any) => { - if (globalThis.hardisLogFileStream) { - globalThis.hardisLogFileStream.end(); - globalThis.hardisLogFileStream = null; - } - - // Skip hooks from other commands than hardis commands - const commandId = options?.Command?.id || ""; - if (!commandId.startsWith("hardis")) { - return; - } - elapseEnd(`${options?.Command?.id} execution time`); - if (commandId.startsWith("hardis:doc")) { - return; - } - - // Close WebSocketClient if existing - if (globalThis.webSocketClient) { - try { - globalThis.webSocketClient.dispose(); - } catch (e) { - if (options.debug) { - uxLog(this, c.yellow("Unable to close webSocketClient") + "\n" + e.message); - } - } - globalThis.webSocketClient = null; - } -}; diff --git a/src/hooks/postrun/store-cache.ts b/src/hooks/postrun/store-cache.ts index 85e01eada..f3a29466c 100644 --- a/src/hooks/postrun/store-cache.ts +++ b/src/hooks/postrun/store-cache.ts @@ -1,13 +1,18 @@ -import { copyLocalSfdxInfo } from "../../common/utils"; +import { Hook } from '@oclif/core'; -export const hook = async (options: any) => { +const hook: Hook<'postrun'> = async (options) => { // Skip hooks from other commands than hardis commands - const commandId = options?.Command?.id || ""; - if 
(!commandId.startsWith("hardis:scratch:create")) { + const commandId = options?.Command?.id || ''; + if (!commandId.startsWith('hardis:scratch:create')) { return; } + // Dynamic import to improve perfs + const { copyLocalSfdxInfo } = await import('../../common/utils/index.js'); + // Copy local SFDX cache for CI await copyLocalSfdxInfo(); return; }; + +export default hook; diff --git a/src/hooks/prerun/auth.ts b/src/hooks/prerun/auth.ts index fd2b94e02..fe9342c89 100644 --- a/src/hooks/prerun/auth.ts +++ b/src/hooks/prerun/auth.ts @@ -1,441 +1,87 @@ -import { SfdxError } from "@salesforce/core"; -import * as c from "chalk"; -import * as crossSpawn from "cross-spawn"; -import * as fs from "fs-extra"; -import * as path from "path"; -import { clearCache } from "../../common/cache"; -import { decryptFile } from "../../common/cryptoUtils"; -import { - createTempDir, - elapseStart, - execCommand, - execSfdxJson, - getCurrentGitBranch, - isCI, - promptInstanceUrl, - restoreLocalSfdxInfo, - uxLog, -} from "../../common/utils"; -import { WebSocketClient } from "../../common/websocketClient"; -import { checkConfig, getConfig } from "../../config"; -import { prompts } from "../../common/utils/prompts"; -export const hook = async (options: any) => { - // Skip hooks from other commands than hardis commands - const commandId = options?.Command?.id || ""; +import { Hook } from '@oclif/core'; - if (commandId.startsWith("hardis")) { - elapseStart(`${options?.Command?.id} execution time`); - } +const hook: Hook<'prerun'> = async (options) => { + // Skip hooks from other commands than hardis commands + const commandId = options?.Command?.id || ''; if ( - !commandId.startsWith("hardis") || + !commandId.startsWith('hardis') || [ - "hardis:doc:plugin:generate", - "hardis:source:push", - "hardis:source:pull", - "hardis:scratch:pool:view", - "hardis:source:deploy", - "hardis:source:push", - "hardis:mdapi:deploy", + 'hardis:doc:plugin:generate', + 'hardis:source:push', + 
'hardis:source:pull', + 'hardis:scratch:pool:view', + 'hardis:source:deploy', + 'hardis:source:push', + 'hardis:mdapi:deploy', + 'hardis:project:deploy:simulate' ].includes(commandId) ) { return; } // skip if during mocha tests - if (typeof global.it === "function") { + if (typeof global.it === 'function') { return; } + + // Dynamic imports to improve performances when other CLI commands are called + const { authOrg } = await import('../../common/utils/authUtils.js'); + const c = (await import('chalk')).default; + const { checkConfig, getConfig } = await import('../../config/index.js'); + const { + elapseStart, + getCurrentGitBranch, + isCI, + restoreLocalSfdxInfo, + } = await import('../../common/utils/index.js'); + + if (commandId.startsWith('hardis')) { + elapseStart(`${options?.Command?.id} execution time`); + } + await restoreLocalSfdxInfo(); - let configInfo = await getConfig("user"); + let configInfo = await getConfig('user'); if (configInfo.skipAuthCheck === true) { - uxLog(this, c.yellow("No authentication check, you better know what you are doing ;)")); + console.log(c.yellow('No authentication check, you better know what you are doing ;)')); return; } // Manage authentication if DevHub is required but current user is disconnected - if ((options.Command && options.Command.requiresDevhubUsername === true) || options.devHub === true) { + if ( + (options.Command && (options?.Command?.flags as any)['target-dev-hub']?.required === true) || + (options as any)?.devHub === true + ) { let devHubAlias = configInfo.devHubAlias || process.env.DEVHUB_ALIAS; if (devHubAlias == null) { await checkConfig(options); - configInfo = await getConfig("user"); - devHubAlias = configInfo.devHubAlias || "DevHub"; + configInfo = await getConfig('user'); + devHubAlias = configInfo.devHubAlias || 'DevHub'; } await authOrg(devHubAlias, options); } // Manage authentication if org is required but current user is disconnected if ( - ((options?.Command?.requiresUsername === true && 
!options?.argv?.includes("--skipauth")) || options.checkAuth === true) && - !(options.devHub === true) + (((options?.Command?.flags as any)['target-org']?.required === true && !options?.argv?.includes('--skipauth')) || + (options as any)?.checkAuth === true) && + !((options as any)?.devHub === true) ) { - const orgAlias = options.alias - ? options.alias + const orgAlias = (options as any)?.alias + ? (options as any).alias : process.env.ORG_ALIAS ? process.env.ORG_ALIAS : isCI && configInfo.scratchOrgAlias ? configInfo.scratchOrgAlias - : isCI && options.scratch && configInfo.sfdxAuthUrl + : isCI && (options as any)?.scratch && configInfo.sfdxAuthUrl ? configInfo.sfdxAuthUrl : isCI ? await getCurrentGitBranch({ formatted: true }) - : commandId === "hardis:auth:login" && configInfo.orgAlias + : commandId === 'hardis:auth:login' && configInfo.orgAlias ? configInfo.orgAlias - : configInfo.scratchOrgAlias || "MY_ORG"; // Can be null and it's ok if we're not in scratch org context + : configInfo.scratchOrgAlias || ''; // Can be '' and it's ok if we're not in scratch org context await authOrg(orgAlias, options); } }; -// Authorize an org with sfdxAuthUrl, manually or with JWT -async function authOrg(orgAlias: string, options: any) { - const isDevHub = orgAlias.includes("DevHub"); - - let doConnect = true; - if (!options.checkAuth) { - // Check if we are already authenticated - let orgDisplayCommand = "sfdx force:org:display"; - let setDefaultUsername = false; - if (orgAlias !== "MY_ORG" && (isCI || isDevHub) && !orgAlias.includes("force://")) { - orgDisplayCommand += " --targetusername " + orgAlias; - setDefaultUsername = true; - } else { - if (process.argv.includes("-u") || process.argv.includes("--targetusername")) { - const posUsername = process.argv.indexOf("-u") > -1 ? 
process.argv.indexOf("-u") + 1 : process.argv.indexOf("--targetusername") + 1; - orgDisplayCommand += " --targetusername " + process.argv[posUsername]; - } - } - const orgInfoResult = await execSfdxJson(orgDisplayCommand, this, { - fail: false, - output: false, - debug: options.debug, - }); - if ( - orgInfoResult.result && - orgInfoResult.result.connectedStatus !== "RefreshTokenAuthError" && - ((orgInfoResult.result.connectedStatus && orgInfoResult.result.connectedStatus.includes("Connected")) || - (options.scratch && orgInfoResult.result.connectedStatus.includes("Unknown")) || - (orgInfoResult.result.alias === orgAlias && orgInfoResult.result.id != null) || - (orgInfoResult.result.username === orgAlias && orgInfoResult.result.id != null) || - (isDevHub && orgInfoResult.result.id != null)) - ) { - // Set as default username or devhubusername - uxLog( - this, - `[sfdx-hardis] You are already ${c.green("connected")} as ${c.green(orgInfoResult.result.username)} on org ${c.green( - orgInfoResult.result.instanceUrl, - )}`, - ); - if (orgInfoResult.result.expirationDate) { - uxLog(this, c.cyan(`[sfdx-hardis] Org expiration date: ${c.yellow(orgInfoResult.result.expirationDate)}`)); - } - if (!isCI) { - uxLog( - this, - c.yellow( - c.italic( - `[sfdx-hardis] If this is NOT the org you want to play with, ${c.whiteBright(c.bold("hit CTRL+C"))}, then input ${c.whiteBright( - c.bold("sfdx hardis:org:select"), - )}`, - ), - ), - ); - } - if (setDefaultUsername) { - const setDefaultUsernameCommand = `sfdx config:set ${isDevHub ? 
"defaultdevhubusername" : "defaultusername"}=${ - orgInfoResult.result.username - }`; - await execSfdxJson(setDefaultUsernameCommand, this, { fail: false }); - } - doConnect = false; - } - } - // Perform authentication - if (doConnect) { - let logged = false; - const config = await getConfig("user"); - - // Manage auth with sfdxAuthUrl (CI & scratch org only) - const authUrlVarName = `SFDX_AUTH_URL_${orgAlias}`; - const authUrlVarNameUpper = `SFDX_AUTH_URL_${orgAlias.toUpperCase()}`; - let authUrl = process.env[authUrlVarName] || process.env[authUrlVarNameUpper] || orgAlias || ""; - if (isDevHub) { - authUrl = process.env[authUrlVarName] || process.env[authUrlVarNameUpper] || process.env.SFDX_AUTH_URL_DEV_HUB || orgAlias || ""; - } - if (authUrl.includes("force://")) { - const authFile = path.join(await createTempDir(), "sfdxScratchAuth.txt"); - await fs.writeFile(authFile, authUrl, "utf8"); - const authCommand = - `sfdx auth:sfdxurl:store -f ${authFile}` + - (isDevHub ? ` --setdefaultdevhubusername` : ` --setdefaultusername`) + - (!orgAlias.includes("force://") ? ` --setalias ${orgAlias}` : ""); - await execCommand(authCommand, this, { fail: true, output: false }); - uxLog(this, c.cyan("Successfully logged using sfdxAuthUrl")); - await fs.remove(authFile); - return; - } - - // Get auth variables, with priority CLI arguments, environment variables, then .hardis-sfdx.yml config file - let username = - typeof options.Command.flags?.targetusername === "string" - ? options.Command.flags?.targetusername - : process.env.TARGET_USERNAME || isDevHub - ? config.devHubUsername - : config.targetUsername; - if (username == null && isCI) { - const gitBranchFormatted = await getCurrentGitBranch({ formatted: true }); - console.error( - c.yellow( - `[sfdx-hardis][WARNING] You may have to define ${c.bold( - isDevHub - ? "devHubUsername in .sfdx-hardis.yml" - : options.scratch - ? 
'cache between your CI jobs: folder ".cache/sfdx-hardis/.sfdx"' - : `targetUsername in config/branches/.sfdx-hardis.${gitBranchFormatted}.yml`, - )} `, - ), - ); - process.exit(1); - } - let instanceUrl = - typeof options.Command?.flags?.instanceurl === "string" && (options.Command?.flags?.instanceurl || "").startsWith("https") - ? options.Command.flags.instanceurl - : (process.env.INSTANCE_URL || "").startsWith("https") - ? process.env.INSTANCE_URL - : config.instanceUrl - ? config.instanceUrl - : "https://login.salesforce.com"; - // Get JWT items clientId and certificate key - const sfdxClientId = await getSfdxClientId(orgAlias, config); - const crtKeyfile = await getCertificateKeyFile(orgAlias, config); - const usernameArg = options.setDefault === false ? "" : isDevHub ? "--setdefaultdevhubusername" : "--setdefaultusername"; - if (crtKeyfile && sfdxClientId && username) { - // Login with JWT - const loginCommand = - "sfdx auth:jwt:grant" + - ` ${usernameArg}` + - ` --clientid ${sfdxClientId}` + - ` --jwtkeyfile ${crtKeyfile}` + - ` --username ${username}` + - ` --instanceurl ${instanceUrl}` + - (orgAlias !== "MY_ORG" ? ` --setalias ${orgAlias}` : ""); - const jwtAuthRes = await execSfdxJson(loginCommand, this, { - fail: false, - }); - // await fs.remove(crtKeyfile); // Delete private key file from temp folder TODO: move to postrun hook - logged = jwtAuthRes.status === 0; - if (!logged) { - console.error(c.red(`[sfdx-hardis][ERROR] JWT login error: \n${JSON.stringify(jwtAuthRes)}`)); - process.exit(1); - } - } else if (!isCI) { - // Login with web auth - const orgLabel = `org ${orgAlias}`; - console.warn( - c.yellow(c.bold(`[sfdx-hardis] You must be connected to ${orgLabel} to perform this command. 
Please login in the open web browser`)), - ); - - if (isCI) { - console.error(c.red(`See CI authentication doc at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/`)); - throw new SfdxError( - `In CI context, you may define: - - a .sfdx-hardis.yml file with instanceUrl and targetUsername properties (or INSTANCE_URL and TARGET_USERNAME repo variables) - - a repository secret variable SFDX_CLIENT_ID with consumer key of sfdx connected app - - store server.key file within ssh folder - `, - ); - } - const orgTypes = isDevHub ? ["login"] : ["login", "test"]; - instanceUrl = await promptInstanceUrl(orgTypes, orgAlias); - const configInfoUsr = await getConfig("user"); - // Prompt user for Web or Device login - const loginTypeRes = await prompts({ - name: "loginType", - type: "select", - message: "Select a login type (if you don't know, use Web)", - choices: [ - { - title: "🌐 Web Login (If VsCode is locally installed on your computer)", - value: "web", - }, - { - title: "📟 Device Login (Useful for CodeBuilder / CodeSpaces)", - value: "device", - description: "Look at the instructions in the console terminal if you select this option", - }, - ], - default: "web", - initial: "web", - }); - - let loginResult: any = null; - // Manage device login - if (loginTypeRes.loginType === "device") { - const loginCommandArgs = ["org:login:device", "--instanceurl", instanceUrl]; - if (orgAlias !== "MY_ORG" && orgAlias !== configInfoUsr?.scratchOrgAlias) { - loginCommandArgs.push(...["--alias", orgAlias]); - } - if (options.setDefault === true && isDevHub) { - loginCommandArgs.push("--setdefaultdevhubusername"); - } - if (options.setDefault === true && !isDevHub) { - loginCommandArgs.push("--set-default"); - } - const commandStr = "sfdx " + loginCommandArgs.join(" "); - uxLog(this, `[sfdx-hardis][command] ${c.bold(c.bgWhite(c.grey(commandStr)))}`); - loginResult = crossSpawn.sync("sfdx", loginCommandArgs, { stdio: "inherit" }); - } - // Web Login if device login not used - if 
(loginResult == null) { - const loginCommand = - "sfdx auth:web:login" + - (options.setDefault === false ? "" : isDevHub ? " --setdefaultdevhubusername" : " --setdefaultusername") + - ` --instanceurl ${instanceUrl}` + - (orgAlias !== "MY_ORG" && orgAlias !== configInfoUsr?.scratchOrgAlias ? ` --setalias ${orgAlias}` : ""); - try { - loginResult = await execCommand(loginCommand, this, { output: true, fail: true, spinner: false }); - } catch (e) { - // Give instructions if server is unavailable - if ((e?.message || "").includes("Cannot start the OAuth redirect server on port")) { - uxLog( - this, - c.yellow(c.bold("You might have a ghost sfdx command. Open Task Manager, search for Node.js processes, kill them, then try again")), - ); - } - throw e; - } - } - await clearCache("force:org:list"); - uxLog(this, c.grey(JSON.stringify(loginResult, null, 2))); - logged = loginResult.status === 0; - username = loginResult?.username || "err"; - instanceUrl = loginResult?.instanceUrl || instanceUrl; - } else { - console.error(c.red(`[sfdx-hardis] Unable to connect to org ${orgAlias} with browser. Please try again :)`)); - } - if (logged) { - // Retrieve default username or dev hub username if not returned by command - if (username === "err") { - const configGetRes = await execSfdxJson("sfdx config:get " + (isDevHub ? 
"defaultdevhubusername" : "defaultusername"), this, { - output: false, - fail: false, - }); - username = configGetRes?.result[0]?.value || ""; - } - uxLog(this, `Successfully logged to ${c.green(instanceUrl)} with ${c.green(username)}`); - WebSocketClient.sendMessage({ event: "refreshStatus" }); - // Assign org to SfdxCommands - if (isDevHub) { - options.Command.flags.targetdevhubusername = username; - // options.Command.assignHubOrg(); // seems to be automatically done by SfdxCommand under the hook - } else { - options.Command.flags.targetusername = username; - // options.Command.assignOrg(); // seems to be automatically done by SfdxCommand under the hook - } - // Display warning message in case of local usage (not CI), and not login command - // if (!(options?.Command?.id || "").startsWith("hardis:auth:login")) { - // console.warn(c.yellow("*** IF YOU SEE AN AUTH ERROR PLEASE RUN AGAIN THE SAME COMMAND :) ***")); - // } - } else { - console.error(c.red("[sfdx-hardis][ERROR] You must be logged to an org to perform this action")); - process.exit(1); // Exit because we should succeed to connect - } - } -} - -// Get clientId for SFDX connected app -async function getSfdxClientId(orgAlias: string, config: any) { - // Try to find in global variables - const sfdxClientIdVarName = `SFDX_CLIENT_ID_${orgAlias}`; - if (process.env[sfdxClientIdVarName]) { - return process.env[sfdxClientIdVarName]; - } - const sfdxClientIdVarNameUpper = sfdxClientIdVarName.toUpperCase(); - if (process.env[sfdxClientIdVarNameUpper]) { - return process.env[sfdxClientIdVarNameUpper]; - } - if (process.env.SFDX_CLIENT_ID) { - console.warn( - c.yellow( - `[sfdx-hardis] If you use CI on multiple branches & orgs, you should better define CI variable ${c.bold( - sfdxClientIdVarNameUpper, - )} than SFDX_CLIENT_ID`, - ), - ); - console.warn(c.yellow(`See CI authentication doc at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/`)); - return process.env.SFDX_CLIENT_ID; - } - // Try to find 
in config files ONLY IN LOCAL MODE (in CI, it's supposed to be a CI variable) - if (!isCI && config.devHubSfdxClientId) { - return config.devHubSfdxClientId; - } - if (isCI) { - console.error( - c.red(`[sfdx-hardis] You must set env variable ${c.bold(sfdxClientIdVarNameUpper)} with the Consumer Key value defined on SFDX Connected app`), - ); - console.error(c.red(`See CI authentication doc at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/`)); - } - return null; -} - -// Get clientId for SFDX connected app -async function getKey(orgAlias: string, config: any) { - // Try to find in global variables - const sfdxClientKeyVarName = `SFDX_CLIENT_KEY_${orgAlias}`; - if (process.env[sfdxClientKeyVarName]) { - return process.env[sfdxClientKeyVarName]; - } - const sfdxClientKeyVarNameUpper = sfdxClientKeyVarName.toUpperCase(); - if (process.env[sfdxClientKeyVarNameUpper]) { - return process.env[sfdxClientKeyVarNameUpper]; - } - if (process.env.SFDX_CLIENT_KEY) { - console.warn( - c.yellow( - `[sfdx-hardis] If you use CI on multiple branches & orgs, you should better define CI variable ${c.bold( - sfdxClientKeyVarNameUpper, - )} than SFDX_CLIENT_KEY`, - ), - ); - console.warn(c.yellow(`See CI authentication doc at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/`)); - return process.env.SFDX_CLIENT_KEY; - } - // Try to find in config files ONLY IN LOCAL MODE (in CI, it's supposed to be a CI variable) - if (!isCI && config.devHubSfdxClientKey) { - return config.devHubSfdxClientKey; - } - if (isCI) { - console.error( - c.red(`[sfdx-hardis] You must set env variable ${c.bold(sfdxClientKeyVarNameUpper)} with the value of SSH private key encryption key`), - ); - console.error(c.red(`See CI authentication doc at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/`)); - } - return null; -} - -// Try to find certificate key file for sfdx connected app in different locations -async function getCertificateKeyFile(orgAlias: string, config: any) { - 
const filesToTry = [ - `./config/branches/.jwt/${orgAlias}.key`, - `./config/.jwt/${orgAlias}.key`, - `./ssh/${orgAlias}.key`, - `./.ssh/${orgAlias}.key`, - "./ssh/server.key", - ]; - for (const file of filesToTry) { - if (fs.existsSync(file)) { - // Decrypt SSH private key and write a temporary file - const sshKey = await getKey(orgAlias, config); - if (sshKey == null) { - continue; - } - const tmpSshKeyFile = path.join(await createTempDir(), `${orgAlias}.key`); - await decryptFile(file, tmpSshKeyFile, sshKey); - return tmpSshKeyFile; - } - } - if (isCI) { - console.error(c.red(`[sfdx-hardis] You must put a certificate key to connect via JWT.Possible locations:\n -${filesToTry.join("\n -")}`)); - console.error(c.red(`See CI authentication doc at https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-auth/`)); - } - return null; -} +export default hook; diff --git a/src/hooks/prerun/check-dependencies.ts b/src/hooks/prerun/check-dependencies.ts index 13d40dce2..3d4def688 100644 --- a/src/hooks/prerun/check-dependencies.ts +++ b/src/hooks/prerun/check-dependencies.ts @@ -1,19 +1,25 @@ /* jscpd:ignore-start */ +import { Hook } from '@oclif/core'; -import * as os from "os"; -import { checkSfdxPlugin, git, uxLog, isCI, checkAppDependency, isGitRepo } from "../../common/utils"; -import { getConfig } from "../../config"; - -export const hook = async (options: any) => { +const hook: Hook<'prerun'> = async (options) => { // Skip hooks from other commands than hardis commands - const commandId = options?.Command?.id || ""; - if (!commandId.startsWith("hardis")) { + const commandId = options?.Command?.id || ''; + if (!commandId.startsWith('hardis')) { return; } - if (commandId.startsWith("hardis:doc") || commandId.startsWith("hardis:org:files") || commandId.startsWith("hardis:org:data")) { + if ( + commandId.startsWith('hardis:doc') || + commandId.startsWith('hardis:org:files') || + commandId.startsWith('hardis:org:data') + ) { return; } + // Dynamic imports to improve 
perfs + const os = await import('os'); + const { checkSfdxPlugin, git, uxLog, isCI, checkAppDependency, isGitRepo } = await import('../../common/utils/index.js'); + const { getConfig } = await import('../../config/index.js'); + /* jscpd:ignore-end */ // Check Git config and complete it if necessary (asynchronously so the script is not stopped) if (!isCI && isGitRepo()) { @@ -22,48 +28,206 @@ export const hook = async (options: any) => { .then(async (gitConfig) => { const allConfigs = gitConfig.all; // User - if (allConfigs["user.name"] == null) { + if (allConfigs['user.name'] == null) { const username = os.userInfo().username; - await git({ output: true }).addConfig("user.name", username); - uxLog(this, `Defined ${username} as git user.name`); + await git({ output: true }).addConfig('user.name', username); + uxLog("log", this, `Defined ${username} as git user.name`); } // Email - if (allConfigs["user.email"] == null) { - const config = await getConfig("user"); - const email = config.userEmail || "default@cloudity.com"; - await git({ output: true }).addConfig("user.email", email); - uxLog(this, `Defined ${email} as git user.email` + (email === "default@cloudity.com") ? " (temporary)" : ""); + if (allConfigs['user.email'] == null) { + const config = await getConfig('user'); + const email = config.userEmail || 'default@cloudity.com'; + await git({ output: true }).addConfig('user.email', email); + uxLog("log", this, `Defined ${email} as git user.email` + (email === 'default@cloudity.com') ? 
' (temporary)' : ''); } // Manage special characters in git file / folder names - if (allConfigs["core.quotepath"] == null || allConfigs["core.quotepath"] == "true") { - await git({ output: true }).addConfig("core.quotepath", "false"); - uxLog(this, `Defined "false" as git core.quotepath`); + if (allConfigs['core.quotepath'] == null || allConfigs['core.quotepath'] == 'true') { + await git({ output: true }).addConfig('core.quotepath', 'false'); + uxLog("log", this, `Defined "false" as git core.quotepath`); } // Merge tool - if (allConfigs["merge.tool"] == null) { - await git({ output: true }).addConfig("merge.tool", "vscode"); - await git({ output: true }).addConfig("mergetool.vscode.cmd", "code --wait $MERGED"); - uxLog(this, "Defined vscode as git merge tool "); + if (allConfigs['merge.tool'] == null) { + await git({ output: true }).addConfig('merge.tool', 'vscode'); + await git({ output: true }).addConfig('mergetool.vscode.cmd', 'code --wait $MERGED'); + uxLog("log", this, 'Defined vscode as git merge tool '); } // Diff tool - if (allConfigs["diff.tool"] == null) { - await git({ output: true }).addConfig("diff.tool", "vscode"); - await git({ output: true }).addConfig("difftool.vscode.cmd", "code --wait --diff $LOCAL $REMOTE"); - uxLog(this, "Defined vscode as git diff tool "); + if (allConfigs['diff.tool'] == null) { + await git({ output: true }).addConfig('diff.tool', 'vscode'); + await git({ output: true }).addConfig('difftool.vscode.cmd', 'code --wait --diff $LOCAL $REMOTE'); + uxLog("log", this, 'Defined vscode as git diff tool '); } }); } // Check required sfdx-plugins to be installed - const requiresSfdxPlugins = options?.Command?.requiresSfdxPlugins || []; + const requiresSfdxPlugins = (options?.Command as any)?.requiresSfdxPlugins || []; for (const sfdxPluginName of requiresSfdxPlugins) { await checkSfdxPlugin(sfdxPluginName); } // Check required dependencies installed - const requiresDependencies = options?.Command?.requiresDependencies || []; - 
requiresDependencies.push("git"); + const requiresDependencies = (options?.Command as any).requiresDependencies || []; + requiresDependencies.push('git'); for (const appName of requiresDependencies) { await checkAppDependency(appName); } + + // Check if gitignore and forceignore are right + + if (!options.argv.includes('--json')) { + await manageGitIgnoreForceIgnore(commandId); + } }; + +async function manageGitIgnoreForceIgnore(commandId: string) { + // Dynamic imports to improve performances when other CLI commands are called + const { isCI, isMonitoringJob, uxLog } = await import('../../common/utils/index.js'); + // Run this command only during a monitoring job, or a Release Manager local operation + const isMon = await isMonitoringJob(); + if ( + !((isMon && commandId.includes('backup')) || commandId.startsWith("hardis:project:configure:auth") || commandId.startsWith("hardis:doc:mkdocs-to-salesforce") || commandId.startsWith("hardis:doc:project2markdown")) + ) { + return; + } + // Dynamic imports to improve performances when other CLI commands are called + const c = (await import('chalk')).default; + const fs = (await import('fs-extra')).default; + const { getConfig, setConfig } = await import('../../config/index.js'); + const { prompts } = await import('../../common/utils/prompts.js'); + + const config = await getConfig('user'); + // Manage .gitignore + if (!config.skipUpdateGitIgnore === true) { + const gitIgnoreFile = './.gitignore'; + if (fs.existsSync(gitIgnoreFile)) { + const gitIgnore = await fs.readFile(gitIgnoreFile, 'utf-8'); + const gitIgnoreLines = gitIgnore + .replace('\r\n', '\n') + .split('\n') + .map((line) => line.trim()) + .filter((line) => line !== ''); + let updated = false; + for (const gitIgnoreMandatoryLine of await getHardisGitRepoIgnoreContent()) { + if (!gitIgnoreLines.includes(gitIgnoreMandatoryLine)) { + gitIgnoreLines.push(gitIgnoreMandatoryLine); + updated = true; + } + } + // Remove duplicates + const gitIgnoreLinesUnique = 
Array.from(new Set(gitIgnoreLines)); + // Propose user to apply updates + if ((updated || gitIgnoreLines.length !== gitIgnoreLinesUnique.length) && !isCI) { + const confirm = await prompts({ + type: 'select', + name: 'value', + initial: true, + message: c.cyanBright('Your .gitignore is deprecated, do you agree to upgrade it ?'), + description: 'Updates your .gitignore file with latest sfdx-hardis best practices and removes duplicate entries', + choices: [ + { title: 'Yes', value: 'true' }, + { title: 'No ', value: 'false' }, + { title: 'Never ask again ', value: 'never' }, + ], + }); + if (confirm.value === 'true' || isCI) { + await fs.writeFile(gitIgnoreFile, gitIgnoreLinesUnique.join('\n') + '\n', 'utf-8'); + uxLog("action", this, c.cyan('[sfdx-hardis] Updated .gitignore')); + } + if (confirm.value === 'never') { + await setConfig('project', { skipUpdateGitIgnore: true }); + } + } + } + } + + // Manage .forceignore + if (!config.skipUpdateForceIgnore === true) { + const forceIgnoreFile = './.forceignore'; + if (fs.existsSync(forceIgnoreFile)) { + const forceIgnore = await fs.readFile(forceIgnoreFile, 'utf-8'); + const forceIgnoreLines = forceIgnore + .replace('\r\n', '\n') + .split('\n') + .map((line) => line.trim()) + .filter((line) => line !== ''); + let updated = false; + for (const forceIgnoreMandatoryLine of await getHardisForceIgnoreContent()) { + if (!forceIgnoreLines.includes(forceIgnoreMandatoryLine)) { + forceIgnoreLines.push(forceIgnoreMandatoryLine); + updated = true; + } + } + // Remove duplicates + const forceIgnoreLinesUnique = Array.from(new Set(forceIgnoreLines)); + // Propose user to apply updates + /* jscpd:ignore-start */ + if ((updated || forceIgnoreLines.length !== forceIgnoreLinesUnique.length) && !isCI) { + const confirm = await prompts({ + type: 'select', + name: 'value', + initial: true, + message: c.cyanBright('Your .forceignore is deprecated, do you agree to upgrade it ?'), + description: 'Updates your .forceignore file with latest 
sfdx-hardis best practices and removes duplicate entries', + choices: [ + { title: 'Yes', value: 'true' }, + { title: 'No ', value: 'false' }, + { title: 'Never ask again ', value: 'never' }, + ], + }); + /* jscpd:ignore-end */ + if (confirm.value === 'true' || isCI) { + await fs.writeFile(forceIgnoreFile, forceIgnoreLinesUnique.join('\n') + '\n', 'utf-8'); + uxLog("action", this, c.cyan('[sfdx-hardis] Updated .forceignore')); + } + if (confirm.value === 'never') { + await setConfig('project', { skipUpdateForceIgnore: true }); + } + } + } + } +} + +async function getHardisGitRepoIgnoreContent() { + const gitIgnoreContent = [ + '.cache/', + 'config/user/', + 'hardis-report/', + 'site/', + 'tmp/', + '**/__tests__/**', + // Metadatas to be ignored + '**/cleanDataServices/', + '**/siteDotComSites/*.site', + // SFDX Items to be ignored + '**/data/**/source/**', + '**/data/**/target/**', + 'force-app/main/default/appMenus/AppSwitcher.appMenu-meta.xml', + ]; + return gitIgnoreContent; +} + +async function getHardisForceIgnoreContent() { + const forceIgnoreContent = [ + '**/appMenu/**', + '**/appSwitcher/**', + '**/appMenus/AppSwitcher.appMenu-meta.xml', + + '**/connectedApps/**', + '**/certs/**', + '**/profilePasswordPolicies/**', + + //"**/objectTranslations/**", + // "**/profiles/**", + // "**/settings/**", + + '**/jsconfig.json', + '**/.eslintrc.json', + + '**/__tests__/**', + '**SfdxHardisDeferSharingRecalc**', + ]; + return forceIgnoreContent; +} + +export default hook; diff --git a/src/settings.ts b/src/settings.ts index cba61076f..4409d931f 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -1,3 +1,7 @@ -import * as path from "path"; +import * as path from 'path'; +import { fileURLToPath } from 'url'; -export const PACKAGE_ROOT_DIR = path.resolve(__dirname, ".."); +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +export const PACKAGE_ROOT_DIR = path.resolve(__dirname, '..'); diff --git a/test/.eslintrc.cjs 
b/test/.eslintrc.cjs new file mode 100644 index 000000000..e2cf76d5d --- /dev/null +++ b/test/.eslintrc.cjs @@ -0,0 +1,19 @@ +module.exports = { + extends: '../.eslintrc.json', + // Allow describe and it + env: { mocha: true }, + rules: { + // Allow assert style expressions. i.e. expect(true).to.be.true + 'no-unused-expressions': 'off', + + // It is common for tests to stub out method. + + // Return types are defined by the source code. Allows for quick overwrites. + '@typescript-eslint/explicit-function-return-type': 'off', + // Mocked out the methods that shouldn't do anything in the tests. + '@typescript-eslint/no-empty-function': 'off', + // Easily return a promise in a mocked method. + '@typescript-eslint/require-await': 'off', + header: 'off', + }, +}; diff --git a/test/.sfdx-hardis.yml b/test/.sfdx-hardis.yml index fe4014a79..c2c3004f3 100644 --- a/test/.sfdx-hardis.yml +++ b/test/.sfdx-hardis.yml @@ -7,13 +7,13 @@ customCommands: label: Generate manifest icon: file.svg tooltip: Generates a manifest package.xml using local sfdx source files - command: sfdx force:source:manifest:create --sourcepath force-app --manifestname myNewManifest + command: sf project manifest create --source-path force-app --name myNewManifest helpUrl: https://megalinter.io/ - id: list-all-orgs label: List all orgs icon: salesforce.svg tooltip: List all orgs that has already been authenticated using sfdx - command: sfdx force:org:list --all + command: sf org list --all - id: custom-menu-2 label: Another custom menu commands: diff --git a/test/commands/hello/world.nut.ts b/test/commands/hello/world.nut.ts new file mode 100644 index 000000000..e7f219988 --- /dev/null +++ b/test/commands/hello/world.nut.ts @@ -0,0 +1,27 @@ +import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; +import { expect } from 'chai'; +import { HelloWorldResult } from '../../../src/commands/hello/world.js'; + +let testSession: TestSession; + +describe('hello world NUTs', () => { + before('prepare 
session', async () => { + testSession = await TestSession.create(); + }); + + after(async () => { + await testSession?.clean(); + }); + + it('should say hello to the world', () => { + const result = execCmd('hello world --json', { ensureExitCode: 0 }).jsonOutput?.result; + expect(result?.name).to.equal('World'); + }); + + it('should say hello to a given person', () => { + const result = execCmd('hello world --name Astro --json', { + ensureExitCode: 0, + }).jsonOutput?.result; + expect(result?.name).to.equal('Astro'); + }); +}); diff --git a/test/commands/hello/world.test.ts b/test/commands/hello/world.test.ts new file mode 100644 index 000000000..f2d1ae529 --- /dev/null +++ b/test/commands/hello/world.test.ts @@ -0,0 +1,45 @@ +import { TestContext } from '@salesforce/core/testSetup'; +import { expect } from 'chai'; +import { stubSfCommandUx } from '@salesforce/sf-plugins-core'; +import World from '../../../src/commands/hello/world.js'; + +describe('hello world', () => { + const $$ = new TestContext(); + let sfCommandStubs: ReturnType; + + beforeEach(() => { + sfCommandStubs = stubSfCommandUx($$.SANDBOX); + }); + + afterEach(() => { + $$.restore(); + }); + + it('runs hello world', async () => { + await World.run([]); + const output = sfCommandStubs.log + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(output).to.include('Hello World'); + }); + + it('runs hello world with --json and no provided name', async () => { + const result = await World.run([]); + expect(result.name).to.equal('World'); + }); + + it('runs hello world --name Astro', async () => { + await World.run(['--name', 'Astro']); + const output = sfCommandStubs.log + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(output).to.include('Hello Astro'); + }); + + it('runs hello world --name Astro --json', async () => { + const result = await World.run(['--name', 'Astro', '--json']); + expect(result.name).to.equal('Astro'); + }); +}); diff --git a/test/common/utils/deltaUtils.test.ts 
b/test/common/utils/deltaUtils.test.ts new file mode 100644 index 000000000..5f2d36264 --- /dev/null +++ b/test/common/utils/deltaUtils.test.ts @@ -0,0 +1,333 @@ +import { expect } from 'chai'; +import fs from 'fs-extra'; +import path from 'path'; +import * as os from 'node:os' +import { mkdtemp } from 'node:fs/promises'; +import * as xml2js from 'xml2js'; + +import { extendPackageFileWithDependencies } from '../../../src/common/utils/deltaUtils.js'; + +describe('deployUtils.extendPackageFileWithDependencies', async () => { + const tmpDir = await mkdtemp(path.join(os.tmpdir(), 'extendPackageFileWithDependencies')); + const deltaXmlFile = path.join(tmpDir, 'deltaPackage.xml'); + const fullXmlFile = path.join(tmpDir, 'package.xml'); + + before(async () => { + await fs.ensureDir(tmpDir); + fs.writeFileSync( + fullXmlFile, + ` + + + Opportunity + SomeDataType__mdt + CustomObject + + + Opportunity-Some layout 1 + Opportunity-Some layout 2 + Layout + + + Opportunity-de + Opportunity-jp + Account-de + CustomObjectTranslation + + + SomeDataType.record_one + SomeDataType.record_two + SomeAnotherDataType.record_one + SomeAnotherDataType.record_two + CustomMetadata + + + SomeDataType__mdt.SomeField1__c + SomeDataType__mdt.SomeField2__c + CustomField + + + Opportunity.Type1 + Opportunity.Type2 + RecordType + + + LeadConvertSettings + LeadConvertSettings + + + de + fr + Translations + + 63.0 + `, + 'utf8' + ); + }); + + after(async () => { + await fs.remove(tmpDir); + }); + + const expectXmlEquals = async (expectedXmlString, deltaXmlFile) => { + const fileXml = await xml2js.parseStringPromise(fs.readFileSync(deltaXmlFile, 'utf8')); + const expectedXml = await xml2js.parseStringPromise(expectedXmlString); + expect(fileXml.Package.types).to.have.deep.members(expectedXml.Package.types); + } + + it('should add all custom metadata records if CustomField was changed', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + SomeDataType__mdt.SomeField__c + CustomField + + 63.0 + 
`, + 'utf8' + ); + + const expectedXmlString = ` + + + SomeDataType.record_one + SomeDataType.record_two + CustomMetadata + + + SomeDataType__mdt.SomeField__c + CustomField + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should add object translation, record types, LeadConvertSettings if Opportunity.CustomField was changed', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + Opportunity.SomeField__c + CustomField + + 63.0 + `, + 'utf8' + ); + + const expectedXmlString = ` + + + Opportunity.SomeField__c + CustomField + + + Opportunity-de + Opportunity-fr + CustomObjectTranslation + + + Opportunity.Type1 + Opportunity.Type2 + RecordType + + + LeadConvertSettings + LeadConvertSettings + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should add object translations to any layout', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + Opportunity-Some layout 1 + Opportunity-Some layout 2 + Layout + + 63.0 + `, + 'utf8' + ); + + const expectedXmlString = ` + + + Opportunity-Some layout 1 + Opportunity-Some layout 2 + Layout + + + Opportunity-de + Opportunity-fr + CustomObjectTranslation + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should add object translations to validation rules', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + Case.PreventInvalidQuotes + ValidationRule + + 63.0 + `, + 'utf8' + ); + + const expectedXmlString = ` + + + Case.PreventInvalidQuotes + ValidationRule + + + Case-de + Case-fr + CustomObjectTranslation + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should add object translations to any 
object', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + WorkPlan + CustomObject + + 63.0 + `, + 'utf8' + ); + + const expectedXmlString = ` + + + WorkPlan + CustomObject + + + WorkPlan-de + WorkPlan-fr + CustomObjectTranslation + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should add all fields when custom metadata record changes', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + SomeDataType.record_one + CustomMetadata + + 63.0 + `, + 'utf8' + ); + + const expectedXmlString = ` + + + SomeDataType.record_one + CustomMetadata + + + SomeDataType__mdt.SomeField1__c + SomeDataType__mdt.SomeField2__c + CustomField + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should add global translations to custom labels', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + + SomeLabel + CustomLabel + + + Global_quick_action + QuickAction + + 63.0 + `, + 'utf8' + ); + + const expectedXmlString = ` + + + SomeLabel + CustomLabel + + + Global_quick_action + QuickAction + + + de + fr + Translations + + 63.0 + `; + + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + await expectXmlEquals(expectedXmlString, deltaXmlFile); + }); + + it('should not fail when package is empty', async () => { + fs.writeFileSync( + deltaXmlFile, + ` + + `, + 'utf8' + ); + await extendPackageFileWithDependencies(deltaXmlFile, fullXmlFile); + }); +}); + diff --git a/test/common/utils/deployTips.test.ts b/test/common/utils/deployTips.test.ts new file mode 100644 index 000000000..395e29584 --- /dev/null +++ b/test/common/utils/deployTips.test.ts @@ -0,0 +1,118 @@ +import { expect } from 'chai'; +import { analyzeDeployErrorLogs } from '../../../src/common/utils/deployTips.js'; +// import { stubSfCommandUx } from '@salesforce/sf-plugins-core'; 
+import { TestContext } from '@salesforce/core/testSetup'; + +describe('Deployment Tips', () => { + + const $$ = new TestContext(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + //let sfCommandStubs: ReturnType; + + beforeEach(() => { + // sfCommandStubs = stubSfCommandUx($$.SANDBOX); + }); + + afterEach(() => { + $$.restore(); + }); + + it('Finds a single issue in deployment output log', async () => { + const sampleOutput = ` +───── Deploying Metadata (dry-run) ───── +Stages: +1. Preparing +2. Waiting for the org to respond +3. Deploying Metadata +4. Running Tests +5. Updating Source Tracking +6. Done +▶ Preparing… + Deploying (dry-run) v59.0 metadata to mathieu.rodrigues@oxxo.com.integ using the v62.0 SOAP API. + Deploy ID: xxxx + Target Org: mathieu.rodrigues@oxxo.com.integ + Deploy URL: https://xxx-xxx-5344--integ.sandbox.my.salesforce.com/lightning/setup/DeployStatus/page?address=%2Fchangemgmt%2FmonitorDeploymentsDetails.apexp%3FasyncId%3D0AfKJ0000062A1M0AU%26retURL%3D%252Fchangemgmt%252FmonitorDeployment.apexp + Size: 55.67 KB of ~39 MB limit + Files: 34 of 10,000 limit +✔ Preparing (68ms) +◯ Waiting for the org to respond - Skipped +▶ Deploying Metadata… + Components: 2/43 (5%) + Components: 21/43 (49%) + Components: 41/43 (95%) +✘ Deploying Metadata (11.83s) + Components: 41/42 (98%) +Deploying (dry-run) v59.0 metadata to mathieu.rodrigues@oxxo.com.integ using the v62.0 SOAP API. 
+Status: Failed +Deploy ID: 0AfKJ0000062A1M0AU +Target Org: mathieu.rodrigues@xxx.com.integ +Deploy URL: https://xxx-xxx-5344--integ.sandbox.my.salesforce.com/lightning/setup/DeployStatus/page?address=%2Fchangemgmt%2FmonitorDeploymentsDetails.apexp%3FasyncId%3D0AfKJ0000062A1M0AU%26retURL%3D%252Fchangemgmt%252FmonitorDeployment.apexp +Size: 55.67 KB of ~39 MB limit +Files: 34 of 10,000 limit +Elapsed time: 11.90s +Component Failures [1] + Type Name Problem Line:Column +------------------------------------------------------------------------------------------ + Error Sales_Leader FormFactors must be Large for Salesforce Classic apps. +Test Results Summary +Passing: 0 +Failing: 0 +Total: 0 +Code Coverage formats, [json-summary], written to coverage/coverage/ +Dry-run complete. +Warning: GlobalValueSet, SousFamille__gvs, returned from org, but not found in the local project +Warning: GlobalValueSet, Fonction__gvs, returned from org, but not found in the local project + `; + const { errorsAndTips } = await analyzeDeployErrorLogs(sampleOutput, true, { check: true }); + expect(errorsAndTips).to.be.length.greaterThanOrEqual(1); + }); + + it('Add default issue in case of problem parsing error output', async () => { + const sampleOutput = `───── Deploying Metadata (dry-run) ───── +Stages: +1. Preparing +2. Waiting for the org to respond +3. Deploying Metadata +4. Running Tests +5. Updating Source Tracking +6. Done +▶ Preparing… + Deploying (dry-run) v59.0 metadata to mathieu.rodrigues@oxxo.com.integ using the v62.0 SOAP API. 
+ Deploy ID: 0AfKJ0000063Eq60AE + Target Org: mathieu.rodrigues@xxx.com.integ + Deploy URL: https://xxx-xxx-5344--integ.sandbox.my.salesforce.com/lightning/setup/DeployStatus/page?address=%2Fchangemgmt%2FmonitorDeploymentsDetails.apexp%3FasyncId%3D0AfKJ0000063Eq60AE%26retURL%3D%252Fchangemgmt%252FmonitorDeployment.apexp + Size: 55.67 KB of ~39 MB limit + Files: 34 of 10,000 limit +✔ Preparing (65ms) +▶ Waiting for the org to respond… +✔ Waiting for the org to respond (3.03s) +▶ Deploying Metadata… + Components: 6/43 (14%) + Components: 21/43 (49%) + Components: 40/43 (93%) +✘ Deploying Metadata (11.29s) + Components: 40/42 (95%) +Deploying (dry-run) v59.0 metadata to mathieu.rodrigues@oxxo.com.integ using the v62.0 SOAP API. +Status: Failed +Deploy ID: 0AfKJ0000063Eq60AE +Target Org: mathieu.rodrigues@xxx.com.integ +Deploy URL: https://xxx-xxx-5344--integ.sandbox.my.salesforce.com/lightning/setup/DeployStatus/page?address=%2Fchangemgmt%2FmonitorDeploymentsDetails.apexp%3FasyncId%3D0AfKJ0000063Eq60AE%26retURL%3D%252Fchangemgmt%252FmonitorDeployment.apexp +Size: 55.67 KB of ~39 MB limit +Files: 34 of 10,000 limit +Elapsed time: 14.39s +Component Failures [2] + Type Name Problem Line:Column +------------------------------------------------------------------------------------------------------------------------------------------------------------ + MatchingRule Lead.Rule_correspondance_Rule_duplication_Pistes_Sample Before you change a matching rule, you must deactivate it. (3:20) 3:20 + CustomApplication Sales_Leader FormFactors must be Large for Salesforce Classic apps. 
+Test Results Summary +Passing: 0 +Failing: 0 +Total: 0 +Code Coverage formats, [json-summary], written to coverage/coverage/ +Dry-run complete.`; + const { errorsAndTips } = await analyzeDeployErrorLogs(sampleOutput, true, { check: true }); + expect(errorsAndTips).to.be.length.greaterThanOrEqual(1); + }); + +}); \ No newline at end of file diff --git a/test/tsconfig.json b/test/tsconfig.json index 9df43c15e..2cc75cfe8 100644 --- a/test/tsconfig.json +++ b/test/tsconfig.json @@ -1,3 +1,12 @@ { - "extends": "../tsconfig" -} + "extends": "@salesforce/dev-config/tsconfig-test-strict-esm", + "include": [ + "./**/*.ts" + ], + "compilerOptions": { + "noImplicitAny": false, + "noImplicitThis": false, + "strictPropertyInitialization": false, + "skipLibCheck": true + } +} \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json index d6c5062a6..4d65e3fd1 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,18 +1,14 @@ { + "extends": "@salesforce/dev-config/tsconfig-strict-esm.js", "compilerOptions": { - "moduleResolution": "node", - "module": "commonjs", - "target": "es2019", - "lib": ["es2019"], - "alwaysStrict": true, - "noUnusedLocals": true, - "sourceMap": true, - "declaration": true, - "outDir": "./lib", - "importHelpers": true, - "resolveJsonModule": true, - "types": ["node", "mocha"], + "outDir": "lib", + "rootDir": "src", + "noImplicitAny": false, + "noImplicitThis": false, + "strictPropertyInitialization": false, "skipLibCheck": true }, - "include": ["./src/**/*"] -} + "include": [ + "./src/**/*.ts" + ] +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 85cac707b..865c57094 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,371 +2,1081 @@ # yarn lockfile v1 -"@actions/github@^5.1.1": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@actions/github/-/github-5.1.1.tgz#40b9b9e1323a5efcf4ff7dadd33d8ea51651bbcb" - integrity sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g== 
+"@actions/github@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@actions/github/-/github-6.0.1.tgz#76e5f96df062c90635a7181ef45ff1c4ac21306e" + integrity sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw== + dependencies: + "@actions/http-client" "^2.2.0" + "@octokit/core" "^5.0.1" + "@octokit/plugin-paginate-rest" "^9.2.2" + "@octokit/plugin-rest-endpoint-methods" "^10.4.0" + "@octokit/request" "^8.4.1" + "@octokit/request-error" "^5.1.1" + undici "^5.28.5" + +"@actions/http-client@^2.2.0": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.2.3.tgz#31fc0b25c0e665754ed39a9f19a8611fc6dab674" + integrity sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA== dependencies: - "@actions/http-client" "^2.0.1" - "@octokit/core" "^3.6.0" - "@octokit/plugin-paginate-rest" "^2.17.0" - "@octokit/plugin-rest-endpoint-methods" "^5.13.0" + tunnel "^0.0.6" + undici "^5.25.4" -"@actions/http-client@^2.0.1": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.1.1.tgz#a8e97699c315bed0ecaeaaeb640948470d4586a0" - integrity sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw== +"@ampproject/remapping@^2.2.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" + integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw== dependencies: - tunnel "^0.0.6" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.24" -"@adobe/node-fetch-retry@^1.1.2": - version "1.1.2" - resolved "https://registry.npmjs.org/@adobe/node-fetch-retry/-/node-fetch-retry-1.1.2.tgz" - integrity sha512-Fg57i/0otfuuL5wciBKMEd1NR6tz0sGSOx6nsF5+6QX6zI5kCOqnadj5i/28xWC4Y04Iby/yQ+AAFqtaGyBGwg== +"@anthropic-ai/sdk@^0.56.0": + version 
"0.56.0" + resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.56.0.tgz#8b6366d5d22235c3ec978c05b2c9420fdf426ed9" + integrity sha512-SLCB8M8+VMg1cpCucnA1XWHGWqVSZtIWzmOdDOEu3eTFZMB+A0sGZ1ESO5MHDnqrNTXz3safMrWx9x4rMZSOqA== + +"@asamuzakjp/css-color@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@asamuzakjp/css-color/-/css-color-3.2.0.tgz#cc42f5b85c593f79f1fa4f25d2b9b321e61d1794" + integrity sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw== dependencies: - abort-controller "^3.0.0" - node-fetch "^2.6.1" + "@csstools/css-calc" "^2.1.3" + "@csstools/css-color-parser" "^3.0.9" + "@csstools/css-parser-algorithms" "^3.0.4" + "@csstools/css-tokenizer" "^3.0.3" + lru-cache "^10.4.3" -"@amplitude/identify@^1.5.0": - version "1.5.0" - resolved "https://registry.npmjs.org/@amplitude/identify/-/identify-1.5.0.tgz" - integrity sha512-GCDxvwZvfFInZQ6m5LYnb9sVmJP+n+Re6vkBdtclp9uTNVPpdsXtdw2ann5Ts5pV10fivDpgdZ3OHehSxe742A== - dependencies: - "@amplitude/types" "^1.5.0" - "@amplitude/utils" "^1.5.0" - tslib "^1.9.3" - -"@amplitude/node@^1.3.2": - version "1.5.0" - resolved "https://registry.npmjs.org/@amplitude/node/-/node-1.5.0.tgz" - integrity sha512-/jfmYHanhln/OAuL8QG76EjThnaRDnfxLxO4oYstMFZKRktnUahE5i0sH02+tJ7I2nzq+4KTYY2hc8Rvi+Vt4Q== - dependencies: - "@amplitude/identify" "^1.5.0" - "@amplitude/types" "^1.5.0" - "@amplitude/utils" "^1.5.0" - tslib "^1.9.3" - -"@amplitude/types@^1.5.0": - version "1.5.0" - resolved "https://registry.npmjs.org/@amplitude/types/-/types-1.5.0.tgz" - integrity sha512-XspuOsUzUcxwAptHeGiIn4giuLWs285xTJa7h8kAEEynxtEI3/krWCoDYZSB9PekaPXB6phxiO/tMd9t5V9LgQ== - -"@amplitude/utils@^1.5.0": - version "1.5.0" - resolved "https://registry.npmjs.org/@amplitude/utils/-/utils-1.5.0.tgz" - integrity sha512-1DrDJkb4dVX+FiBXhGpO2Dn2cRKdP+gtrVR8vZcE8wz/V2XxUI3DDx7uQbIS6WbQf6swv6Uo2eMHYtrwebostw== - dependencies: - "@amplitude/types" "^1.5.0" - tslib "^1.9.3" - 
-"@babel/code-frame@7.12.11": - version "7.12.11" - resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz" - integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== - dependencies: - "@babel/highlight" "^7.10.4" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz" - integrity sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g== - dependencies: - "@babel/highlight" "^7.12.13" - -"@babel/code-frame@^7.22.13": - version "7.22.13" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" - integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== - dependencies: - "@babel/highlight" "^7.22.13" - chalk "^2.4.2" - -"@babel/compat-data@^7.13.8": - version "7.13.8" - resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.13.8.tgz" - integrity sha512-EaI33z19T4qN3xLXsGf48M2cDqa6ei9tPZlfLdb2HC+e/cFtREiRd8hdSqDbwdLB0/+gLwqJmCYASH0z2bUdog== - -"@babel/core@^7.7.5": - version "7.13.10" - resolved "https://registry.npmjs.org/@babel/core/-/core-7.13.10.tgz" - integrity sha512-bfIYcT0BdKeAZrovpMqX2Mx5NrgAckGbwT982AkdS5GNfn3KMGiprlBAtmBcFZRUmpaufS6WZFP8trvx8ptFDw== - dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.13.9" - "@babel/helper-compilation-targets" "^7.13.10" - "@babel/helper-module-transforms" "^7.13.0" - "@babel/helpers" "^7.13.10" - "@babel/parser" "^7.13.10" - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.13.0" - "@babel/types" "^7.13.0" - convert-source-map "^1.7.0" +"@aws-crypto/crc32@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-5.2.0.tgz#cfcc22570949c98c6689cfcbd2d693d36cdae2e1" + integrity 
sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/crc32c@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32c/-/crc32c-5.2.0.tgz#4e34aab7f419307821509a98b9b08e84e0c1917e" + integrity sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/sha1-browser@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz#b0ee2d2821d3861f017e965ef3b4cb38e3b6a0f4" + integrity sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg== + dependencies: + "@aws-crypto/supports-web-crypto" "^5.2.0" + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-crypto/sha256-browser@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz#153895ef1dba6f9fce38af550e0ef58988eb649e" + integrity sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw== + dependencies: + "@aws-crypto/sha256-js" "^5.2.0" + "@aws-crypto/supports-web-crypto" "^5.2.0" + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-crypto/sha256-js@5.2.0", "@aws-crypto/sha256-js@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz#c4fdb773fdbed9a664fc1a95724e206cf3860042" + integrity sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" 
"^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/supports-web-crypto@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz#a1e399af29269be08e695109aa15da0a07b5b5fb" + integrity sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg== + dependencies: + tslib "^2.6.2" + +"@aws-crypto/util@5.2.0", "@aws-crypto/util@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-5.2.0.tgz#71284c9cffe7927ddadac793c14f14886d3876da" + integrity sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ== + dependencies: + "@aws-sdk/types" "^3.222.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-sdk/client-cloudfront@^3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-cloudfront/-/client-cloudfront-3.864.0.tgz#00ae05fc18282c8036b76db19db74266f2b9ecd3" + integrity sha512-DpUrEonVs7I7J+tXjvllVNCg7e9mnQwg8eMWqc7S7/c82SUC+U2Xqe6M5Ey5pWkCghy0GDgnn+iUA43W7H0WKg== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.864.0" + "@aws-sdk/credential-provider-node" "3.864.0" + "@aws-sdk/middleware-host-header" "3.862.0" + "@aws-sdk/middleware-logger" "3.862.0" + "@aws-sdk/middleware-recursion-detection" "3.862.0" + "@aws-sdk/middleware-user-agent" "3.864.0" + "@aws-sdk/region-config-resolver" "3.862.0" + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-endpoints" "3.862.0" + "@aws-sdk/util-user-agent-browser" "3.862.0" + "@aws-sdk/util-user-agent-node" "3.864.0" + "@aws-sdk/xml-builder" "3.862.0" + "@smithy/config-resolver" "^4.1.5" + "@smithy/core" "^3.8.0" + "@smithy/fetch-http-handler" "^5.1.1" + "@smithy/hash-node" "^4.0.5" + "@smithy/invalid-dependency" "^4.0.5" + "@smithy/middleware-content-length" "^4.0.5" + "@smithy/middleware-endpoint" "^4.1.18" + "@smithy/middleware-retry" "^4.1.19" + 
"@smithy/middleware-serde" "^4.0.9" + "@smithy/middleware-stack" "^4.0.5" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/node-http-handler" "^4.1.1" + "@smithy/protocol-http" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.26" + "@smithy/util-defaults-mode-node" "^4.0.26" + "@smithy/util-endpoints" "^3.0.7" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-retry" "^4.0.7" + "@smithy/util-stream" "^4.2.4" + "@smithy/util-utf8" "^4.0.0" + "@smithy/util-waiter" "^4.0.7" + tslib "^2.6.2" + +"@aws-sdk/client-s3@^3.850.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.864.0.tgz#ffbcbf0ba861fad711261b4174da3be19b1c7d5f" + integrity sha512-QGYi9bWliewxumsvbJLLyx9WC0a4DP4F+utygBcq0zwPxaM0xDfBspQvP1dsepi7mW5aAjZmJ2+Xb7X0EhzJ/g== + dependencies: + "@aws-crypto/sha1-browser" "5.2.0" + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.864.0" + "@aws-sdk/credential-provider-node" "3.864.0" + "@aws-sdk/middleware-bucket-endpoint" "3.862.0" + "@aws-sdk/middleware-expect-continue" "3.862.0" + "@aws-sdk/middleware-flexible-checksums" "3.864.0" + "@aws-sdk/middleware-host-header" "3.862.0" + "@aws-sdk/middleware-location-constraint" "3.862.0" + "@aws-sdk/middleware-logger" "3.862.0" + "@aws-sdk/middleware-recursion-detection" "3.862.0" + "@aws-sdk/middleware-sdk-s3" "3.864.0" + "@aws-sdk/middleware-ssec" "3.862.0" + "@aws-sdk/middleware-user-agent" "3.864.0" + "@aws-sdk/region-config-resolver" "3.862.0" + "@aws-sdk/signature-v4-multi-region" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-endpoints" "3.862.0" + "@aws-sdk/util-user-agent-browser" "3.862.0" + "@aws-sdk/util-user-agent-node" "3.864.0" + "@aws-sdk/xml-builder" "3.862.0" + "@smithy/config-resolver" 
"^4.1.5" + "@smithy/core" "^3.8.0" + "@smithy/eventstream-serde-browser" "^4.0.5" + "@smithy/eventstream-serde-config-resolver" "^4.1.3" + "@smithy/eventstream-serde-node" "^4.0.5" + "@smithy/fetch-http-handler" "^5.1.1" + "@smithy/hash-blob-browser" "^4.0.5" + "@smithy/hash-node" "^4.0.5" + "@smithy/hash-stream-node" "^4.0.5" + "@smithy/invalid-dependency" "^4.0.5" + "@smithy/md5-js" "^4.0.5" + "@smithy/middleware-content-length" "^4.0.5" + "@smithy/middleware-endpoint" "^4.1.18" + "@smithy/middleware-retry" "^4.1.19" + "@smithy/middleware-serde" "^4.0.9" + "@smithy/middleware-stack" "^4.0.5" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/node-http-handler" "^4.1.1" + "@smithy/protocol-http" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.26" + "@smithy/util-defaults-mode-node" "^4.0.26" + "@smithy/util-endpoints" "^3.0.7" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-retry" "^4.0.7" + "@smithy/util-stream" "^4.2.4" + "@smithy/util-utf8" "^4.0.0" + "@smithy/util-waiter" "^4.0.7" + "@types/uuid" "^9.0.1" + tslib "^2.6.2" + uuid "^9.0.1" + +"@aws-sdk/client-sso@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.864.0.tgz#4099313516d61ed61791551c6f0683259b9cbf5e" + integrity sha512-THiOp0OpQROEKZ6IdDCDNNh3qnNn/kFFaTSOiugDpgcE5QdsOxh1/RXq7LmHpTJum3cmnFf8jG59PHcz9Tjnlw== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.864.0" + "@aws-sdk/middleware-host-header" "3.862.0" + "@aws-sdk/middleware-logger" "3.862.0" + "@aws-sdk/middleware-recursion-detection" "3.862.0" + "@aws-sdk/middleware-user-agent" "3.864.0" + "@aws-sdk/region-config-resolver" "3.862.0" + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-endpoints" "3.862.0" + 
"@aws-sdk/util-user-agent-browser" "3.862.0" + "@aws-sdk/util-user-agent-node" "3.864.0" + "@smithy/config-resolver" "^4.1.5" + "@smithy/core" "^3.8.0" + "@smithy/fetch-http-handler" "^5.1.1" + "@smithy/hash-node" "^4.0.5" + "@smithy/invalid-dependency" "^4.0.5" + "@smithy/middleware-content-length" "^4.0.5" + "@smithy/middleware-endpoint" "^4.1.18" + "@smithy/middleware-retry" "^4.1.19" + "@smithy/middleware-serde" "^4.0.9" + "@smithy/middleware-stack" "^4.0.5" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/node-http-handler" "^4.1.1" + "@smithy/protocol-http" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.26" + "@smithy/util-defaults-mode-node" "^4.0.26" + "@smithy/util-endpoints" "^3.0.7" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-retry" "^4.0.7" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/core@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/core/-/core-3.864.0.tgz#5ea4e400bb479faf4e0aa71a32ec89e8a3f2ceaf" + integrity sha512-LFUREbobleHEln+Zf7IG83lAZwvHZG0stI7UU0CtwyuhQy5Yx0rKksHNOCmlM7MpTEbSCfntEhYi3jUaY5e5lg== + dependencies: + "@aws-sdk/types" "3.862.0" + "@aws-sdk/xml-builder" "3.862.0" + "@smithy/core" "^3.8.0" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/property-provider" "^4.0.5" + "@smithy/protocol-http" "^5.1.3" + "@smithy/signature-v4" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-utf8" "^4.0.0" + fast-xml-parser "5.2.5" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-env@3.864.0": + version "3.864.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.864.0.tgz#32e048eafaad51e3c67ef34d1310cc19f2f67c38" + integrity sha512-StJPOI2Rt8UE6lYjXUpg6tqSZaM72xg46ljPg8kIevtBAAfdtq9K20qT/kSliWGIBocMFAv0g2mC0hAa+ECyvg== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/property-provider" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-http@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-http/-/credential-provider-http-3.864.0.tgz#e312b137c1fdce87adb5140b039516c077726f5c" + integrity sha512-E/RFVxGTuGnuD+9pFPH2j4l6HvrXzPhmpL8H8nOoJUosjx7d4v93GJMbbl1v/fkDLqW9qN4Jx2cI6PAjohA6OA== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/fetch-http-handler" "^5.1.1" + "@smithy/node-http-handler" "^4.1.1" + "@smithy/property-provider" "^4.0.5" + "@smithy/protocol-http" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/util-stream" "^4.2.4" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-ini@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.864.0.tgz#3149745e91d030f191ad618e7ee15c92101ad24e" + integrity sha512-PlxrijguR1gxyPd5EYam6OfWLarj2MJGf07DvCx9MAuQkw77HBnsu6+XbV8fQriFuoJVTBLn9ROhMr/ROAYfUg== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/credential-provider-env" "3.864.0" + "@aws-sdk/credential-provider-http" "3.864.0" + "@aws-sdk/credential-provider-process" "3.864.0" + "@aws-sdk/credential-provider-sso" "3.864.0" + "@aws-sdk/credential-provider-web-identity" "3.864.0" + "@aws-sdk/nested-clients" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/credential-provider-imds" "^4.0.7" + "@smithy/property-provider" "^4.0.5" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-node@3.864.0": + version 
"3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.864.0.tgz#d01277b53ac179d2ea97ba16147ba0cb3f710aae" + integrity sha512-2BEymFeXURS+4jE9tP3vahPwbYRl0/1MVaFZcijj6pq+nf5EPGvkFillbdBRdc98ZI2NedZgSKu3gfZXgYdUhQ== + dependencies: + "@aws-sdk/credential-provider-env" "3.864.0" + "@aws-sdk/credential-provider-http" "3.864.0" + "@aws-sdk/credential-provider-ini" "3.864.0" + "@aws-sdk/credential-provider-process" "3.864.0" + "@aws-sdk/credential-provider-sso" "3.864.0" + "@aws-sdk/credential-provider-web-identity" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/credential-provider-imds" "^4.0.7" + "@smithy/property-provider" "^4.0.5" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-process@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.864.0.tgz#5f39e34a084cfa07966874955fa3aa0f966bcf15" + integrity sha512-Zxnn1hxhq7EOqXhVYgkF4rI9MnaO3+6bSg/tErnBQ3F8kDpA7CFU24G1YxwaJXp2X4aX3LwthefmSJHwcVP/2g== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/property-provider" "^4.0.5" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-sso@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.864.0.tgz#1556640016f9bd3dd1c2e140270098a75c922ca3" + integrity sha512-UPyPNQbxDwHVGmgWdGg9/9yvzuedRQVF5jtMkmP565YX9pKZ8wYAcXhcYdNPWFvH0GYdB0crKOmvib+bmCuwkw== + dependencies: + "@aws-sdk/client-sso" "3.864.0" + "@aws-sdk/core" "3.864.0" + "@aws-sdk/token-providers" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/property-provider" "^4.0.5" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-web-identity@3.864.0": + version 
"3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.864.0.tgz#5cf54ec064957552e4c8c9070fd2b313f152a776" + integrity sha512-nNcjPN4SYg8drLwqK0vgVeSvxeGQiD0FxOaT38mV2H8cu0C5NzpvA+14Xy+W6vT84dxgmJYKk71Cr5QL2Oz+rA== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/nested-clients" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/property-provider" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-bucket-endpoint@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.862.0.tgz#8d318eccfa987cfa4e6c5f62539d99bcbe6dec30" + integrity sha512-Wcsc7VPLjImQw+CP1/YkwyofMs9Ab6dVq96iS8p0zv0C6YTaMjvillkau4zFfrrrTshdzFWKptIFhKK8Zsei1g== + dependencies: + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-arn-parser" "3.804.0" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + "@smithy/util-config-provider" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-expect-continue@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.862.0.tgz#f53c28c41f63859362797fd76e993365b598d0ba" + integrity sha512-oG3AaVUJ+26p0ESU4INFn6MmqqiBFZGrebST66Or+YBhteed2rbbFl7mCfjtPWUFgquQlvT1UP19P3LjQKeKpw== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-flexible-checksums@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.864.0.tgz#fcbb40ae1513f96185ec961693c0f55ec1f4da18" + integrity sha512-MvakvzPZi9uyP3YADuIqtk/FAcPFkyYFWVVMf5iFs/rCdk0CUzn02Qf4CSuyhbkS6Y0KrAsMgKR4MgklPU79Wg== + dependencies: + "@aws-crypto/crc32" "5.2.0" + "@aws-crypto/crc32c" "5.2.0" + "@aws-crypto/util" "5.2.0" + 
"@aws-sdk/core" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/is-array-buffer" "^4.0.0" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-stream" "^4.2.4" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-host-header@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.862.0.tgz#9b5fa0ad4c17a84816b4bfde7cda949116374042" + integrity sha512-jDje8dCFeFHfuCAxMDXBs8hy8q9NCTlyK4ThyyfAj3U4Pixly2mmzY2u7b7AyGhWsjJNx8uhTjlYq5zkQPQCYw== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-location-constraint@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.862.0.tgz#d55babadc9f9b7150c56b028fc6953021a5a565a" + integrity sha512-MnwLxCw7Cc9OngEH3SHFhrLlDI9WVxaBkp3oTsdY9JE7v8OE38wQ9vtjaRsynjwu0WRtrctSHbpd7h/QVvtjyA== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-logger@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.862.0.tgz#fba26924421135c824dec7e1cd0f75990a588fdb" + integrity sha512-N/bXSJznNBR/i7Ofmf9+gM6dx/SPBK09ZWLKsW5iQjqKxAKn/2DozlnE54uiEs1saHZWoNDRg69Ww4XYYSlG1Q== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-recursion-detection@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.862.0.tgz#d83433251e550b7ed9cd731a447c92aaec378f01" + integrity sha512-KVoo3IOzEkTq97YKM4uxZcYFSNnMkhW/qj22csofLegZi5fk90ztUnnaeKfaEJHfHp/tm1Y3uSoOXH45s++kKQ== + dependencies: + "@aws-sdk/types" "3.862.0" + 
"@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-sdk-s3@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.864.0.tgz#5142210471ed702452277ad653af483147c42598" + integrity sha512-GjYPZ6Xnqo17NnC8NIQyvvdzzO7dm+Ks7gpxD/HsbXPmV2aEfuFveJXneGW9e1BheSKFff6FPDWu8Gaj2Iu1yg== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-arn-parser" "3.804.0" + "@smithy/core" "^3.8.0" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/protocol-http" "^5.1.3" + "@smithy/signature-v4" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/util-config-provider" "^4.0.0" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-stream" "^4.2.4" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-ssec@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.862.0.tgz#d6c7d03c966cb6642acec8c7f046afd3a72c0f7c" + integrity sha512-72VtP7DZC8lYTE2L3Efx2BrD98oe9WTK8X6hmd3WTLkbIjvgWQWIdjgaFXBs8WevsXkewIctfyA3KEezvL5ggw== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/middleware-user-agent@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.864.0.tgz#7c8a5e7f09eb2855f9a045cdfeee56e099e15552" + integrity sha512-wrddonw4EyLNSNBrApzEhpSrDwJiNfjxDm5E+bn8n32BbAojXASH8W8jNpxz/jMgNkkJNxCfyqybGKzBX0OhbQ== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-endpoints" "3.862.0" + "@smithy/core" "^3.8.0" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/nested-clients@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/nested-clients/-/nested-clients-3.864.0.tgz#8d8b7e8e481649ae0f6ef37339b07cd8f6405e74" + integrity 
sha512-H1C+NjSmz2y8Tbgh7Yy89J20yD/hVyk15hNoZDbCYkXg0M358KS7KVIEYs8E2aPOCr1sK3HBE819D/yvdMgokA== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.864.0" + "@aws-sdk/middleware-host-header" "3.862.0" + "@aws-sdk/middleware-logger" "3.862.0" + "@aws-sdk/middleware-recursion-detection" "3.862.0" + "@aws-sdk/middleware-user-agent" "3.864.0" + "@aws-sdk/region-config-resolver" "3.862.0" + "@aws-sdk/types" "3.862.0" + "@aws-sdk/util-endpoints" "3.862.0" + "@aws-sdk/util-user-agent-browser" "3.862.0" + "@aws-sdk/util-user-agent-node" "3.864.0" + "@smithy/config-resolver" "^4.1.5" + "@smithy/core" "^3.8.0" + "@smithy/fetch-http-handler" "^5.1.1" + "@smithy/hash-node" "^4.0.5" + "@smithy/invalid-dependency" "^4.0.5" + "@smithy/middleware-content-length" "^4.0.5" + "@smithy/middleware-endpoint" "^4.1.18" + "@smithy/middleware-retry" "^4.1.19" + "@smithy/middleware-serde" "^4.0.9" + "@smithy/middleware-stack" "^4.0.5" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/node-http-handler" "^4.1.1" + "@smithy/protocol-http" "^5.1.3" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-body-length-node" "^4.0.0" + "@smithy/util-defaults-mode-browser" "^4.0.26" + "@smithy/util-defaults-mode-node" "^4.0.26" + "@smithy/util-endpoints" "^3.0.7" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-retry" "^4.0.7" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@aws-sdk/region-config-resolver@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.862.0.tgz#99e7942be513abacb715d06781e6f4d62b3e9cf2" + integrity sha512-VisR+/HuVFICrBPY+q9novEiE4b3mvDofWqyvmxHcWM7HumTz9ZQSuEtnlB/92GVM3KDUrR9EmBHNRrfXYZkcQ== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/types" 
"^4.3.2" + "@smithy/util-config-provider" "^4.0.0" + "@smithy/util-middleware" "^4.0.5" + tslib "^2.6.2" + +"@aws-sdk/signature-v4-multi-region@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.864.0.tgz#75e24f5382aa77b7e629f8feb366bcf2a358ffb8" + integrity sha512-w2HIn/WIcUyv1bmyCpRUKHXB5KdFGzyxPkp/YK5g+/FuGdnFFYWGfcO8O+How4jwrZTarBYsAHW9ggoKvwr37w== + dependencies: + "@aws-sdk/middleware-sdk-s3" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/protocol-http" "^5.1.3" + "@smithy/signature-v4" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/token-providers@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.864.0.tgz#c5f88c34bf268435a5b64b7814193c63ae330a68" + integrity sha512-gTc2QHOBo05SCwVA65dUtnJC6QERvFaPiuppGDSxoF7O5AQNK0UR/kMSenwLqN8b5E1oLYvQTv3C1idJLRX0cg== + dependencies: + "@aws-sdk/core" "3.864.0" + "@aws-sdk/nested-clients" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/property-provider" "^4.0.5" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/types@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.862.0.tgz#2f5622e1aa3a5281d4f419f5d2c90f87dd5ff0cf" + integrity sha512-Bei+RL0cDxxV+lW2UezLbCYYNeJm6Nzee0TpW0FfyTRBhH9C1XQh4+x+IClriXvgBnRquTMMYsmJfvx8iyLKrg== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@aws-sdk/types@^3.222.0": + version "3.840.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.840.0.tgz#aadc6843d5c1f24b3d1d228059e702a355bf07c3" + integrity sha512-xliuHaUFZxEx1NSXeLLZ9Dyu6+EJVQKEoD+yM+zqUo3YDZ7medKJWY6fIOKiPX/N7XbLdBYwajb15Q7IL8KkeA== + dependencies: + "@smithy/types" "^4.3.1" + tslib "^2.6.2" + +"@aws-sdk/util-arn-parser@3.804.0": + version "3.804.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.804.0.tgz#d0b52bf5f9ae5b2c357a635551e5844dcad074c8" + integrity sha512-wmBJqn1DRXnZu3b4EkE6CWnoWMo1ZMvlfkqU5zPz67xx1GMaXlDCchFvKAXMjk4jn/L1O3tKnoFDNsoLV1kgNQ== + dependencies: + tslib "^2.6.2" + +"@aws-sdk/util-endpoints@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.862.0.tgz#d66975bbedc1899721e3bf2a548fadfaee2ba2ee" + integrity sha512-eCZuScdE9MWWkHGM2BJxm726MCmWk/dlHjOKvkM0sN1zxBellBMw5JohNss1Z8/TUmnW2gb9XHTOiHuGjOdksA== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + "@smithy/util-endpoints" "^3.0.7" + tslib "^2.6.2" + +"@aws-sdk/util-locate-window@^3.0.0": + version "3.804.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.804.0.tgz#a2ee8dc5d9c98276986e8e1ba03c0c84d9afb0f5" + integrity sha512-zVoRfpmBVPodYlnMjgVjfGoEZagyRF5IPn3Uo6ZvOZp24chnW/FRstH7ESDHDDRga4z3V+ElUQHKpFDXWyBW5A== + dependencies: + tslib "^2.6.2" + +"@aws-sdk/util-user-agent-browser@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.862.0.tgz#0fc887393f13399bc402e1d8c45d3af3306a322e" + integrity sha512-BmPTlm0r9/10MMr5ND9E92r8KMZbq5ltYXYpVcUbAsnB1RJ8ASJuRoLne5F7mB3YMx0FJoOTuSq7LdQM3LgW3Q== + dependencies: + "@aws-sdk/types" "3.862.0" + "@smithy/types" "^4.3.2" + bowser "^2.11.0" + tslib "^2.6.2" + +"@aws-sdk/util-user-agent-node@3.864.0": + version "3.864.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.864.0.tgz#2fd8276a6d7d0ee3d6fe75421c5565e63ae6a0d5" + integrity sha512-d+FjUm2eJEpP+FRpVR3z6KzMdx1qwxEYDz8jzNKwxYLBBquaBaP/wfoMtMQKAcbrR7aT9FZVZF7zDgzNxUvQlQ== + dependencies: + "@aws-sdk/middleware-user-agent" "3.864.0" + "@aws-sdk/types" "3.862.0" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/types" "^4.3.2" + tslib 
"^2.6.2" + +"@aws-sdk/xml-builder@3.862.0": + version "3.862.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.862.0.tgz#d368c76f0f129d43b3ffbc2dc18f53ddd64ec328" + integrity sha512-6Ed0kmC1NMbuFTEgNmamAUU1h5gShgxL1hBVLbEzUa3trX5aJBz1vU4bXaBTvOYUAnOHtiy1Ml4AMStd6hJnFA== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.26.2", "@babel/code-frame@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.27.1.tgz#200f715e66d52a23b221a9435534a91cc13ad5be" + integrity sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg== + dependencies: + "@babel/helper-validator-identifier" "^7.27.1" + js-tokens "^4.0.0" + picocolors "^1.1.1" + +"@babel/compat-data@^7.27.2": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.28.0.tgz#9fc6fd58c2a6a15243cd13983224968392070790" + integrity sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw== + +"@babel/core@^7.23.9": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.28.0.tgz#55dad808d5bf3445a108eefc88ea3fdf034749a4" + integrity sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.27.1" + "@babel/generator" "^7.28.0" + "@babel/helper-compilation-targets" "^7.27.2" + "@babel/helper-module-transforms" "^7.27.3" + "@babel/helpers" "^7.27.6" + "@babel/parser" "^7.28.0" + "@babel/template" "^7.27.2" + "@babel/traverse" "^7.28.0" + "@babel/types" "^7.28.0" + convert-source-map "^2.0.0" debug "^4.1.0" gensync "^1.0.0-beta.2" - json5 "^2.1.2" - lodash "^4.17.19" - semver "^6.3.0" - source-map "^0.5.0" - -"@babel/generator@^7.13.9": - version "7.13.9" - resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.13.9.tgz" - 
integrity sha512-mHOOmY0Axl/JCTkxTU6Lf5sWOg/v8nUa+Xkt4zMTftX0wqmb6Sh7J8gvcehBw7q0AhrhAR+FDacKjCZ2X8K+Sw== - dependencies: - "@babel/types" "^7.13.0" - jsesc "^2.5.1" - source-map "^0.5.0" - -"@babel/generator@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.3.tgz#86e6e83d95903fbe7613f448613b8b319f330a8e" - integrity "sha1-huboPZWQP752E/RIYTuLMZ8zCo4= sha512-keeZWAV4LU3tW0qRi19HRpabC/ilM0HRBBzf9/k8FFiG4KVpiv0FIy4hHfLfFQZNhziCTPTmd59zoyv6DNISzg==" - dependencies: - "@babel/types" "^7.23.3" - "@jridgewell/gen-mapping" "^0.3.2" - "@jridgewell/trace-mapping" "^0.3.17" - jsesc "^2.5.1" - -"@babel/helper-compilation-targets@^7.13.10": - version "7.13.10" - resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.13.10.tgz" - integrity sha512-/Xju7Qg1GQO4mHZ/Kcs6Au7gfafgZnwm+a7sy/ow/tV1sHeraRUHbjdat8/UvDor4Tez+siGKDk6zIKtCPKVJA== - dependencies: - "@babel/compat-data" "^7.13.8" - "@babel/helper-validator-option" "^7.12.17" - browserslist "^4.14.5" - semver "^6.3.0" - -"@babel/helper-environment-visitor@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" - integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== - -"@babel/helper-function-name@^7.23.0": - version "7.23.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" - integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== - dependencies: - "@babel/template" "^7.22.15" - "@babel/types" "^7.23.0" - -"@babel/helper-hoist-variables@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" - integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-member-expression-to-functions@^7.13.0": - version "7.13.0" - resolved "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.0.tgz" - integrity sha512-yvRf8Ivk62JwisqV1rFRMxiSMDGnN6KH1/mDMmIrij4jztpQNRoHqqMG3U6apYbGRPJpgPalhva9Yd06HlUxJQ== - dependencies: - "@babel/types" "^7.13.0" - -"@babel/helper-module-imports@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz" - integrity sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g== - dependencies: - "@babel/types" "^7.12.13" - -"@babel/helper-module-transforms@^7.13.0": - version "7.13.0" - resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.13.0.tgz" - integrity sha512-Ls8/VBwH577+pw7Ku1QkUWIyRRNHpYlts7+qSqBBFCW3I8QteB9DxfcZ5YJpOwH6Ihe/wn8ch7fMGOP1OhEIvw== - dependencies: - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-replace-supers" "^7.13.0" - "@babel/helper-simple-access" "^7.12.13" - "@babel/helper-split-export-declaration" "^7.12.13" - "@babel/helper-validator-identifier" "^7.12.11" - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.13.0" - "@babel/types" "^7.13.0" - lodash "^4.17.19" - -"@babel/helper-optimise-call-expression@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz" - integrity sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA== - dependencies: - "@babel/types" "^7.12.13" - 
-"@babel/helper-replace-supers@^7.13.0": - version "7.13.0" - resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.13.0.tgz" - integrity sha512-Segd5me1+Pz+rmN/NFBOplMbZG3SqRJOBlY+mA0SxAv6rjj7zJqr1AVr3SfzUVTLCv7ZLU5FycOM/SBGuLPbZw== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.13.0" - "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/traverse" "^7.13.0" - "@babel/types" "^7.13.0" - -"@babel/helper-simple-access@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz" - integrity sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA== - dependencies: - "@babel/types" "^7.12.13" - -"@babel/helper-split-export-declaration@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz" - integrity sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg== - dependencies: - "@babel/types" "^7.12.13" - -"@babel/helper-split-export-declaration@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" - integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-string-parser@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" - integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== - -"@babel/helper-validator-identifier@^7.12.11": - version "7.12.11" - resolved 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz" - integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== - -"@babel/helper-validator-identifier@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" - integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== - -"@babel/helper-validator-option@^7.12.17": - version "7.12.17" - resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz" - integrity sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw== - -"@babel/helpers@^7.13.10": - version "7.13.10" - resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.13.10.tgz" - integrity sha512-4VO883+MWPDUVRF3PhiLBUFHoX/bsLTGFpFK/HqvvfBZz2D57u9XzPVNFVBTc0PW/CWR9BXTOKt8NF4DInUHcQ== - dependencies: - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.13.0" - "@babel/types" "^7.13.0" - -"@babel/highlight@^7.10.4", "@babel/highlight@^7.12.13": - version "7.13.10" - resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.13.10.tgz" - integrity sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg== - dependencies: - "@babel/helper-validator-identifier" "^7.12.11" - chalk "^2.0.0" - js-tokens "^4.0.0" + json5 "^2.2.3" + semver "^6.3.1" + +"@babel/generator@^7.28.0": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.28.0.tgz#9cc2f7bd6eb054d77dc66c2664148a0c5118acd2" + integrity sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg== + dependencies: + "@babel/parser" "^7.28.0" + "@babel/types" "^7.28.0" + "@jridgewell/gen-mapping" "^0.3.12" + 
"@jridgewell/trace-mapping" "^0.3.28" + jsesc "^3.0.2" + +"@babel/helper-compilation-targets@^7.27.2": + version "7.27.2" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz#46a0f6efab808d51d29ce96858dd10ce8732733d" + integrity sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ== + dependencies: + "@babel/compat-data" "^7.27.2" + "@babel/helper-validator-option" "^7.27.1" + browserslist "^4.24.0" + lru-cache "^5.1.1" + semver "^6.3.1" + +"@babel/helper-globals@^7.28.0": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@babel/helper-globals/-/helper-globals-7.28.0.tgz#b9430df2aa4e17bc28665eadeae8aa1d985e6674" + integrity sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw== + +"@babel/helper-module-imports@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz#7ef769a323e2655e126673bb6d2d6913bbead204" + integrity sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w== + dependencies: + "@babel/traverse" "^7.27.1" + "@babel/types" "^7.27.1" + +"@babel/helper-module-transforms@^7.27.3": + version "7.27.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz#db0bbcfba5802f9ef7870705a7ef8788508ede02" + integrity sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg== + dependencies: + "@babel/helper-module-imports" "^7.27.1" + "@babel/helper-validator-identifier" "^7.27.1" + "@babel/traverse" "^7.27.3" + +"@babel/helper-string-parser@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz#54da796097ab19ce67ed9f88b47bb2ec49367687" + integrity 
sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA== + +"@babel/helper-validator-identifier@^7.22.20", "@babel/helper-validator-identifier@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz#a7054dcc145a967dd4dc8fee845a57c1316c9df8" + integrity sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow== + +"@babel/helper-validator-option@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz#fa52f5b1e7db1ab049445b421c4471303897702f" + integrity sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg== + +"@babel/helpers@^7.27.6": + version "7.27.6" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.27.6.tgz#6456fed15b2cb669d2d1fabe84b66b34991d812c" + integrity sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug== + dependencies: + "@babel/template" "^7.27.2" + "@babel/types" "^7.27.6" + +"@babel/parser@^7.20.15", "@babel/parser@^7.23.9", "@babel/parser@^7.27.2", "@babel/parser@^7.28.0": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.28.0.tgz#979829fbab51a29e13901e5a80713dbcb840825e" + integrity sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g== + dependencies: + "@babel/types" "^7.28.0" -"@babel/highlight@^7.22.13": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54" - integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== +"@babel/runtime@^7.6.0": + version "7.27.6" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.6.tgz#ec4070a04d76bae8ddbb10770ba55714a417b7c6" + integrity 
sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q== + +"@babel/template@^7.27.2": + version "7.27.2" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.27.2.tgz#fa78ceed3c4e7b63ebf6cb39e5852fca45f6809d" + integrity sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw== + dependencies: + "@babel/code-frame" "^7.27.1" + "@babel/parser" "^7.27.2" + "@babel/types" "^7.27.1" + +"@babel/traverse@^7.27.1", "@babel/traverse@^7.27.3", "@babel/traverse@^7.28.0": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.28.0.tgz#518aa113359b062042379e333db18380b537e34b" + integrity sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg== + dependencies: + "@babel/code-frame" "^7.27.1" + "@babel/generator" "^7.28.0" + "@babel/helper-globals" "^7.28.0" + "@babel/parser" "^7.28.0" + "@babel/template" "^7.27.2" + "@babel/types" "^7.28.0" + debug "^4.3.1" + +"@babel/types@^7.27.1", "@babel/types@^7.27.6", "@babel/types@^7.28.0": + version "7.28.1" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.28.1.tgz#2aaf3c10b31ba03a77ac84f52b3912a0edef4cf9" + integrity sha512-x0LvFTekgSX+83TI28Y9wYPUfzrnl2aT5+5QLnO6v7mSJYtEEevuDRN0F0uSHRk1G1IWZC43o00Y0xDDrpBGPQ== + dependencies: + "@babel/helper-string-parser" "^7.27.1" + "@babel/helper-validator-identifier" "^7.27.1" + +"@cfworker/json-schema@^4.0.2": + version "4.1.1" + resolved "https://registry.yarnpkg.com/@cfworker/json-schema/-/json-schema-4.1.1.tgz#4a2a3947ee9fa7b7c24be981422831b8674c3be6" + integrity sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og== + +"@commitlint/cli@^17.1.2": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/cli/-/cli-17.8.1.tgz#10492114a022c91dcfb1d84dac773abb3db76d33" + integrity 
sha512-ay+WbzQesE0Rv4EQKfNbSMiJJ12KdKTDzIt0tcK4k11FdsWmtwP0Kp1NWMOUswfIWo6Eb7p7Ln721Nx9FLNBjg== + dependencies: + "@commitlint/format" "^17.8.1" + "@commitlint/lint" "^17.8.1" + "@commitlint/load" "^17.8.1" + "@commitlint/read" "^17.8.1" + "@commitlint/types" "^17.8.1" + execa "^5.0.0" + lodash.isfunction "^3.0.9" + resolve-from "5.0.0" + resolve-global "1.0.0" + yargs "^17.0.0" + +"@commitlint/config-conventional@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/config-conventional/-/config-conventional-17.8.1.tgz#e5bcf0cfec8da7ac50bc04dc92e0a4ea74964ce0" + integrity sha512-NxCOHx1kgneig3VLauWJcDWS40DVjg7nKOpBEEK9E5fjJpQqLCilcnKkIIjdBH98kEO1q3NpE5NSrZ2kl/QGJg== + dependencies: + conventional-changelog-conventionalcommits "^6.1.0" + +"@commitlint/config-validator@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/config-validator/-/config-validator-17.8.1.tgz#5cc93b6b49d5524c9cc345a60e5bf74bcca2b7f9" + integrity sha512-UUgUC+sNiiMwkyiuIFR7JG2cfd9t/7MV8VB4TZ+q02ZFkHoduUS4tJGsCBWvBOGD9Btev6IecPMvlWUfJorkEA== + dependencies: + "@commitlint/types" "^17.8.1" + ajv "^8.11.0" + +"@commitlint/ensure@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/ensure/-/ensure-17.8.1.tgz#59183557844999dbb6aab6d03629a3d104d01a8d" + integrity sha512-xjafwKxid8s1K23NFpL8JNo6JnY/ysetKo8kegVM7c8vs+kWLP8VrQq+NbhgVlmCojhEDbzQKp4eRXSjVOGsow== + dependencies: + "@commitlint/types" "^17.8.1" + lodash.camelcase "^4.3.0" + lodash.kebabcase "^4.1.1" + lodash.snakecase "^4.1.1" + lodash.startcase "^4.4.0" + lodash.upperfirst "^4.3.1" + +"@commitlint/execute-rule@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/execute-rule/-/execute-rule-17.8.1.tgz#504ed69eb61044eeb84fdfd10cc18f0dab14f34c" + integrity sha512-JHVupQeSdNI6xzA9SqMF+p/JjrHTcrJdI02PwesQIDCIGUrv04hicJgCcws5nzaoZbROapPs0s6zeVHoxpMwFQ== + +"@commitlint/format@^17.8.1": + version "17.8.1" + resolved 
"https://registry.yarnpkg.com/@commitlint/format/-/format-17.8.1.tgz#6108bb6b4408e711006680649927e1b559bdc5f8" + integrity sha512-f3oMTyZ84M9ht7fb93wbCKmWxO5/kKSbwuYvS867duVomoOsgrgljkGGIztmT/srZnaiGbaK8+Wf8Ik2tSr5eg== + dependencies: + "@commitlint/types" "^17.8.1" + chalk "^4.1.0" + +"@commitlint/is-ignored@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/is-ignored/-/is-ignored-17.8.1.tgz#cf25bcd8409c79684b63f8bdeb35df48edda244e" + integrity sha512-UshMi4Ltb4ZlNn4F7WtSEugFDZmctzFpmbqvpyxD3la510J+PLcnyhf9chs7EryaRFJMdAKwsEKfNK0jL/QM4g== + dependencies: + "@commitlint/types" "^17.8.1" + semver "7.5.4" + +"@commitlint/lint@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/lint/-/lint-17.8.1.tgz#bfc21215f6b18d41d4d43e2aa3cb79a5d7726cd8" + integrity sha512-aQUlwIR1/VMv2D4GXSk7PfL5hIaFSfy6hSHV94O8Y27T5q+DlDEgd/cZ4KmVI+MWKzFfCTiTuWqjfRSfdRllCA== + dependencies: + "@commitlint/is-ignored" "^17.8.1" + "@commitlint/parse" "^17.8.1" + "@commitlint/rules" "^17.8.1" + "@commitlint/types" "^17.8.1" + +"@commitlint/load@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/load/-/load-17.8.1.tgz#fa061e7bfa53281eb03ca8517ca26d66a189030c" + integrity sha512-iF4CL7KDFstP1kpVUkT8K2Wl17h2yx9VaR1ztTc8vzByWWcbO/WaKwxsnCOqow9tVAlzPfo1ywk9m2oJ9ucMqA== + dependencies: + "@commitlint/config-validator" "^17.8.1" + "@commitlint/execute-rule" "^17.8.1" + "@commitlint/resolve-extends" "^17.8.1" + "@commitlint/types" "^17.8.1" + "@types/node" "20.5.1" + chalk "^4.1.0" + cosmiconfig "^8.0.0" + cosmiconfig-typescript-loader "^4.0.0" + lodash.isplainobject "^4.0.6" + lodash.merge "^4.6.2" + lodash.uniq "^4.5.0" + resolve-from "^5.0.0" + ts-node "^10.8.1" + typescript "^4.6.4 || ^5.2.2" + +"@commitlint/message@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/message/-/message-17.8.1.tgz#a5cd226c419be20ee03c3d237db6ac37b95958b3" + integrity 
sha512-6bYL1GUQsD6bLhTH3QQty8pVFoETfFQlMn2Nzmz3AOLqRVfNNtXBaSY0dhZ0dM6A2MEq4+2d7L/2LP8TjqGRkA== + +"@commitlint/parse@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/parse/-/parse-17.8.1.tgz#6e00b8f50ebd63562d25dcf4230da2c9f984e626" + integrity sha512-/wLUickTo0rNpQgWwLPavTm7WbwkZoBy3X8PpkUmlSmQJyWQTj0m6bDjiykMaDt41qcUbfeFfaCvXfiR4EGnfw== + dependencies: + "@commitlint/types" "^17.8.1" + conventional-changelog-angular "^6.0.0" + conventional-commits-parser "^4.0.0" + +"@commitlint/read@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/read/-/read-17.8.1.tgz#b3f28777607c756078356cc133368b0e8c08092f" + integrity sha512-Fd55Oaz9irzBESPCdMd8vWWgxsW3OWR99wOntBDHgf9h7Y6OOHjWEdS9Xzen1GFndqgyoaFplQS5y7KZe0kO2w== + dependencies: + "@commitlint/top-level" "^17.8.1" + "@commitlint/types" "^17.8.1" + fs-extra "^11.0.0" + git-raw-commits "^2.0.11" + minimist "^1.2.6" + +"@commitlint/resolve-extends@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/resolve-extends/-/resolve-extends-17.8.1.tgz#9af01432bf2fd9ce3dd5a00d266cce14e4c977e7" + integrity sha512-W/ryRoQ0TSVXqJrx5SGkaYuAaE/BUontL1j1HsKckvM6e5ZaG0M9126zcwL6peKSuIetJi7E87PRQF8O86EW0Q== dependencies: - "@babel/helper-validator-identifier" "^7.22.20" - chalk "^2.4.2" - js-tokens "^4.0.0" + "@commitlint/config-validator" "^17.8.1" + "@commitlint/types" "^17.8.1" + import-fresh "^3.0.0" + lodash.mergewith "^4.6.2" + resolve-from "^5.0.0" + resolve-global "^1.0.0" + +"@commitlint/rules@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/rules/-/rules-17.8.1.tgz#da49cab1b7ebaf90d108de9f58f684dc4ccb65a0" + integrity sha512-2b7OdVbN7MTAt9U0vKOYKCDsOvESVXxQmrvuVUZ0rGFMCrCPJWWP1GJ7f0lAypbDAhaGb8zqtdOr47192LBrIA== + dependencies: + "@commitlint/ensure" "^17.8.1" + "@commitlint/message" "^17.8.1" + "@commitlint/to-lines" "^17.8.1" + "@commitlint/types" "^17.8.1" + execa "^5.0.0" -"@babel/parser@^7.12.13", 
"@babel/parser@^7.13.10": - version "7.13.10" - resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.13.10.tgz" - integrity sha512-0s7Mlrw9uTWkYua7xWr99Wpk2bnGa0ANleKfksYAES8LpWH4gW1OUr42vqKNf0us5UQNfru2wPqMqRITzq/SIQ== +"@commitlint/to-lines@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/to-lines/-/to-lines-17.8.1.tgz#a5c4a7cf7dff3dbdd69289fc0eb19b66f3cfe017" + integrity sha512-LE0jb8CuR/mj6xJyrIk8VLz03OEzXFgLdivBytoooKO5xLt5yalc8Ma5guTWobw998sbR3ogDd+2jed03CFmJA== -"@babel/parser@^7.22.15", "@babel/parser@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.3.tgz#0ce0be31a4ca4f1884b5786057cadcb6c3be58f9" - integrity "sha1-DOC+MaTKTxiEtXhgV8rctsO+WPk= sha512-uVsWNvlVsIninV2prNz/3lHCb+5CJ+e+IUBfbjToAHODtfGYLfCFuY4AU7TskI+dAKk+njsPiBjq1gKTvZOBaw==" +"@commitlint/top-level@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/top-level/-/top-level-17.8.1.tgz#206d37d6782f33c9572e44fbe3758392fdeea7bc" + integrity sha512-l6+Z6rrNf5p333SHfEte6r+WkOxGlWK4bLuZKbtf/2TXRN+qhrvn1XE63VhD8Oe9oIHQ7F7W1nG2k/TJFhx2yA== + dependencies: + find-up "^5.0.0" -"@babel/runtime@^7.6.0": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d" - integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ== - dependencies: - regenerator-runtime "^0.14.0" - -"@babel/template@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/template/-/template-7.12.13.tgz" - integrity sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA== - dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/parser" "^7.12.13" - "@babel/types" "^7.12.13" - -"@babel/template@^7.22.15": - version "7.22.15" - resolved 
"https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" - integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== - dependencies: - "@babel/code-frame" "^7.22.13" - "@babel/parser" "^7.22.15" - "@babel/types" "^7.22.15" - -"@babel/traverse@^7.13.0": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.3.tgz#26ee5f252e725aa7aca3474aa5b324eaf7908b5b" - integrity "sha1-Ju5fJS5yWqeso0dKpbMk6veQi1s= sha512-+K0yF1/9yR0oHdE0StHuEj3uTPzwwbrLGfNOndVJVV2TqA5+j3oljJUb4nmB954FLGjNem976+B+eDuLIjesiQ==" - dependencies: - "@babel/code-frame" "^7.22.13" - "@babel/generator" "^7.23.3" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-function-name" "^7.23.0" - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.23.3" - "@babel/types" "^7.23.3" - debug "^4.1.0" - globals "^11.1.0" +"@commitlint/types@^17.8.1": + version "17.8.1" + resolved "https://registry.yarnpkg.com/@commitlint/types/-/types-17.8.1.tgz#883a0ad35c5206d5fef7bc6ce1bbe648118af44e" + integrity sha512-PXDQXkAmiMEG162Bqdh9ChML/GJZo6vU+7F03ALKDK8zYc6SuAr47LjG7hGYRqUOz+WK0dU7bQ0xzuqFMdxzeQ== + dependencies: + chalk "^4.1.0" -"@babel/types@^7.12.13", "@babel/types@^7.13.0": - version "7.13.0" - resolved "https://registry.npmjs.org/@babel/types/-/types-7.13.0.tgz" - integrity sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA== +"@cparra/apex-reflection@2.19.0": + version "2.19.0" + resolved "https://registry.yarnpkg.com/@cparra/apex-reflection/-/apex-reflection-2.19.0.tgz#adb6d181ac5027823df8cbb05228a89fa1c56ff0" + integrity sha512-BtZnykjiHxox5XYkGYKZ+lZ+2J168F9cUV40O4TiNHjR0d16bJ+gauzaprNtl8+C31J0lW7ZOPQS7M66QGek7Q== + +"@cparra/apexdocs@^3.14.1": + version "3.14.1" + resolved 
"https://registry.yarnpkg.com/@cparra/apexdocs/-/apexdocs-3.14.1.tgz#8ae939621663616c24d1785462b5635f8743bb93" + integrity sha512-rin57h+bP4voMcIXZo+3SzPiVpJ5DRwFHpi7Geb4jY5YodgBz96jO3VmKN/MT2tEdieg3lW7vOUC4yY6Z5Dzcg== dependencies: - "@babel/helper-validator-identifier" "^7.12.11" - lodash "^4.17.19" - to-fast-properties "^2.0.0" + "@cparra/apex-reflection" "2.19.0" + "@salesforce/source-deploy-retrieve" "^12.20.1" + "@types/js-yaml" "^4.0.9" + "@types/yargs" "^17.0.32" + chalk "^4.1.2" + cosmiconfig "^9.0.0" + cosmiconfig-typescript-loader "^5.0.0" + fast-xml-parser "^4.4.0" + fp-ts "^2.16.8" + handlebars "^4.7.8" + js-yaml "^4.1.0" + minimatch "^10.0.1" + yargs "^17.7.2" -"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.3.tgz#d5ea892c07f2ec371ac704420f4dcdb07b5f9598" - integrity "sha1-1eqJLAfy7DcaxwRCD03NsHtflZg= sha512-OZnvoH2l8PK5eUvEcUyCt/sXgr/h+UWpVuBbOljwcrAgUl6lpchoQ++PHGyQy1AtYnVA6CEq3y5xeEI10brpXw==" +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== dependencies: - "@babel/helper-string-parser" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.20" - to-fast-properties "^2.0.0" + "@jridgewell/trace-mapping" "0.3.9" -"@cspotcode/source-map-consumer@0.8.0": - version "0.8.0" - resolved "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz" - integrity sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg== +"@csstools/color-helpers@^5.0.2": + version "5.0.2" + resolved "https://registry.yarnpkg.com/@csstools/color-helpers/-/color-helpers-5.0.2.tgz#82592c9a7c2b83c293d9161894e2a6471feb97b8" 
+ integrity sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA== -"@cspotcode/source-map-support@0.7.0": - version "0.7.0" - resolved "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz" - integrity sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA== +"@csstools/css-calc@^2.1.3", "@csstools/css-calc@^2.1.4": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@csstools/css-calc/-/css-calc-2.1.4.tgz#8473f63e2fcd6e459838dd412401d5948f224c65" + integrity sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ== + +"@csstools/css-color-parser@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@csstools/css-color-parser/-/css-color-parser-3.0.10.tgz#79fc68864dd43c3b6782d2b3828bc0fa9d085c10" + integrity sha512-TiJ5Ajr6WRd1r8HSiwJvZBiJOqtH86aHpUjq5aEKWHiII2Qfjqd/HCWKPOW8EP4vcspXbHnXrwIDlu5savQipg== dependencies: - "@cspotcode/source-map-consumer" "0.8.0" + "@csstools/color-helpers" "^5.0.2" + "@csstools/css-calc" "^2.1.4" -"@eslint/eslintrc@^0.4.0": - version "0.4.0" - resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.0.tgz" - integrity sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog== +"@csstools/css-parser-algorithms@^3.0.4": + version "3.0.5" + resolved "https://registry.yarnpkg.com/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz#5755370a9a29abaec5515b43c8b3f2cf9c2e3076" + integrity sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ== + +"@csstools/css-tokenizer@^3.0.3": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz#333fedabc3fd1a8e5d0100013731cf19e6a8c5d3" + integrity sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw== + +"@es-joy/jsdoccomment@~0.41.0": + 
version "0.41.0" + resolved "https://registry.yarnpkg.com/@es-joy/jsdoccomment/-/jsdoccomment-0.41.0.tgz#4a2f7db42209c0425c71a1476ef1bdb6dcd836f6" + integrity sha512-aKUhyn1QI5Ksbqcr3fFJj16p99QdjUxXAEuFst1Z47DRyoiMwivIH9MV/ARcJOCXVjPfjITciej8ZD2O/6qUmw== + dependencies: + comment-parser "1.4.1" + esquery "^1.5.0" + jsdoc-type-pratt-parser "~4.0.0" + +"@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": + version "4.7.0" + resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz#607084630c6c033992a082de6e6fbc1a8b52175a" + integrity sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw== + dependencies: + eslint-visitor-keys "^3.4.3" + +"@eslint-community/regexpp@^4.5.1", "@eslint-community/regexpp@^4.6.1": + version "4.12.1" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.12.1.tgz#cfc6cffe39df390a3841cde2abccf92eaa7ae0e0" + integrity sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ== + +"@eslint/eslintrc@^2.1.4": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.4.tgz#388a269f0f25c1b6adc317b5a2c55714894c70ad" + integrity sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ== dependencies: ajv "^6.12.4" - debug "^4.1.1" - espree "^7.3.0" - globals "^12.1.0" - ignore "^4.0.6" + debug "^4.3.2" + espree "^9.6.0" + globals "^13.19.0" + ignore "^5.2.0" import-fresh "^3.2.1" - js-yaml "^3.13.1" - minimatch "^3.0.4" + js-yaml "^4.1.0" + minimatch "^3.1.2" strip-json-comments "^3.1.1" +"@eslint/js@8.57.1": + version "8.57.1" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.1.tgz#de633db3ec2ef6a3c89e2f19038063e8a122e2c2" + integrity sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q== + "@fast-csv/format@4.3.5": version "4.3.5" resolved 
"https://registry.yarnpkg.com/@fast-csv/format/-/format-4.3.5.tgz#90d83d1b47b6aaf67be70d6118f84f3e12ee1ff3" @@ -392,69 +1102,153 @@ lodash.isundefined "^3.0.1" lodash.uniq "^4.5.0" -"@gitbeaker/core@^35.8.0": - version "35.8.0" - resolved "https://registry.yarnpkg.com/@gitbeaker/core/-/core-35.8.0.tgz#8e55950dd6c45e6b48791432a1fa2c13b9460d39" - integrity sha512-l/LgTmPFeUBnqyxU/VbFmqKsanCITBBMp7A0yXVbiTQCvNWSV6JJyUL3ILR3q825RRU/AzRm40FFli0AgBpXTw== +"@fastify/busboy@^2.0.0": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d" + integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== + +"@gitbeaker/core@^35.8.1": + version "35.8.1" + resolved "https://registry.yarnpkg.com/@gitbeaker/core/-/core-35.8.1.tgz#b4ce2d08d344ff50e76c38ff81b800bec6dfe851" + integrity sha512-KBrDykVKSmU9Q9Gly8KeHOgdc0lZSa435srECxuO0FGqqBcUQ82hPqUc13YFkkdOI9T1JRA3qSFajg8ds0mZKA== dependencies: - "@gitbeaker/requester-utils" "^35.8.0" + "@gitbeaker/requester-utils" "^35.8.1" form-data "^4.0.0" li "^1.3.0" mime "^3.0.0" query-string "^7.0.0" xcase "^2.0.1" -"@gitbeaker/node@^35.8.0": - version "35.8.0" - resolved "https://registry.yarnpkg.com/@gitbeaker/node/-/node-35.8.0.tgz#cd6d175ffa119ed323251d6e88c7441a18930b07" - integrity sha512-n8xbGemNs3aZb7gaYsEya0FKxemjyAJ4UyaF2MWM6mrj5rCnL3Y9Siko2rT/AuSJwjx82Z7BdKxV9QH/ihqjOQ== +"@gitbeaker/node@^35.8.1": + version "35.8.1" + resolved "https://registry.yarnpkg.com/@gitbeaker/node/-/node-35.8.1.tgz#d67885c827f2d7405afd7e39538a230721756e5c" + integrity sha512-g6rX853y61qNhzq9cWtxIEoe2KDeFBtXAeWMGWJnc3nz3WRump2pIICvJqw/yobLZqmTNt+ea6w3/n92Mnbn3g== dependencies: - "@gitbeaker/core" "^35.8.0" - "@gitbeaker/requester-utils" "^35.8.0" + "@gitbeaker/core" "^35.8.1" + "@gitbeaker/requester-utils" "^35.8.1" delay "^5.0.0" got "^11.8.3" xcase "^2.0.1" -"@gitbeaker/requester-utils@^35.8.0": - version "35.8.0" - resolved 
"https://registry.yarnpkg.com/@gitbeaker/requester-utils/-/requester-utils-35.8.0.tgz#e4894e2c67e2ae00e5aa5c869a0d87ec190b63d9" - integrity sha512-d/cseQQUvj1V02jXo6HBpuMarf6e6GdrxEaiWrjAiS2nDEQFRGxDGtPHzqgU84aN11nEBFnFa0vaSMqcZG/+9w== +"@gitbeaker/requester-utils@^35.8.1": + version "35.8.1" + resolved "https://registry.yarnpkg.com/@gitbeaker/requester-utils/-/requester-utils-35.8.1.tgz#f345cdd05abd4169cfcd239d202db6283eb17dc8" + integrity sha512-MFzdH+Z6eJaCZA5ruWsyvm6SXRyrQHjYVR6aY8POFraIy7ceIHOprWCs1R+0ydDZ8KtBnd8OTHjlJ0sLtSFJCg== dependencies: form-data "^4.0.0" qs "^6.10.1" xcase "^2.0.1" -"@inquirer/checkbox@^2.4.7": - version "2.4.7" - resolved "https://registry.yarnpkg.com/@inquirer/checkbox/-/checkbox-2.4.7.tgz#0a2867a3a8c5853c79e43e99634e80c1721934ca" - integrity sha512-5YwCySyV1UEgqzz34gNsC38eKxRBtlRDpJLlKcRtTjlYA/yDKuc1rfw+hjw+2WJxbAZtaDPsRl5Zk7J14SBoBw== +"@google/generative-ai@^0.24.0": + version "0.24.1" + resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.24.1.tgz#634a3c06f8ea7a6125c1b0d6c1e66bb11afb52c9" + integrity sha512-MqO+MLfM6kjxcKoy0p1wRzG3b4ZZXtPI+z2IE26UogS2Cm/XHO+7gGRBh6gcJsOiIVoH93UwKvW4HdgiOZCy9Q== + +"@graphql-typed-document-node/core@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== + +"@grpc/grpc-js@^1.13.1": + version "1.13.4" + resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.13.4.tgz#922fbc496e229c5fa66802d2369bf181c1df1c5a" + integrity sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg== + dependencies: + "@grpc/proto-loader" "^0.7.13" + "@js-sdsl/ordered-map" "^4.4.2" + +"@grpc/proto-loader@^0.7.13": + version "0.7.15" + resolved 
"https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.7.15.tgz#4cdfbf35a35461fc843abe8b9e2c0770b5095e60" + integrity sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ== + dependencies: + lodash.camelcase "^4.3.0" + long "^5.0.0" + protobufjs "^7.2.5" + yargs "^17.7.2" + +"@humanwhocodes/config-array@^0.13.0": + version "0.13.0" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.13.0.tgz#fb907624df3256d04b9aa2df50d7aa97ec648748" + integrity sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw== + dependencies: + "@humanwhocodes/object-schema" "^2.0.3" + debug "^4.3.1" + minimatch "^3.0.5" + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^2.0.3": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz#4a2868d75d6d6963e423bcf90b7fd1be343409d3" + integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== + +"@inquirer/checkbox@^2.5.0": + version "2.5.0" + resolved "https://registry.yarnpkg.com/@inquirer/checkbox/-/checkbox-2.5.0.tgz#41c5c9dd332c0a8fa159be23982ce080d0b199d4" + integrity sha512-sMgdETOfi2dUHT8r7TT1BTKOwNvdDGFDXYWtQ2J69SvlYNntk9I/gJe7r5yvMwwsuKnYbuRs3pNhx4tgNck5aA== dependencies: - "@inquirer/core" "^9.0.10" + "@inquirer/core" "^9.1.0" "@inquirer/figures" "^1.0.5" - "@inquirer/type" "^1.5.2" + "@inquirer/type" "^1.5.3" ansi-escapes "^4.3.2" yoctocolors-cjs "^2.1.2" -"@inquirer/confirm@^3.1.22": - version "3.1.22" - resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-3.1.22.tgz#23990624c11f60c6f7a5b0558c7505c35076a037" 
- integrity sha512-gsAKIOWBm2Q87CDfs9fEo7wJT3fwWIJfnDGMn9Qy74gBnNFOACDNfhUzovubbJjWnKLGBln7/NcSmZwj5DuEXg== +"@inquirer/checkbox@^4.2.1": + version "4.2.1" + resolved "https://registry.yarnpkg.com/@inquirer/checkbox/-/checkbox-4.2.1.tgz#45125a32f27c5cfd82a23d5ecf49b4dc137e1247" + integrity sha512-bevKGO6kX1eM/N+pdh9leS5L7TBF4ICrzi9a+cbWkrxeAeIcwlo/7OfWGCDERdRCI2/Q6tjltX4bt07ALHDwFw== dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^10.1.15" + "@inquirer/figures" "^1.0.13" + "@inquirer/type" "^3.0.8" + ansi-escapes "^4.3.2" + yoctocolors-cjs "^2.1.2" -"@inquirer/core@^9.0.10": - version "9.0.10" - resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-9.0.10.tgz#4270191e2ad3bea6223530a093dd9479bcbc7dd0" - integrity sha512-TdESOKSVwf6+YWDz8GhS6nKscwzkIyakEzCLJ5Vh6O3Co2ClhCJ0A4MG909MUWfaWdpJm7DE45ii51/2Kat9tA== +"@inquirer/confirm@^3.1.22", "@inquirer/confirm@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-3.2.0.tgz#6af1284670ea7c7d95e3f1253684cfbd7228ad6a" + integrity sha512-oOIwPs0Dvq5220Z8lGL/6LHRTEr9TgLHmiI99Rj1PJ1p1czTys+olrgBqZk4E2qC0YTzeHprxSQmoHioVdJ7Lw== dependencies: - "@inquirer/figures" "^1.0.5" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" + +"@inquirer/confirm@^5.1.14": + version "5.1.14" + resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-5.1.14.tgz#e6321edf51a3a5f54dc548b80ef6ba89891351ad" + integrity sha512-5yR4IBfe0kXe59r1YCTG8WXkUbl7Z35HK87Sw+WUyGD8wNUx7JvY7laahzeytyE1oLn74bQnL7hstctQxisQ8Q== + dependencies: + "@inquirer/core" "^10.1.15" + "@inquirer/type" "^3.0.8" + +"@inquirer/core@^10.1.15": + version "10.1.15" + resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-10.1.15.tgz#8feb69fd536786181a2b6bfb84d8674faa9d2e59" + integrity sha512-8xrp836RZvKkpNbVvgWUlxjT4CraKk2q+I3Ksy+seI2zkcE+y6wNs1BVhgcv8VyImFecUhdQrYLdW32pAjwBdA== + dependencies: + "@inquirer/figures" "^1.0.13" + "@inquirer/type" 
"^3.0.8" + ansi-escapes "^4.3.2" + cli-width "^4.1.0" + mute-stream "^2.0.0" + signal-exit "^4.1.0" + wrap-ansi "^6.2.0" + yoctocolors-cjs "^2.1.2" + +"@inquirer/core@^9.1.0": + version "9.2.1" + resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-9.2.1.tgz#677c49dee399c9063f31e0c93f0f37bddc67add1" + integrity sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg== + dependencies: + "@inquirer/figures" "^1.0.6" + "@inquirer/type" "^2.0.0" "@types/mute-stream" "^0.0.4" - "@types/node" "^22.1.0" + "@types/node" "^22.5.5" "@types/wrap-ansi" "^3.0.0" ansi-escapes "^4.3.2" - cli-spinners "^2.9.2" cli-width "^4.1.0" mute-stream "^1.0.0" signal-exit "^4.1.0" @@ -462,107 +1256,228 @@ wrap-ansi "^6.2.0" yoctocolors-cjs "^2.1.2" -"@inquirer/editor@^2.1.22": - version "2.1.22" - resolved "https://registry.yarnpkg.com/@inquirer/editor/-/editor-2.1.22.tgz#f97eda20954da1dab47df9f4c3ae11604d56360c" - integrity sha512-K1QwTu7GCK+nKOVRBp5HY9jt3DXOfPGPr6WRDrPImkcJRelG9UTx2cAtK1liXmibRrzJlTWOwqgWT3k2XnS62w== +"@inquirer/editor@^2.2.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@inquirer/editor/-/editor-2.2.0.tgz#a41eb7b151bd9a6bc3c0b69219d02d82547bc387" + integrity sha512-9KHOpJ+dIL5SZli8lJ6xdaYLPPzB8xB9GZItg39MBybzhxA16vxmszmQFrRwbOA918WA2rvu8xhDEg/p6LXKbw== dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" external-editor "^3.1.0" -"@inquirer/expand@^2.1.22": - version "2.1.22" - resolved "https://registry.yarnpkg.com/@inquirer/expand/-/expand-2.1.22.tgz#7593e93a516a49434629c41f3738479c8234d2df" - integrity sha512-wTZOBkzH+ItPuZ3ZPa9lynBsdMp6kQ9zbjVPYEtSBG7UulGjg2kQiAnUjgyG4SlntpTce5bOmXAPvE4sguXjpA== +"@inquirer/editor@^4.2.17": + version "4.2.17" + resolved "https://registry.yarnpkg.com/@inquirer/editor/-/editor-4.2.17.tgz#5af16f6f24f62f552feb05c7bec2dc0743230584" + integrity 
sha512-r6bQLsyPSzbWrZZ9ufoWL+CztkSatnJ6uSxqd6N+o41EZC51sQeWOzI6s5jLb+xxTWxl7PlUppqm8/sow241gg== dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^10.1.15" + "@inquirer/external-editor" "^1.0.1" + "@inquirer/type" "^3.0.8" + +"@inquirer/expand@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@inquirer/expand/-/expand-2.3.0.tgz#afc44aee303315a85563e9d0275e658f0ee0e701" + integrity sha512-qnJsUcOGCSG1e5DTOErmv2BPQqrtT6uzqn1vI/aYGiPKq+FgslGZmtdnXbhuI7IlT7OByDoEEqdnhUnVR2hhLw== + dependencies: + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" yoctocolors-cjs "^2.1.2" -"@inquirer/figures@^1.0.5": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.5.tgz#57f9a996d64d3e3345d2a3ca04d36912e94f8790" - integrity sha512-79hP/VWdZ2UVc9bFGJnoQ/lQMpL74mGgzSYX1xUqCVk7/v73vJCMw1VuyWN1jGkZ9B3z7THAbySqGbCNefcjfA== +"@inquirer/expand@^4.0.17": + version "4.0.17" + resolved "https://registry.yarnpkg.com/@inquirer/expand/-/expand-4.0.17.tgz#b688f4a1a65daf2bf77a11de7734766769cce343" + integrity sha512-PSqy9VmJx/VbE3CT453yOfNa+PykpKg/0SYP7odez1/NWBGuDXgPhp4AeGYYKjhLn5lUUavVS/JbeYMPdH50Mw== + dependencies: + "@inquirer/core" "^10.1.15" + "@inquirer/type" "^3.0.8" + yoctocolors-cjs "^2.1.2" -"@inquirer/input@^2.2.9": - version "2.2.9" - resolved "https://registry.yarnpkg.com/@inquirer/input/-/input-2.2.9.tgz#08fdf9a48e4f6fc64c2d508b9d10afac843f9bd8" - integrity sha512-7Z6N+uzkWM7+xsE+3rJdhdG/+mQgejOVqspoW+w0AbSZnL6nq5tGMEVASaYVWbkoSzecABWwmludO2evU3d31g== +"@inquirer/external-editor@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@inquirer/external-editor/-/external-editor-1.0.1.tgz#ab0a82c5719a963fb469021cde5cd2b74fea30f8" + integrity sha512-Oau4yL24d2B5IL4ma4UpbQigkVhzPDXLoqy1ggK4gnHg/stmkffJE4oOXHXF3uz0UEpywG68KcyXsyYpA1Re/Q== dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + chardet "^2.1.0" + iconv-lite "^0.6.3" -"@inquirer/number@^1.0.10": 
- version "1.0.10" - resolved "https://registry.yarnpkg.com/@inquirer/number/-/number-1.0.10.tgz#ac2b440ca57b5de5a231e4898c12d4453683c055" - integrity sha512-kWTxRF8zHjQOn2TJs+XttLioBih6bdc5CcosXIzZsrTY383PXI35DuhIllZKu7CdXFi2rz2BWPN9l0dPsvrQOA== +"@inquirer/figures@^1.0.13", "@inquirer/figures@^1.0.3", "@inquirer/figures@^1.0.5", "@inquirer/figures@^1.0.6": + version "1.0.13" + resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.13.tgz#ad0afd62baab1c23175115a9b62f511b6a751e45" + integrity sha512-lGPVU3yO9ZNqA7vTYz26jny41lE7yoQansmqdMLBEfqaGsmdg7V3W9mK9Pvb5IL4EVZ9GnSDGMO/cJXud5dMaw== + +"@inquirer/input@^2.2.4", "@inquirer/input@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@inquirer/input/-/input-2.3.0.tgz#9b99022f53780fecc842908f3f319b52a5a16865" + integrity sha512-XfnpCStx2xgh1LIRqPXrTNEEByqQWoxsWYzNRSEUxJ5c6EQlhMogJ3vHKu8aXuTacebtaZzMAHwEL0kAflKOBw== + dependencies: + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" + +"@inquirer/input@^4.2.1": + version "4.2.1" + resolved "https://registry.yarnpkg.com/@inquirer/input/-/input-4.2.1.tgz#c174654eb1ab34dfd42a9cf6095a7e735a4db130" + integrity sha512-tVC+O1rBl0lJpoUZv4xY+WGWY8V5b0zxU1XDsMsIHYregdh7bN5X5QnIONNBAl0K765FYlAfNHS2Bhn7SSOVow== + dependencies: + "@inquirer/core" "^10.1.15" + "@inquirer/type" "^3.0.8" + +"@inquirer/number@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@inquirer/number/-/number-1.1.0.tgz#4dac004021ea67c89552a261564f103a494cac96" + integrity sha512-ilUnia/GZUtfSZy3YEErXLJ2Sljo/mf9fiKc08n18DdwdmDbOzRcTv65H1jjDvlsAuvdFXf4Sa/aL7iw/NanVA== dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" -"@inquirer/password@^2.1.22": - version "2.1.22" - resolved "https://registry.yarnpkg.com/@inquirer/password/-/password-2.1.22.tgz#ec7ee5709923cf285b3e0ae53eed4fdc3c05b422" - integrity 
sha512-5Fxt1L9vh3rAKqjYwqsjU4DZsEvY/2Gll+QkqR4yEpy6wvzLxdSgFhUcxfDAOtO4BEoTreWoznC0phagwLU5Kw== +"@inquirer/number@^3.0.17": + version "3.0.17" + resolved "https://registry.yarnpkg.com/@inquirer/number/-/number-3.0.17.tgz#32a66136ce35cad9f40ceb5f82a8cfac4f306517" + integrity sha512-GcvGHkyIgfZgVnnimURdOueMk0CztycfC8NZTiIY9arIAkeOgt6zG57G+7vC59Jns3UX27LMkPKnKWAOF5xEYg== dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^10.1.15" + "@inquirer/type" "^3.0.8" + +"@inquirer/password@^2.2.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@inquirer/password/-/password-2.2.0.tgz#0b6f26336c259c8a9e5f5a3f2e1a761564f764ba" + integrity sha512-5otqIpgsPYIshqhgtEwSspBQE40etouR8VIxzpJkv9i0dVHIpyhiivbkH9/dGiMLdyamT54YRdGJLfl8TFnLHg== + dependencies: + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" ansi-escapes "^4.3.2" -"@inquirer/prompts@^5.3.8": - version "5.3.8" - resolved "https://registry.yarnpkg.com/@inquirer/prompts/-/prompts-5.3.8.tgz#f394050d95076c2f1b046be324f06f619b257c3e" - integrity sha512-b2BudQY/Si4Y2a0PdZZL6BeJtl8llgeZa7U2j47aaJSCeAl1e4UI7y8a9bSkO3o/ZbZrgT5muy/34JbsjfIWxA== +"@inquirer/password@^4.0.17": + version "4.0.17" + resolved "https://registry.yarnpkg.com/@inquirer/password/-/password-4.0.17.tgz#45480c8ace688ebf071e350536ea746792b3eeba" + integrity sha512-DJolTnNeZ00E1+1TW+8614F7rOJJCM4y4BAGQ3Gq6kQIG+OJ4zr3GLjIjVVJCbKsk2jmkmv6v2kQuN/vriHdZA== dependencies: - "@inquirer/checkbox" "^2.4.7" - "@inquirer/confirm" "^3.1.22" - "@inquirer/editor" "^2.1.22" - "@inquirer/expand" "^2.1.22" - "@inquirer/input" "^2.2.9" - "@inquirer/number" "^1.0.10" - "@inquirer/password" "^2.1.22" - "@inquirer/rawlist" "^2.2.4" - "@inquirer/search" "^1.0.7" - "@inquirer/select" "^2.4.7" - -"@inquirer/rawlist@^2.2.4": - version "2.2.4" - resolved "https://registry.yarnpkg.com/@inquirer/rawlist/-/rawlist-2.2.4.tgz#73d5d4fafa2ca012e6cfb9eb1d8ddf33bab2dde0" - integrity 
sha512-pb6w9pWrm7EfnYDgQObOurh2d2YH07+eDo3xQBsNAM2GRhliz6wFXGi1thKQ4bN6B0xDd6C3tBsjdr3obsCl3Q== - dependencies: - "@inquirer/core" "^9.0.10" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^10.1.15" + "@inquirer/type" "^3.0.8" + ansi-escapes "^4.3.2" + +"@inquirer/prompts@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@inquirer/prompts/-/prompts-5.5.0.tgz#5805aa15a13180017829aa31d071fd37a43b735d" + integrity sha512-BHDeL0catgHdcHbSFFUddNzvx/imzJMft+tWDPwTm3hfu8/tApk1HrooNngB2Mb4qY+KaRWF+iZqoVUPeslEog== + dependencies: + "@inquirer/checkbox" "^2.5.0" + "@inquirer/confirm" "^3.2.0" + "@inquirer/editor" "^2.2.0" + "@inquirer/expand" "^2.3.0" + "@inquirer/input" "^2.3.0" + "@inquirer/number" "^1.1.0" + "@inquirer/password" "^2.2.0" + "@inquirer/rawlist" "^2.3.0" + "@inquirer/search" "^1.1.0" + "@inquirer/select" "^2.5.0" + +"@inquirer/prompts@^7.8.1": + version "7.8.2" + resolved "https://registry.yarnpkg.com/@inquirer/prompts/-/prompts-7.8.2.tgz#5d9d8d7273831bd512e9cfaf3d827cce0f2eae0f" + integrity sha512-nqhDw2ZcAUrKNPwhjinJny903bRhI0rQhiDz1LksjeRxqa36i3l75+4iXbOy0rlDpLJGxqtgoPavQjmmyS5UJw== + dependencies: + "@inquirer/checkbox" "^4.2.1" + "@inquirer/confirm" "^5.1.14" + "@inquirer/editor" "^4.2.17" + "@inquirer/expand" "^4.0.17" + "@inquirer/input" "^4.2.1" + "@inquirer/number" "^3.0.17" + "@inquirer/password" "^4.0.17" + "@inquirer/rawlist" "^4.1.5" + "@inquirer/search" "^3.1.0" + "@inquirer/select" "^4.3.1" + +"@inquirer/rawlist@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@inquirer/rawlist/-/rawlist-2.3.0.tgz#6b2c0da39c1cd855af5608b2d627681cdac7277d" + integrity sha512-zzfNuINhFF7OLAtGHfhwOW2TlYJyli7lOUoJUXw/uyklcwalV6WRXBXtFIicN8rTRK1XTiPWB4UY+YuW8dsnLQ== + dependencies: + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" yoctocolors-cjs "^2.1.2" -"@inquirer/search@^1.0.7": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@inquirer/search/-/search-1.0.7.tgz#72ab9ccfb57f05dd81a8b2df26214588e048be18" - 
integrity sha512-p1wpV+3gd1eST/o5N3yQpYEdFNCzSP0Klrl+5bfD3cTTz8BGG6nf4Z07aBW0xjlKIj1Rp0y3x/X4cZYi6TfcLw== +"@inquirer/rawlist@^4.1.5": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@inquirer/rawlist/-/rawlist-4.1.5.tgz#e3664e3da3fba93f34ee25813faa7957aa717991" + integrity sha512-R5qMyGJqtDdi4Ht521iAkNqyB6p2UPuZUbMifakg1sWtu24gc2Z8CJuw8rP081OckNDMgtDCuLe42Q2Kr3BolA== + dependencies: + "@inquirer/core" "^10.1.15" + "@inquirer/type" "^3.0.8" + yoctocolors-cjs "^2.1.2" + +"@inquirer/search@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@inquirer/search/-/search-1.1.0.tgz#665928cac2326b9501ddafbb8606ce4823b3106b" + integrity sha512-h+/5LSj51dx7hp5xOn4QFnUaKeARwUCLs6mIhtkJ0JYPBLmEYjdHSYh7I6GrLg9LwpJ3xeX0FZgAG1q0QdCpVQ== dependencies: - "@inquirer/core" "^9.0.10" + "@inquirer/core" "^9.1.0" "@inquirer/figures" "^1.0.5" - "@inquirer/type" "^1.5.2" + "@inquirer/type" "^1.5.3" + yoctocolors-cjs "^2.1.2" + +"@inquirer/search@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@inquirer/search/-/search-3.1.0.tgz#22f1373938eef7b98c3c30f604aac8fbe9baf27a" + integrity sha512-PMk1+O/WBcYJDq2H7foV0aAZSmDdkzZB9Mw2v/DmONRJopwA/128cS9M/TXWLKKdEQKZnKwBzqu2G4x/2Nqx8Q== + dependencies: + "@inquirer/core" "^10.1.15" + "@inquirer/figures" "^1.0.13" + "@inquirer/type" "^3.0.8" yoctocolors-cjs "^2.1.2" -"@inquirer/select@^2.4.7": - version "2.4.7" - resolved "https://registry.yarnpkg.com/@inquirer/select/-/select-2.4.7.tgz#6a23742b4f76d228186dfd42571d973def378ffa" - integrity sha512-JH7XqPEkBpNWp3gPCqWqY8ECbyMoFcCZANlL6pV9hf59qK6dGmkOlx1ydyhY+KZ0c5X74+W6Mtp+nm2QX0/MAQ== +"@inquirer/select@^2.5.0": + version "2.5.0" + resolved "https://registry.yarnpkg.com/@inquirer/select/-/select-2.5.0.tgz#345c6908ecfaeef3d84ddd2f9feb2f487c558efb" + integrity sha512-YmDobTItPP3WcEI86GvPo+T2sRHkxxOq/kXmsBjHS5BVXUgvgZ5AfJjkvQvZr03T81NnI3KrrRuMzeuYUQRFOA== dependencies: - "@inquirer/core" "^9.0.10" + "@inquirer/core" "^9.1.0" "@inquirer/figures" "^1.0.5" - 
"@inquirer/type" "^1.5.2" + "@inquirer/type" "^1.5.3" ansi-escapes "^4.3.2" yoctocolors-cjs "^2.1.2" -"@inquirer/type@^1.5.2": - version "1.5.2" - resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-1.5.2.tgz#15f5e4a4dae02c4203650cb07c8a000cdd423939" - integrity sha512-w9qFkumYDCNyDZmNQjf/n6qQuvQ4dMC3BJesY4oF+yr0CxR5vxujflAVeIcS6U336uzi9GM0kAfZlLrZ9UTkpA== +"@inquirer/select@^4.3.1": + version "4.3.1" + resolved "https://registry.yarnpkg.com/@inquirer/select/-/select-4.3.1.tgz#b49e76dab47f7c729e4e1e520fedc268e5b88cdc" + integrity sha512-Gfl/5sqOF5vS/LIrSndFgOh7jgoe0UXEizDqahFRkq5aJBLegZ6WjuMh/hVEJwlFQjyLq1z9fRtvUMkb7jM1LA== + dependencies: + "@inquirer/core" "^10.1.15" + "@inquirer/figures" "^1.0.13" + "@inquirer/type" "^3.0.8" + ansi-escapes "^4.3.2" + yoctocolors-cjs "^2.1.2" + +"@inquirer/type@^1.5.3": + version "1.5.5" + resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-1.5.5.tgz#303ea04ce7ad2e585b921b662b3be36ef7b4f09b" + integrity sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA== + dependencies: + mute-stream "^1.0.0" + +"@inquirer/type@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-2.0.0.tgz#08fa513dca2cb6264fe1b0a2fabade051444e3f6" + integrity sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag== dependencies: mute-stream "^1.0.0" +"@inquirer/type@^3.0.8": + version "3.0.8" + resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-3.0.8.tgz#efc293ba0ed91e90e6267f1aacc1c70d20b8b4e8" + integrity sha512-lg9Whz8onIHRthWaN1Q9EGLa/0LFJjyM8mEUbL1eTi6yMGvBf8gvyDLtxSXztQsxMvhxxNpJYrwa1YHdq+w4Jw== + +"@isaacs/balanced-match@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz#3081dadbc3460661b751e7591d7faea5df39dd29" + integrity sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ== + 
+"@isaacs/brace-expansion@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz#4b3dabab7d8e75a429414a96bd67bf4c1d13e0f3" + integrity sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA== + dependencies: + "@isaacs/balanced-match" "^4.0.1" + "@isaacs/cliui@^8.0.2": version "8.0.2" resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" @@ -575,9 +1490,21 @@ wrap-ansi "^8.1.0" wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" +"@isaacs/fs-minipass@^4.0.0": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz#2d59ae3ab4b38fb4270bfa23d30f8e2e86c7fe32" + integrity sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w== + dependencies: + minipass "^7.0.4" + +"@isaacs/string-locale-compare@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz#291c227e93fd407a96ecd59879a35809120e432b" + integrity sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ== + "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" - resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== dependencies: camelcase "^5.3.1" @@ -586,49 +1513,115 @@ js-yaml "^3.13.1" resolve-from "^5.0.0" -"@istanbuljs/schema@^0.1.2": +"@istanbuljs/schema@^0.1.2", "@istanbuljs/schema@^0.1.3": version "0.1.3" - resolved "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz" + resolved 
"https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== -"@jridgewell/gen-mapping@^0.3.2": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" - integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== +"@jridgewell/gen-mapping@^0.3.12", "@jridgewell/gen-mapping@^0.3.5": + version "0.3.12" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz#2234ce26c62889f03db3d7fea43c1932ab3e927b" + integrity sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg== dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/sourcemap-codec" "^1.5.0" + "@jridgewell/trace-mapping" "^0.3.24" -"@jridgewell/resolve-uri@^3.1.0": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" - integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== +"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== -"@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== +"@jridgewell/sourcemap-codec@^1.4.10", 
"@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.5.0": + version "1.5.4" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz#7358043433b2e5da569aa02cbc4c121da3af27d7" + integrity sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw== -"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": - version "1.4.15" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" - integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.20" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" - integrity sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q== +"@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.28": + version "0.3.29" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz#a58d31eaadaf92c6695680b2e1d464a9b8fbf7fc" + integrity sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ== dependencies: "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" -"@keyv/redis@^2.1.2": - version "2.1.2" - resolved "https://registry.npmjs.org/@keyv/redis/-/redis-2.1.2.tgz" - integrity 
sha512-D6vNOuyH/5cBNfHcyxzck1l7V+Qd4RAT7uz2SHYAjutbXQ03o3SSneRyvrp76H4/uvHyutPWTJ1Za3EpGSVe5g== +"@js-sdsl/ordered-map@^4.4.2": + version "4.4.2" + resolved "https://registry.yarnpkg.com/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz#9299f82874bab9e4c7f9c48d865becbfe8d6907c" + integrity sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw== + +"@jsdoc/salty@^0.2.1": + version "0.2.9" + resolved "https://registry.yarnpkg.com/@jsdoc/salty/-/salty-0.2.9.tgz#4d8c147f7ca011532681ce86352a77a0178f1dec" + integrity sha512-yYxMVH7Dqw6nO0d5NIV8OQWnitU8k6vXH8NtgqAfIa/IUqRMxRv/NUJJ08VEKbAakwxlgBl5PJdrU0dMPStsnw== + dependencies: + lodash "^4.17.21" + +"@jsforce/jsforce-node@^3.10.4": + version "3.10.7" + resolved "https://registry.yarnpkg.com/@jsforce/jsforce-node/-/jsforce-node-3.10.7.tgz#9a377e8da925d1eec713ca43ea0bc1ba6aafd7f6" + integrity sha512-+2E7c/rRqB2QRtjFiBOkJwyZWw1vViQ2eeHIfLnF8WT6t8J5h16KyJ4dMyiw4JJx/WUT7xtNFiZ0kz85LS//cw== + dependencies: + "@sindresorhus/is" "^4" + base64url "^3.0.1" + csv-parse "^5.5.2" + csv-stringify "^6.6.0" + faye "^1.4.0" + form-data "^4.0.4" + https-proxy-agent "^5.0.0" + multistream "^3.1.0" + node-fetch "^2.6.1" + xml2js "^0.6.2" + +"@jsonjoy.com/base64@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jsonjoy.com/base64/-/base64-1.1.2.tgz#cf8ea9dcb849b81c95f14fc0aaa151c6b54d2578" + integrity sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA== + +"@jsonjoy.com/buffers@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@jsonjoy.com/buffers/-/buffers-1.0.0.tgz#ade6895b7d3883d70f87b5743efaa12c71dfef7a" + integrity sha512-NDigYR3PHqCnQLXYyoLbnEdzMMvzeiCWo1KOut7Q0CoIqg9tUAPKJ1iq/2nFhc5kZtexzutNY0LFjdwWL3Dw3Q== + +"@jsonjoy.com/codegen@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@jsonjoy.com/codegen/-/codegen-1.0.0.tgz#5c23f796c47675f166d23b948cdb889184b93207" + integrity 
sha512-E8Oy+08cmCf0EK/NMxpaJZmOxPqM+6iSe2S4nlSBrPZOORoDJILxtbSUEDKQyTamm/BVAhIGllOBNU79/dwf0g== + +"@jsonjoy.com/json-pack@^1.0.3": + version "1.10.0" + resolved "https://registry.yarnpkg.com/@jsonjoy.com/json-pack/-/json-pack-1.10.0.tgz#d098b0164512889f18269e373407c4b5a46f3ee2" + integrity sha512-PMOU9Sh0baiLZEDewwR/YAHJBV2D8pPIzcFQSU7HQl/k/HNCDyVfO1OvkyDwBGp4dPtvZc7Hl9FFYWwTP1CbZw== + dependencies: + "@jsonjoy.com/base64" "^1.1.2" + "@jsonjoy.com/buffers" "^1.0.0" + "@jsonjoy.com/codegen" "^1.0.0" + "@jsonjoy.com/json-pointer" "^1.0.1" + "@jsonjoy.com/util" "^1.9.0" + hyperdyperid "^1.2.0" + thingies "^2.5.0" + +"@jsonjoy.com/json-pointer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@jsonjoy.com/json-pointer/-/json-pointer-1.0.1.tgz#3b710158e8a212708a2886ea5e38d92e2ea4f4a0" + integrity sha512-tJpwQfuBuxqZlyoJOSZcqf7OUmiYQ6MiPNmOv4KbZdXE/DdvBSSAwhos0zIlJU/AXxC8XpuO8p08bh2fIl+RKA== + dependencies: + "@jsonjoy.com/util" "^1.3.0" + +"@jsonjoy.com/util@^1.3.0", "@jsonjoy.com/util@^1.9.0": + version "1.9.0" + resolved "https://registry.yarnpkg.com/@jsonjoy.com/util/-/util-1.9.0.tgz#7ee95586aed0a766b746cd8d8363e336c3c47c46" + integrity sha512-pLuQo+VPRnN8hfPqUTLTHk126wuYdXVxE6aDmjSeV4NCAgyxWbiOIeNJVtID3h1Vzpoi9m4jXezf73I6LgabgQ== dependencies: - ioredis "~4.17.1" + "@jsonjoy.com/buffers" "^1.0.0" + "@jsonjoy.com/codegen" "^1.0.0" "@kwsites/file-exists@^1.1.1": version "1.1.1" @@ -642,347 +1635,582 @@ resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919" integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw== -"@mrmlnc/readdir-enhanced@^2.2.1": - version "2.2.1" - resolved "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz" - integrity sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g== +"@langchain/anthropic@^0.3.27": + version "0.3.27" + 
resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-0.3.27.tgz#12e51d1a7a84b1c8a84f024050dd93648b2dff27" + integrity sha512-d4YUwZRjUGAMHTrv7U1jKqsvDrmns9/Ua2I/8BUPbCEBTswowGZUb0Om1KcjXG7MqehrJ03Gd78plckQ8Q7qfw== dependencies: - call-me-maybe "^1.0.1" - glob-to-regexp "^0.3.0" + "@anthropic-ai/sdk" "^0.56.0" + fast-xml-parser "^4.4.1" -"@nodelib/fs.scandir@2.1.4": - version "2.1.4" - resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz" - integrity sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA== +"@langchain/community@^0.3.56": + version "0.3.56" + resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.3.56.tgz#ed01e18e89c5515ed241c381c47eb689b0a63395" + integrity sha512-lDjUnRfHAX7aMXyEB2EWbe5qOmdQdz8n+0CNQ4ExpLy3NOFQhEVkWclhsucaX04zh0r/VH5Pkk9djpnhPBDH7g== dependencies: - "@nodelib/fs.stat" "2.0.4" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.4", "@nodelib/fs.stat@^2.0.2": - version "2.0.4" - resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz" - integrity sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q== - -"@nodelib/fs.stat@^1.1.2": - version "1.1.3" - resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz" - integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== + "@langchain/openai" ">=0.2.0 <0.7.0" + "@langchain/weaviate" "^0.2.0" + binary-extensions "^2.2.0" + expr-eval "^2.0.2" + flat "^5.0.2" + js-yaml "^4.1.0" + langchain ">=0.2.3 <0.3.0 || >=0.3.4 <0.4.0" + langsmith "^0.3.67" + uuid "^10.0.0" + zod "^3.25.32" + +"@langchain/core@0.3.57": + version "0.3.57" + resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.3.57.tgz#3e9d9414046873405f48c761fe632bd757250a91" + integrity sha512-jz28qCTKJmi47b6jqhQ6vYRTG5jRpqhtPQjriRTB5wR8mgvzo6xKs0fG/kExS3ZvM79ytD1npBvgf8i19xOo9Q== + dependencies: + 
"@cfworker/json-schema" "^4.0.2" + ansi-styles "^5.0.0" + camelcase "6" + decamelize "1.2.0" + js-tiktoken "^1.0.12" + langsmith "^0.3.29" + mustache "^4.2.0" + p-queue "^6.6.2" + p-retry "4" + uuid "^10.0.0" + zod "^3.22.4" + zod-to-json-schema "^3.22.3" + +"@langchain/core@^0.3.72": + version "0.3.72" + resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.3.72.tgz#725e2fc863c45672862c8486083e5703557ab422" + integrity sha512-WsGWVZYnlKffj2eEfDocPNiaTRoxyYiLSQdQ7oxZvxGZBqo/90vpjbC33UGK1uPNBM4kT+pkdaol/MnvKUh8TQ== + dependencies: + "@cfworker/json-schema" "^4.0.2" + ansi-styles "^5.0.0" + camelcase "6" + decamelize "1.2.0" + js-tiktoken "^1.0.12" + langsmith "^0.3.46" + mustache "^4.2.0" + p-queue "^6.6.2" + p-retry "4" + uuid "^10.0.0" + zod "^3.25.32" + zod-to-json-schema "^3.22.3" + +"@langchain/google-genai@^0.2.17": + version "0.2.17" + resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-0.2.17.tgz#91c388c845ea130da209bafcf2e0f5d870171f33" + integrity sha512-A21HhTJ5WQdh06ZMC8o/1HzkptHudzzRU8oExcWQ8aRa3Q9/4Es4bopEsEnu50rmDeARG3czMsUSUVS+BQYGEA== + dependencies: + "@google/generative-ai" "^0.24.0" + uuid "^11.1.0" + +"@langchain/ollama@^0.2.4": + version "0.2.4" + resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-0.2.4.tgz#91c2108015e018f1dcae1207c8bc44da0cf047fa" + integrity sha512-XThDrZurNPcUO6sasN13rkes1aGgu5gWAtDkkyIGT3ZeMOvrYgPKGft+bbhvsigTIH9C01TfPzrSp8LAmvHIjA== + dependencies: + ollama "^0.5.17" + uuid "^10.0.0" -"@nodelib/fs.walk@^1.2.3": - version "1.2.6" - resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz" - integrity sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow== +"@langchain/openai@>=0.1.0 <0.7.0", "@langchain/openai@>=0.2.0 <0.7.0": + version "0.6.2" + resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.6.2.tgz#fb68fa7305ef419e772a56c29146810c0923513d" + integrity 
sha512-OjdSfGENdz4tR9TPN4KTat7vJIE6cgt7vT0z69qU1J1aHCs9MyNwqdaQFF++LVlZAae9aTpyCyqeODyge42oKw== dependencies: - "@nodelib/fs.scandir" "2.1.4" - fastq "^1.6.0" + js-tiktoken "^1.0.12" + openai "^5.3.0" + zod "^3.25.32" -"@oclif/command@^1", "@oclif/command@^1.5.13", "@oclif/command@^1.5.20", "@oclif/command@^1.6.0", "@oclif/command@^1.8.0": - version "1.8.0" - resolved "https://registry.npmjs.org/@oclif/command/-/command-1.8.0.tgz" - integrity sha512-5vwpq6kbvwkQwKqAoOU3L72GZ3Ta8RRrewKj9OJRolx28KLJJ8Dg9Rf7obRwt5jQA9bkYd8gqzMTrI7H3xLfaw== +"@langchain/textsplitters@>=0.0.0 <0.2.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@langchain/textsplitters/-/textsplitters-0.1.0.tgz#f37620992192df09ecda3dfbd545b36a6bcbae46" + integrity sha512-djI4uw9rlkAb5iMhtLED+xJebDdAG935AdP4eRTB02R7OB/act55Bj9wsskhZsvuyQRpO4O1wQOp85s6T6GWmw== dependencies: - "@oclif/config" "^1.15.1" - "@oclif/errors" "^1.3.3" - "@oclif/parser" "^3.8.3" - "@oclif/plugin-help" "^3" - debug "^4.1.1" - semver "^7.3.2" + js-tiktoken "^1.0.12" -"@oclif/command@^1.8.1": - version "1.8.16" - resolved "https://registry.npmjs.org/@oclif/command/-/command-1.8.16.tgz" - integrity sha512-rmVKYEsKzurfRU0xJz+iHelbi1LGlihIWZ7Qvmb/CBz1EkhL7nOkW4SVXmG2dA5Ce0si2gr88i6q4eBOMRNJ1w== +"@langchain/weaviate@^0.2.0": + version "0.2.1" + resolved "https://registry.yarnpkg.com/@langchain/weaviate/-/weaviate-0.2.1.tgz#51ad20cf6d40e63d6149e5d01f91597cdff66744" + integrity sha512-rlfAKF+GB0A5MUrol34oDrBkl4q6AefARk9KDW+LfzhV/74pZZLZyIPYPxvE4XwI3gvpwp024DNsDxK/4UW0/g== dependencies: - "@oclif/config" "^1.18.2" - "@oclif/errors" "^1.3.5" - "@oclif/help" "^1.0.1" - "@oclif/parser" "^3.8.6" - debug "^4.1.1" - semver "^7.3.2" + uuid "^10.0.0" + weaviate-client "^3.5.2" -"@oclif/config@1.18.2": - version "1.18.2" - resolved "https://registry.npmjs.org/@oclif/config/-/config-1.18.2.tgz" - integrity sha512-cE3qfHWv8hGRCP31j7fIS7BfCflm/BNZ2HNqHexH+fDrdF2f1D5S8VmXWLC77ffv3oDvWyvE9AZeR0RfmHCCaA== +"@nodelib/fs.scandir@2.1.5": + 
version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: - "@oclif/errors" "^1.3.3" - "@oclif/parser" "^3.8.0" - debug "^4.1.1" - globby "^11.0.1" - is-wsl "^2.1.1" - tslib "^2.0.0" + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" -"@oclif/config@^1.15.1", "@oclif/config@^1.18.3": - version "1.18.3" - resolved "https://registry.npmjs.org/@oclif/config/-/config-1.18.3.tgz" - integrity sha512-sBpko86IrTscc39EvHUhL+c++81BVTsIZ3ETu/vG+cCdi0N6vb2DoahR67A9FI2CGnxRRHjnTfa3m6LulwNATA== +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: - "@oclif/errors" "^1.3.5" - "@oclif/parser" "^3.8.0" - debug "^4.1.1" - globby "^11.0.1" - is-wsl "^2.1.1" - tslib "^2.3.1" + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" -"@oclif/config@^1.17.0": - version "1.17.0" - resolved "https://registry.npmjs.org/@oclif/config/-/config-1.17.0.tgz" - integrity sha512-Lmfuf6ubjQ4ifC/9bz1fSCHc6F6E653oyaRXxg+lgT4+bYf9bk+nqrUpAbrXyABkCqgIBiFr3J4zR/kiFdE1PA== +"@npmcli/agent@^2.0.0": + version "2.2.2" + resolved "https://registry.yarnpkg.com/@npmcli/agent/-/agent-2.2.2.tgz#967604918e62f620a648c7975461c9c9e74fc5d5" + integrity sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og== dependencies: - 
"@oclif/errors" "^1.3.3" - "@oclif/parser" "^3.8.0" - debug "^4.1.1" - globby "^11.0.1" - is-wsl "^2.1.1" - tslib "^2.0.0" + agent-base "^7.1.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.1" + lru-cache "^10.0.1" + socks-proxy-agent "^8.0.3" -"@oclif/config@^1.18.2": - version "1.18.6" - resolved "https://registry.yarnpkg.com/@oclif/config/-/config-1.18.6.tgz#37367026b3110a2f04875509b1920a8ee4489f21" - integrity sha512-OWhCpdu4QqggOPX1YPZ4XVmLLRX+lhGjXV6RNA7sogOwLqlEmSslnN/lhR5dkhcWZbKWBQH29YCrB3LDPRu/IA== +"@npmcli/agent@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/agent/-/agent-3.0.0.tgz#1685b1fbd4a1b7bb4f930cbb68ce801edfe7aa44" + integrity sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q== dependencies: - "@oclif/errors" "^1.3.6" - "@oclif/parser" "^3.8.9" - debug "^4.3.4" - globby "^11.1.0" - is-wsl "^2.1.1" - tslib "^2.3.1" - -"@oclif/dev-cli@^1.26.0": - version "1.26.0" - resolved "https://registry.npmjs.org/@oclif/dev-cli/-/dev-cli-1.26.0.tgz" - integrity sha512-272udZP+bG4qahoAcpWcMTJKiA+V42kRMqQM7n4tgW35brYb2UP5kK+p08PpF8sgSfRTV8MoJVJG9ax5kY82PA== - dependencies: - "@oclif/command" "^1.8.0" - "@oclif/config" "^1.17.0" - "@oclif/errors" "^1.3.3" - "@oclif/plugin-help" "^3.2.0" - cli-ux "^5.2.1" - debug "^4.1.1" - find-yarn-workspace-root "^2.0.0" - fs-extra "^8.1" - github-slugger "^1.2.1" - lodash "^4.17.11" - normalize-package-data "^3.0.0" - qqjs "^0.3.10" - tslib "^2.0.3" + agent-base "^7.1.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.1" + lru-cache "^10.0.1" + socks-proxy-agent "^8.0.3" -"@oclif/errors@1.3.5", "@oclif/errors@^1.3.5": - version "1.3.5" - resolved "https://registry.npmjs.org/@oclif/errors/-/errors-1.3.5.tgz" - integrity sha512-OivucXPH/eLLlOT7FkCMoZXiaVYf8I/w1eTAM1+gKzfhALwWTusxEx7wBmW0uzvkSg/9ovWLycPaBgJbM3LOCQ== +"@npmcli/arborist@^7.2.0": + version "7.5.4" + resolved 
"https://registry.yarnpkg.com/@npmcli/arborist/-/arborist-7.5.4.tgz#3dd9e531d6464ef6715e964c188e0880c471ac9b" + integrity sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g== + dependencies: + "@isaacs/string-locale-compare" "^1.1.0" + "@npmcli/fs" "^3.1.1" + "@npmcli/installed-package-contents" "^2.1.0" + "@npmcli/map-workspaces" "^3.0.2" + "@npmcli/metavuln-calculator" "^7.1.1" + "@npmcli/name-from-folder" "^2.0.0" + "@npmcli/node-gyp" "^3.0.0" + "@npmcli/package-json" "^5.1.0" + "@npmcli/query" "^3.1.0" + "@npmcli/redact" "^2.0.0" + "@npmcli/run-script" "^8.1.0" + bin-links "^4.0.4" + cacache "^18.0.3" + common-ancestor-path "^1.0.1" + hosted-git-info "^7.0.2" + json-parse-even-better-errors "^3.0.2" + json-stringify-nice "^1.1.4" + lru-cache "^10.2.2" + minimatch "^9.0.4" + nopt "^7.2.1" + npm-install-checks "^6.2.0" + npm-package-arg "^11.0.2" + npm-pick-manifest "^9.0.1" + npm-registry-fetch "^17.0.1" + pacote "^18.0.6" + parse-conflict-json "^3.0.0" + proc-log "^4.2.0" + proggy "^2.0.0" + promise-all-reject-late "^1.0.0" + promise-call-limit "^3.0.1" + read-package-json-fast "^3.0.2" + semver "^7.3.7" + ssri "^10.0.6" + treeverse "^3.0.0" + walk-up-path "^3.0.1" + +"@npmcli/fs@^3.1.0", "@npmcli/fs@^3.1.1": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-3.1.1.tgz#59cdaa5adca95d135fc00f2bb53f5771575ce726" + integrity sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg== dependencies: - clean-stack "^3.0.0" - fs-extra "^8.1" - indent-string "^4.0.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" + semver "^7.3.5" -"@oclif/errors@^1", "@oclif/errors@^1.2.1", "@oclif/errors@^1.2.2", "@oclif/errors@^1.3.3": - version "1.3.4" - resolved "https://registry.npmjs.org/@oclif/errors/-/errors-1.3.4.tgz" - integrity sha512-pJKXyEqwdfRTUdM8n5FIHiQQHg5ETM0Wlso8bF9GodczO40mF5Z3HufnYWJE7z8sGKxOeJCdbAVZbS8Y+d5GCw== +"@npmcli/fs@^4.0.0": + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/@npmcli/fs/-/fs-4.0.0.tgz#a1eb1aeddefd2a4a347eca0fab30bc62c0e1c0f2" + integrity sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q== dependencies: - clean-stack "^3.0.0" - fs-extra "^8.1" - indent-string "^4.0.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" + semver "^7.3.5" -"@oclif/errors@^1.3.6": - version "1.3.6" - resolved "https://registry.yarnpkg.com/@oclif/errors/-/errors-1.3.6.tgz#e8fe1fc12346cb77c4f274e26891964f5175f75d" - integrity sha512-fYaU4aDceETd89KXP+3cLyg9EHZsLD3RxF2IU9yxahhBpspWjkWi3Dy3bTgcwZ3V47BgxQaGapzJWDM33XIVDQ== - dependencies: - clean-stack "^3.0.0" - fs-extra "^8.1" - indent-string "^4.0.0" - strip-ansi "^6.0.1" - wrap-ansi "^7.0.0" +"@npmcli/git@^5.0.0": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@npmcli/git/-/git-5.0.8.tgz#8ba3ff8724192d9ccb2735a2aa5380a992c5d3d1" + integrity sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ== + dependencies: + "@npmcli/promise-spawn" "^7.0.0" + ini "^4.1.3" + lru-cache "^10.0.1" + npm-pick-manifest "^9.0.0" + proc-log "^4.0.0" + promise-inflight "^1.0.1" + promise-retry "^2.0.1" + semver "^7.3.5" + which "^4.0.0" -"@oclif/help@^1.0.1": - version "1.0.1" - resolved "https://registry.npmjs.org/@oclif/help/-/help-1.0.1.tgz" - integrity sha512-8rsl4RHL5+vBUAKBL6PFI3mj58hjPCp2VYyXD4TAa7IMStikFfOH2gtWmqLzIlxAED2EpD0dfYwo9JJxYsH7Aw== +"@npmcli/installed-package-contents@^2.0.1", "@npmcli/installed-package-contents@^2.1.0": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz#63048e5f6e40947a3a88dcbcb4fd9b76fdd37c17" + integrity sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w== dependencies: - "@oclif/config" "1.18.2" - "@oclif/errors" "1.3.5" - chalk "^4.1.2" - indent-string "^4.0.0" - lodash "^4.17.21" - string-width "^4.2.0" - strip-ansi "^6.0.0" - 
widest-line "^3.1.0" - wrap-ansi "^6.2.0" + npm-bundled "^3.0.0" + npm-normalize-package-bin "^3.0.0" -"@oclif/linewrap@^1.0.0": - version "1.0.0" - resolved "https://registry.npmjs.org/@oclif/linewrap/-/linewrap-1.0.0.tgz" - integrity sha512-Ups2dShK52xXa8w6iBWLgcjPJWjais6KPJQq3gQ/88AY6BXoTX+MIGFPrWQO1KLMiQfoTpcLnUwloN4brrVUHw== +"@npmcli/map-workspaces@^3.0.2": + version "3.0.6" + resolved "https://registry.yarnpkg.com/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz#27dc06c20c35ef01e45a08909cab9cb3da08cea6" + integrity sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA== + dependencies: + "@npmcli/name-from-folder" "^2.0.0" + glob "^10.2.2" + minimatch "^9.0.0" + read-package-json-fast "^3.0.0" -"@oclif/parser@3.8.6", "@oclif/parser@^3.8.6": - version "3.8.6" - resolved "https://registry.npmjs.org/@oclif/parser/-/parser-3.8.6.tgz" - integrity sha512-tXb0NKgSgNxmf6baN6naK+CCwOueaFk93FG9u202U7mTBHUKsioOUlw1SG/iPi9aJM3WE4pHLXmty59pci0OEw== +"@npmcli/metavuln-calculator@^7.1.1": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@npmcli/metavuln-calculator/-/metavuln-calculator-7.1.1.tgz#4d3b6c3192f72bc8ad59476de0da939c33877fcf" + integrity sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g== dependencies: - "@oclif/errors" "^1.2.2" - "@oclif/linewrap" "^1.0.0" - chalk "^4.1.0" - tslib "^2.0.0" + cacache "^18.0.0" + json-parse-even-better-errors "^3.0.0" + pacote "^18.0.0" + proc-log "^4.1.0" + semver "^7.3.5" + +"@npmcli/name-from-folder@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz#c44d3a7c6d5c184bb6036f4d5995eee298945815" + integrity sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg== -"@oclif/parser@^3.8.0", "@oclif/parser@^3.8.3": - version "3.8.5" - resolved "https://registry.npmjs.org/@oclif/parser/-/parser-3.8.5.tgz" - integrity 
sha512-yojzeEfmSxjjkAvMRj0KzspXlMjCfBzNRPkWw8ZwOSoNWoJn+OCS/m/S+yfV6BvAM4u2lTzX9Y5rCbrFIgkJLg== +"@npmcli/node-gyp@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz#101b2d0490ef1aa20ed460e4c0813f0db560545a" + integrity sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA== + +"@npmcli/package-json@^5.0.0", "@npmcli/package-json@^5.1.0": + version "5.2.1" + resolved "https://registry.yarnpkg.com/@npmcli/package-json/-/package-json-5.2.1.tgz#df69477b1023b81ff8503f2b9db4db4faea567ed" + integrity sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ== + dependencies: + "@npmcli/git" "^5.0.0" + glob "^10.2.2" + hosted-git-info "^7.0.0" + json-parse-even-better-errors "^3.0.0" + normalize-package-data "^6.0.0" + proc-log "^4.0.0" + semver "^7.5.3" + +"@npmcli/promise-spawn@^7.0.0": + version "7.0.2" + resolved "https://registry.yarnpkg.com/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz#1d53d34ffeb5d151bfa8ec661bcccda8bbdfd532" + integrity sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ== dependencies: - "@oclif/errors" "^1.2.2" - "@oclif/linewrap" "^1.0.0" - chalk "^2.4.2" - tslib "^1.9.3" + which "^4.0.0" -"@oclif/parser@^3.8.9": - version "3.8.9" - resolved "https://registry.yarnpkg.com/@oclif/parser/-/parser-3.8.9.tgz#9399041ada7e465043f34b24f4d82a8beb68a023" - integrity sha512-1j/kThdse7yHQz6+c3v8RA1I3gD6+SGt2O7IAb/MAMoxqyBrFQDabQHH2UU4eVFGMLN7U91AiYJp11zJ9LcQAg== +"@npmcli/query@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@npmcli/query/-/query-3.1.0.tgz#bc202c59e122a06cf8acab91c795edda2cdad42c" + integrity sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ== dependencies: - "@oclif/errors" "^1.3.6" - "@oclif/linewrap" "^1.0.0" - chalk "^4.1.0" - tslib "^2.4.1" + postcss-selector-parser "^6.0.10" 
-"@oclif/plugin-help@^2.2.0": - version "2.2.3" - resolved "https://registry.npmjs.org/@oclif/plugin-help/-/plugin-help-2.2.3.tgz" - integrity sha512-bGHUdo5e7DjPJ0vTeRBMIrfqTRDBfyR5w0MP41u0n3r7YG5p14lvMmiCXxi6WDaP2Hw5nqx3PnkAIntCKZZN7g== +"@npmcli/redact@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@npmcli/redact/-/redact-2.0.1.tgz#95432fd566e63b35c04494621767a4312c316762" + integrity sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw== + +"@npmcli/run-script@^8.0.0", "@npmcli/run-script@^8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-8.1.0.tgz#a563e5e29b1ca4e648a6b1bbbfe7220b4bfe39fc" + integrity sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg== dependencies: - "@oclif/command" "^1.5.13" - chalk "^2.4.1" - indent-string "^4.0.0" - lodash.template "^4.4.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - widest-line "^2.0.1" - wrap-ansi "^4.0.0" + "@npmcli/node-gyp" "^3.0.0" + "@npmcli/package-json" "^5.0.0" + "@npmcli/promise-spawn" "^7.0.0" + node-gyp "^10.0.0" + proc-log "^4.0.0" + which "^4.0.0" -"@oclif/plugin-help@^3", "@oclif/plugin-help@^3.2.0", "@oclif/plugin-help@^3.2.2": - version "3.2.2" - resolved "https://registry.npmjs.org/@oclif/plugin-help/-/plugin-help-3.2.2.tgz" - integrity sha512-SPZ8U8PBYK0n4srFjCLedk0jWU4QlxgEYLCXIBShJgOwPhTTQknkUlsEwaMIevvCU4iCQZhfMX+D8Pz5GZjFgA== +"@oclif/core@^4", "@oclif/core@^4.0.27", "@oclif/core@^4.5.2", "@oclif/core@^4.5.3": + version "4.5.3" + resolved "https://registry.yarnpkg.com/@oclif/core/-/core-4.5.3.tgz#2b913879e1673d95e63254573b67326b62f5f309" + integrity sha512-ISoFlfmsuxJvNKXhabCO4/KqNXDQdLHchZdTPfZbtqAsQbqTw5IKitLVZq9Sz1LWizN37HILp4u0350B8scBjg== dependencies: - "@oclif/command" "^1.5.20" - "@oclif/config" "^1.15.1" - "@oclif/errors" "^1.2.2" - chalk "^4.1.0" + ansi-escapes "^4.3.2" + ansis "^3.17.0" + clean-stack "^3.0.1" + cli-spinners "^2.9.2" 
+ debug "^4.4.0" + ejs "^3.1.10" + get-package-type "^0.1.0" indent-string "^4.0.0" - lodash.template "^4.4.0" - string-width "^4.2.0" - strip-ansi "^6.0.0" + is-wsl "^2.2.0" + lilconfig "^3.1.3" + minimatch "^9.0.5" + semver "^7.6.3" + string-width "^4.2.3" + supports-color "^8" + tinyglobby "^0.2.14" widest-line "^3.1.0" - wrap-ansi "^4.0.0" - -"@oclif/screen@^1.0.3": - version "1.0.4" - resolved "https://registry.npmjs.org/@oclif/screen/-/screen-1.0.4.tgz" - integrity sha512-60CHpq+eqnTxLZQ4PGHYNwUX572hgpMHGPtTWMjdTMsAvlm69lZV/4ly6O3sAYkomo4NggGcomrDpBe34rxUqw== + wordwrap "^1.0.0" + wrap-ansi "^7.0.0" -"@oclif/test@^1", "@oclif/test@^1.2.4": - version "1.2.8" - resolved "https://registry.npmjs.org/@oclif/test/-/test-1.2.8.tgz" - integrity sha512-HCh0qPge1JCqTEw4s2ScnicEZd4Ro4/0VvdjpsfCiX6fuDV53fRZ2uqLTgxKGHrVoqOZnVrRZHyhFyEsFGs+zQ== +"@oclif/plugin-command-snapshot@^5.3.6": + version "5.3.6" + resolved "https://registry.yarnpkg.com/@oclif/plugin-command-snapshot/-/plugin-command-snapshot-5.3.6.tgz#fa9786279b532d8a0c6add51717dd8ea9fc520c9" + integrity sha512-0uu1KoB5IvS79l7Ao92vUmVHh9eWqP5uWv4oD7aeNFmUnCQrTB8nhdclP2E6MqMoatB6C0Xv+TXWC/ISLqBu3A== dependencies: - fancy-test "^1.4.3" + "@oclif/core" "^4" + ansis "^3.17.0" + globby "^14.1.0" + just-diff "^5.2.0" + lodash.difference "^4.5.0" + lodash.get "^4.4.2" + lodash.sortby "^4.7.0" + semver "^7.7.2" + ts-json-schema-generator "^1.5.1" + +"@oclif/plugin-help@^6.2.29": + version "6.2.31" + resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-6.2.31.tgz#0fb80dd1bd8c63e4349a6c0d53046a4d44cec9c1" + integrity sha512-o4xR98DEFf+VqY+M9B3ZooTm2T/mlGvyBHwHcnsPJCEnvzHqEA9xUlCUK4jm7FBXHhkppziMgCC2snsueLoIpQ== + dependencies: + "@oclif/core" "^4" + +"@oclif/plugin-not-found@^3.2.63": + version "3.2.64" + resolved "https://registry.yarnpkg.com/@oclif/plugin-not-found/-/plugin-not-found-3.2.64.tgz#03af9241f2701e53ea182a0262ef47e70125b3ab" + integrity 
sha512-WDCPkFw5Qi9ALVODnGWdFDcm49iBOg7G2/u1C/o/KB4eSxlQn0JEDhLaMGcLmwOYKQnQdI9x35K77vhR1JrwDg== + dependencies: + "@inquirer/prompts" "^7.8.1" + "@oclif/core" "^4.5.2" + ansis "^3.17.0" + fast-levenshtein "^3.0.0" + +"@oclif/plugin-warn-if-update-available@^3.1.46": + version "3.1.46" + resolved "https://registry.yarnpkg.com/@oclif/plugin-warn-if-update-available/-/plugin-warn-if-update-available-3.1.46.tgz#21593b68f3876021c26841993c17ac82aa18e179" + integrity sha512-YDlr//SHmC80eZrt+0wNFWSo1cOSU60RoWdhSkAoPB3pUGPSNHZDquXDpo7KniinzYPsj1rfetCYk7UVXwYu7A== + dependencies: + "@oclif/core" "^4" + ansis "^3.17.0" + debug "^4.4.1" + http-call "^5.2.2" + lodash "^4.17.21" + registry-auth-token "^5.1.0" -"@octokit/auth-token@^2.4.4": - version "2.5.0" - resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.5.0.tgz#27c37ea26c205f28443402477ffd261311f21e36" - integrity sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g== - dependencies: - "@octokit/types" "^6.0.3" +"@octokit/auth-token@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7" + integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA== -"@octokit/core@^3.6.0": - version "3.6.0" - resolved "https://registry.yarnpkg.com/@octokit/core/-/core-3.6.0.tgz#3376cb9f3008d9b3d110370d90e0a1fcd5fe6085" - integrity sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q== - dependencies: - "@octokit/auth-token" "^2.4.4" - "@octokit/graphql" "^4.5.8" - "@octokit/request" "^5.6.3" - "@octokit/request-error" "^2.0.5" - "@octokit/types" "^6.0.3" +"@octokit/auth-token@^5.0.0": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-5.1.2.tgz#68a486714d7a7fd1df56cb9bc89a860a0de866de" + integrity 
sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw== + +"@octokit/core@^5.0.1": + version "5.2.2" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.2.tgz#252805732de9b4e8e4f658d34b80c4c9b2534761" + integrity sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg== + dependencies: + "@octokit/auth-token" "^4.0.0" + "@octokit/graphql" "^7.1.0" + "@octokit/request" "^8.4.1" + "@octokit/request-error" "^5.1.1" + "@octokit/types" "^13.0.0" before-after-hook "^2.2.0" universal-user-agent "^6.0.0" -"@octokit/endpoint@^6.0.1": - version "6.0.12" - resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.12.tgz#3b4d47a4b0e79b1027fb8d75d4221928b2d05658" - integrity sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA== - dependencies: - "@octokit/types" "^6.0.3" - is-plain-object "^5.0.0" +"@octokit/core@^6.1.4": + version "6.1.6" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-6.1.6.tgz#302b3e7188c81e43352c6df4dfabbf897ff192c1" + integrity sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA== + dependencies: + "@octokit/auth-token" "^5.0.0" + "@octokit/graphql" "^8.2.2" + "@octokit/request" "^9.2.3" + "@octokit/request-error" "^6.1.8" + "@octokit/types" "^14.0.0" + before-after-hook "^3.0.2" + universal-user-agent "^7.0.0" + +"@octokit/endpoint@^10.1.4": + version "10.1.4" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-10.1.4.tgz#8783be38a32b95af8bcb6523af20ab4eed7a2adb" + integrity sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA== + dependencies: + "@octokit/types" "^14.0.0" + universal-user-agent "^7.0.2" + +"@octokit/endpoint@^9.0.6": + version "9.0.6" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.6.tgz#114d912108fe692d8b139cfe7fc0846dfd11b6c0" + integrity 
sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw== + dependencies: + "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" -"@octokit/graphql@^4.5.8": - version "4.8.0" - resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.8.0.tgz#664d9b11c0e12112cbf78e10f49a05959aa22cc3" - integrity sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg== +"@octokit/graphql@^7.1.0": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.1.1.tgz#79d9f3d0c96a8fd13d64186fe5c33606d48b79cc" + integrity sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g== dependencies: - "@octokit/request" "^5.6.0" - "@octokit/types" "^6.0.3" + "@octokit/request" "^8.4.1" + "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" -"@octokit/openapi-types@^12.11.0": - version "12.11.0" - resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-12.11.0.tgz#da5638d64f2b919bca89ce6602d059f1b52d3ef0" - integrity sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ== +"@octokit/graphql@^8.2.2": + version "8.2.2" + resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-8.2.2.tgz#3db48c4ffdf07f99600cee513baf45e73eced4d1" + integrity sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA== + dependencies: + "@octokit/request" "^9.2.3" + "@octokit/types" "^14.0.0" + universal-user-agent "^7.0.0" + +"@octokit/openapi-types@^20.0.0": + version "20.0.0" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-20.0.0.tgz#9ec2daa0090eeb865ee147636e0c00f73790c6e5" + integrity sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA== -"@octokit/plugin-paginate-rest@^2.17.0": - version "2.21.3" - resolved 
"https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz#7f12532797775640dbb8224da577da7dc210c87e" - integrity sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw== +"@octokit/openapi-types@^24.2.0": + version "24.2.0" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-24.2.0.tgz#3d55c32eac0d38da1a7083a9c3b0cca77924f7d3" + integrity sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg== + +"@octokit/openapi-types@^25.1.0": + version "25.1.0" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-25.1.0.tgz#5a72a9dfaaba72b5b7db375fd05e90ca90dc9682" + integrity sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA== + +"@octokit/plugin-paginate-rest@^11.4.2": + version "11.6.0" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.6.0.tgz#e5e9ff3530e867c3837fdbff94ce15a2468a1f37" + integrity sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw== dependencies: - "@octokit/types" "^6.40.0" + "@octokit/types" "^13.10.0" -"@octokit/plugin-rest-endpoint-methods@^5.13.0": - version "5.16.2" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz#7ee8bf586df97dd6868cf68f641354e908c25342" - integrity sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw== +"@octokit/plugin-paginate-rest@^9.2.2": + version "9.2.2" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz#c516bc498736bcdaa9095b9a1d10d9d0501ae831" + integrity sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ== dependencies: - "@octokit/types" "^6.39.0" - deprecation "^2.3.1" + "@octokit/types" "^12.6.0" 
-"@octokit/request-error@^2.0.5", "@octokit/request-error@^2.1.0": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.1.0.tgz#9e150357831bfc788d13a4fd4b1913d60c74d677" - integrity sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg== +"@octokit/plugin-request-log@^5.3.1": + version "5.3.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-5.3.1.tgz#ccb75d9705de769b2aa82bcd105cc96eb0c00f69" + integrity sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw== + +"@octokit/plugin-rest-endpoint-methods@^10.4.0": + version "10.4.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz#41ba478a558b9f554793075b2e20cd2ef973be17" + integrity sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg== + dependencies: + "@octokit/types" "^12.6.0" + +"@octokit/plugin-rest-endpoint-methods@^13.3.0": + version "13.5.0" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.5.0.tgz#d8c8ca2123b305596c959a9134dfa8b0495b0ba6" + integrity sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw== + dependencies: + "@octokit/types" "^13.10.0" + +"@octokit/request-error@^5.1.1": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.1.tgz#b9218f9c1166e68bb4d0c89b638edc62c9334805" + integrity sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g== dependencies: - "@octokit/types" "^6.0.3" + "@octokit/types" "^13.1.0" deprecation "^2.0.0" once "^1.4.0" -"@octokit/request@^5.6.0", "@octokit/request@^5.6.3": - version "5.6.3" - resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.6.3.tgz#19a022515a5bba965ac06c9d1334514eb50c48b0" 
- integrity sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A== +"@octokit/request-error@^6.1.8": + version "6.1.8" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-6.1.8.tgz#3c7ce1ca6721eabd43dbddc76b44860de1fdea75" + integrity sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ== dependencies: - "@octokit/endpoint" "^6.0.1" - "@octokit/request-error" "^2.1.0" - "@octokit/types" "^6.16.1" - is-plain-object "^5.0.0" - node-fetch "^2.6.7" + "@octokit/types" "^14.0.0" + +"@octokit/request@^8.4.1": + version "8.4.1" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-8.4.1.tgz#715a015ccf993087977ea4365c44791fc4572486" + integrity sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw== + dependencies: + "@octokit/endpoint" "^9.0.6" + "@octokit/request-error" "^5.1.1" + "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" -"@octokit/types@^6.0.3", "@octokit/types@^6.16.1", "@octokit/types@^6.39.0", "@octokit/types@^6.40.0": - version "6.41.0" - resolved "https://registry.yarnpkg.com/@octokit/types/-/types-6.41.0.tgz#e58ef78d78596d2fb7df9c6259802464b5f84a04" - integrity sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg== +"@octokit/request@^9.2.3": + version "9.2.4" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-9.2.4.tgz#037400946a30f971917f47175053c1075fac713b" + integrity sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA== + dependencies: + "@octokit/endpoint" "^10.1.4" + "@octokit/request-error" "^6.1.8" + "@octokit/types" "^14.0.0" + fast-content-type-parse "^2.0.0" + universal-user-agent "^7.0.2" + +"@octokit/rest@^21.1.1": + version "21.1.1" + resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-21.1.1.tgz#7a70455ca451b1d253e5b706f35178ceefb74de2" + integrity 
sha512-sTQV7va0IUVZcntzy1q3QqPm/r8rWtDCqpRAmb8eXXnKkjoQEtFe3Nt5GTVsHft+R6jJoHeSiVLcgcvhtue/rg== + dependencies: + "@octokit/core" "^6.1.4" + "@octokit/plugin-paginate-rest" "^11.4.2" + "@octokit/plugin-request-log" "^5.3.1" + "@octokit/plugin-rest-endpoint-methods" "^13.3.0" + +"@octokit/types@^12.6.0": + version "12.6.0" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.6.0.tgz#8100fb9eeedfe083aae66473bd97b15b62aedcb2" + integrity sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw== + dependencies: + "@octokit/openapi-types" "^20.0.0" + +"@octokit/types@^13.0.0", "@octokit/types@^13.1.0", "@octokit/types@^13.10.0": + version "13.10.0" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.10.0.tgz#3e7c6b19c0236c270656e4ea666148c2b51fd1a3" + integrity sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA== + dependencies: + "@octokit/openapi-types" "^24.2.0" + +"@octokit/types@^14.0.0": + version "14.1.0" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-14.1.0.tgz#3bf9b3a3e3b5270964a57cc9d98592ed44f840f2" + integrity sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g== dependencies: - "@octokit/openapi-types" "^12.11.0" + "@octokit/openapi-types" "^25.1.0" "@pkgjs/parseargs@^0.11.0": version "0.11.0" resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== +"@pnpm/config.env-replace@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz#ab29da53df41e8948a00f2433f085f54de8b3a4c" + integrity sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w== + +"@pnpm/network.ca-file@^1.0.1": + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz#2ab05e09c1af0cdf2fcf5035bea1484e222f7983" + integrity sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA== + dependencies: + graceful-fs "4.2.10" + +"@pnpm/npm-conf@^2.1.0": + version "2.3.1" + resolved "https://registry.yarnpkg.com/@pnpm/npm-conf/-/npm-conf-2.3.1.tgz#bb375a571a0bd63ab0a23bece33033c683e9b6b0" + integrity sha512-c83qWb22rNRuB0UaVCI0uRPNRr8Z0FWnEIvT47jiHAmOIUHbBOg5XvV7pM5x+rKn9HRpjxquDbXYSXr3fAKFcw== + dependencies: + "@pnpm/config.env-replace" "^1.1.0" + "@pnpm/network.ca-file" "^1.0.1" + config-chain "^1.1.11" + "@postman/form-data@~3.1.1": version "3.1.1" resolved "https://registry.yarnpkg.com/@postman/form-data/-/form-data-3.1.1.tgz#d0446d0d3639a291f5e800e89fa1d0d3723f9414" @@ -1002,192 +2230,954 @@ universalify "^0.2.0" url-parse "^1.5.3" -"@postman/tunnel-agent@^0.6.3": - version "0.6.3" - resolved "https://registry.yarnpkg.com/@postman/tunnel-agent/-/tunnel-agent-0.6.3.tgz#23048d8d8618d453c571f03189e944afdc2292b7" - integrity sha512-k57fzmAZ2PJGxfOA4SGR05ejorHbVAa/84Hxh/2nAztjNXc4ZjOm9NUIk6/Z6LCrBvJZqjRZbN8e/nROVUPVdg== +"@postman/tunnel-agent@^0.6.4": + version "0.6.4" + resolved "https://registry.yarnpkg.com/@postman/tunnel-agent/-/tunnel-agent-0.6.4.tgz#30ceaadf0bd21cc0677ad5d1abd238b0fe27ca38" + integrity sha512-CJJlq8V7rNKhAw4sBfjixKpJW00SHqebqNUQKxMoepgeWZIbdPcD+rguRcivGhS4N12PymDcKgUgSD4rVC+RjQ== dependencies: safe-buffer "^5.0.1" -"@salesforce/bunyan@^2.0.0": - version "2.0.0" - resolved "https://registry.npmjs.org/@salesforce/bunyan/-/bunyan-2.0.0.tgz" - integrity sha512-5hq+HWQSeymuygl3i9ehlQo3XWrlBE+A+QzmpDaoK37op4u9M+SBUbXfOW0IABOQCg+JmfQPocSMV74hRoqU9w== +"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" + integrity 
sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ== + +"@protobufjs/base64@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735" + integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== + +"@protobufjs/codegen@^2.0.4": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb" + integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== + +"@protobufjs/eventemitter@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70" + integrity sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q== + +"@protobufjs/fetch@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45" + integrity sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ== dependencies: - dayjs "^1.8.16" - dayjs-plugin-utc "^0.1.2" - optionalDependencies: - dtrace-provider "~0.6" - mv "~2" - safe-json-stringify "~1" + "@protobufjs/aspromise" "^1.1.1" + "@protobufjs/inquire" "^1.1.0" -"@salesforce/command@^4.2.1": - version "4.2.1" - resolved "https://registry.npmjs.org/@salesforce/command/-/command-4.2.1.tgz" - integrity sha512-hanDjR8yLdeKlrlUXjh18pmbSuF+46jtIfjpG0aA1Q089qol9+nqGt64ToeN7df7BM0Vk0X0+765YNKV/zvxlA== - dependencies: - "@oclif/command" "^1.8.1" - "@oclif/errors" "^1.2.2" - "@oclif/parser" "3.8.6" - "@oclif/plugin-help" "^2.2.0" - "@oclif/test" "^1.2.4" - "@salesforce/core" "^2.31.0" - "@salesforce/kit" "^1.5.17" - "@salesforce/ts-types" "^1.5.20" - chalk "^2.4.2" - cli-ux "^4.9.3" - -"@salesforce/core@^2.31.0", 
"@salesforce/core@^2.33.1": - version "2.33.1" - resolved "https://registry.npmjs.org/@salesforce/core/-/core-2.33.1.tgz" - integrity sha512-jKVFYEvlV+loBoau5heBOVXmzsPO+RbYh6SPybJK6xF7khQmzu7+WAQbikY2eY8VaXcded2kka8L/FKuD/LKBg== - dependencies: - "@salesforce/bunyan" "^2.0.0" - "@salesforce/kit" "^1.5.0" - "@salesforce/schemas" "^1.0.1" - "@salesforce/ts-types" "^1.5.13" - "@types/graceful-fs" "^4.1.5" - "@types/jsforce" "^1.9.35" - "@types/mkdirp" "^1.0.1" - debug "^3.1.0" - faye "^1.4.0" - graceful-fs "^4.2.4" - jsen "0.6.6" - jsforce "^1.11.0" - jsonwebtoken "8.5.0" - mkdirp "1.0.4" - semver "^7.3.5" - ts-retry-promise "^0.6.0" +"@protobufjs/float@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1" + integrity sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ== -"@salesforce/dev-config@^2.1.0": - version "2.1.0" - resolved "https://registry.npmjs.org/@salesforce/dev-config/-/dev-config-2.1.0.tgz" - integrity sha512-I+zrptt8zI1jbP3TVA6g7i3quuh71tPky8gwCsXaQ2X9ea1xbeAslN4YjWfgqbY5SaxJhqP979oxFADlH+OehQ== +"@protobufjs/inquire@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089" + integrity sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q== + +"@protobufjs/path@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d" + integrity sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA== -"@salesforce/kit@^1.5.0", "@salesforce/kit@^1.5.17": - version "1.5.25" - resolved "https://registry.npmjs.org/@salesforce/kit/-/kit-1.5.25.tgz" - integrity sha512-Tbb7AZwJ00oGW8uv4DWsb3tjYi/rI8XWICWhLDpi44lTjrd+b22hKZMJLZ6XWEmezwtbzLT7vZIcj3IMtDgkIg== +"@protobufjs/pool@^1.1.0": + 
version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54" + integrity sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw== + +"@protobufjs/utf8@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" + integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== + +"@puppeteer/browsers@2.10.6": + version "2.10.6" + resolved "https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-2.10.6.tgz#0b1b5046ec4918a4fd4e4c9383153a80af288bd2" + integrity sha512-pHUn6ZRt39bP3698HFQlu2ZHCkS/lPcpv7fVQcGBSzNNygw171UXAKrCUhy+TEMw4lEttOKDgNpb04hwUAJeiQ== + dependencies: + debug "^4.4.1" + extract-zip "^2.0.1" + progress "^2.0.3" + proxy-agent "^6.5.0" + semver "^7.7.2" + tar-fs "^3.1.0" + yargs "^17.7.2" + +"@puppeteer/browsers@2.6.1": + version "2.6.1" + resolved "https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-2.6.1.tgz#d75aec5010cae377c5e4742bf5e4f62a79c21315" + integrity sha512-aBSREisdsGH890S2rQqK82qmQYU3uFpSH8wcZWHgHzl3LfzsxAKbLNiAG9mO8v1Y0UICBeClICxPJvyr0rcuxg== + dependencies: + debug "^4.4.0" + extract-zip "^2.0.1" + progress "^2.0.3" + proxy-agent "^6.5.0" + semver "^7.6.3" + tar-fs "^3.0.6" + unbzip2-stream "^1.4.3" + yargs "^17.7.2" + +"@rtsao/scc@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@rtsao/scc/-/scc-1.1.0.tgz#927dd2fae9bc3361403ac2c7a00c32ddce9ad7e8" + integrity sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g== + +"@salesforce/cli-plugins-testkit@^5.3.41": + version "5.3.41" + resolved "https://registry.yarnpkg.com/@salesforce/cli-plugins-testkit/-/cli-plugins-testkit-5.3.41.tgz#16acca933dcd64f0fd2c917c7b22529107f373e6" + integrity sha512-K6fZfvzJAb0WFJFlgyIHgLyWLXR7i2SKDZ069kvCmDN0QVfftp/OwYrtosIzxiXdpHOtcoAkt65M1UR1aw0XzA== + 
dependencies: + "@salesforce/core" "^8.8.0" + "@salesforce/kit" "^3.2.3" + "@salesforce/ts-types" "^2.0.11" + "@types/shelljs" "^0.8.15" + debug "^4.4.0" + jszip "^3.10.1" + shelljs "^0.8.4" + sinon "^17.0.2" + strip-ansi "6.0.1" + ts-retry-promise "^0.8.1" + +"@salesforce/core@^8.18.1", "@salesforce/core@^8.19.1", "@salesforce/core@^8.23.1", "@salesforce/core@^8.5.1", "@salesforce/core@^8.8.0": + version "8.23.1" + resolved "https://registry.yarnpkg.com/@salesforce/core/-/core-8.23.1.tgz#89e04518d6d4033ef6a248380eb952328068797c" + integrity sha512-/mQMu6g0gmkKQsl+G93VkkU+yrLEjnBzdUu0sPlS0WY5jM4M9sxg97LmRXa6dchECU3c/ugamsXaP6j6QmEfsQ== + dependencies: + "@jsforce/jsforce-node" "^3.10.4" + "@salesforce/kit" "^3.2.4" + "@salesforce/schemas" "^1.10.0" + "@salesforce/ts-types" "^2.0.11" + ajv "^8.17.1" + change-case "^4.1.2" + fast-levenshtein "^3.0.0" + faye "^1.4.1" + form-data "^4.0.4" + js2xmlparser "^4.0.1" + jsonwebtoken "9.0.2" + jszip "3.10.1" + memfs "^4.30.1" + pino "^9.7.0" + pino-abstract-transport "^1.2.0" + pino-pretty "^11.3.0" + proper-lockfile "^4.1.2" + semver "^7.6.3" + ts-retry-promise "^0.8.1" + +"@salesforce/dev-config@^4.3.1", "@salesforce/dev-config@^4.3.2": + version "4.3.2" + resolved "https://registry.yarnpkg.com/@salesforce/dev-config/-/dev-config-4.3.2.tgz#10047e2b8d289c93f157ab4243a1b1de57f2d6a2" + integrity sha512-mxhsWV1rzHfhGMVSFQRLOHZTGfB1R2FtqbuIb3hrgDFsW1NLjEDS2U+eZWBJiCYod1JeGpJxnETNq587lem1Gg== + +"@salesforce/dev-scripts@^10": + version "10.2.12" + resolved "https://registry.yarnpkg.com/@salesforce/dev-scripts/-/dev-scripts-10.2.12.tgz#05cb48e60cf1a204ec8402aa803093c924b02bf7" + integrity sha512-sQFrUm16PLefZ3U4scEP0+jCKrUvaVjvyMADoM7KK2sXCu9TkvwAD5XtPeBoUsf9SdWhdFf2isv1X8DKM5q/6w== + dependencies: + "@commitlint/cli" "^17.1.2" + "@commitlint/config-conventional" "^17.8.1" + "@salesforce/dev-config" "^4.3.1" + "@salesforce/prettier-config" "^0.0.3" + "@types/chai" "^4.3.14" + "@types/mocha" "^10.0.7" + "@types/node" "^18.19.41" + 
"@types/sinon" "^10.0.20" + chai "^4.3.10" + chalk "^4.0.0" + cosmiconfig "^8.3.6" + eslint-config-salesforce-typescript "^3.4.0" + husky "^7.0.4" + linkinator "^6.1.1" + mocha "^10.7.0" + nyc "^17.0.0" + prettier "^2.8.8" + pretty-quick "^3.3.1" + shelljs "^0.8.5" + sinon "10.0.0" + source-map-support "^0.5.21" + ts-node "^10.9.2" + typedoc "^0.26.5" + typedoc-plugin-missing-exports "^3.0.0" + typescript "^5.5.4" + wireit "^0.14.5" + +"@salesforce/kit@^3.2.3", "@salesforce/kit@^3.2.4": + version "3.2.4" + resolved "https://registry.yarnpkg.com/@salesforce/kit/-/kit-3.2.4.tgz#69fb56974685e41d26ae5db30a7261acdc731bb6" + integrity sha512-9buqZ2puIGWqjUFWYNroSeNih4d1s9kdQAzZfutr/Re/JMl6xBct0ATO5LVb1ty5UhdBruJrVaiTg03PqVKU+Q== dependencies: - "@salesforce/ts-types" "^1.5.20" - shx "^0.3.3" - tslib "^2.2.0" + "@salesforce/ts-types" "^2.0.12" -"@salesforce/schemas@^1.0.1": - version "1.0.4" - resolved "https://registry.npmjs.org/@salesforce/schemas/-/schemas-1.0.4.tgz" - integrity sha512-JatCrSuWbr4aWJlkJ9CVCUucZvY1pfKfO/m1AHxaWix5kUQykA+oI1zQRKjelOy2IbDmz09cBCEGjMrMp3u+dA== +"@salesforce/prettier-config@^0.0.3": + version "0.0.3" + resolved "https://registry.yarnpkg.com/@salesforce/prettier-config/-/prettier-config-0.0.3.tgz#ba648d4886bb38adabe073dbea0b3a91b3753bb0" + integrity sha512-hYOhoPTCSYMDYn+U1rlEk16PoBeAJPkrdg4/UtAzupM1mRRJOwEPMG1d7U8DxJFKuXW3DMEYWr2MwAIBDaHmFg== -"@salesforce/ts-sinon@^1.2.4": - version "1.3.5" - resolved "https://registry.npmjs.org/@salesforce/ts-sinon/-/ts-sinon-1.3.5.tgz" - integrity sha512-Zp92SW5IrxAman3G61jhxDiJX+SrvYtfiNKhXi/nGKNk5Utbq0O2KfbjPomqk4vVvhUheZpnHUdN/y/XaxRsoA== +"@salesforce/schemas@^1.10.0": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@salesforce/schemas/-/schemas-1.10.3.tgz#52c867fdd60679cf216110aa49542b7ad391f5d1" + integrity sha512-FKfvtrYTcvTXE9advzS25/DEY9yJhEyLvStm++eQFtnAaX1pe4G3oGHgiQ0q55BM5+0AlCh0+0CVtQv1t4oJRA== + +"@salesforce/sf-plugins-core@^11.3.12": + version "11.3.12" + resolved 
"https://registry.yarnpkg.com/@salesforce/sf-plugins-core/-/sf-plugins-core-11.3.12.tgz#18b3a553688428bcffea9d36abc72847497f06ae" + integrity sha512-hi8EcSoRHRxj4sm/V5YDtzq9bPr/cKpM4fC6abo/jRzpXygwizinc2gVQkXfVdhjK7NGMskVRQB1N+0TThG7bA== + dependencies: + "@inquirer/confirm" "^3.1.22" + "@inquirer/password" "^2.2.0" + "@oclif/core" "^4.0.27" + "@salesforce/core" "^8.5.1" + "@salesforce/kit" "^3.2.3" + "@salesforce/ts-types" "^2.0.12" + ansis "^3.3.2" + cli-progress "^3.12.0" + natural-orderby "^3.0.2" + slice-ansi "^7.1.0" + string-width "^7.2.0" + terminal-link "^3.0.0" + +"@salesforce/source-deploy-retrieve@^12.20.1": + version "12.21.6" + resolved "https://registry.yarnpkg.com/@salesforce/source-deploy-retrieve/-/source-deploy-retrieve-12.21.6.tgz#28962b7bb12c08d9d4899f65f0f88650f606dbc4" + integrity sha512-I0Mx0a/VgGKy++npOJpaTLV7oZA3IZDd+r3LrfFOddb5zO7jTPDaslpXxYRfJTlRyHNJCyUR2XY9t5Dz5a+awA== + dependencies: + "@salesforce/core" "^8.18.1" + "@salesforce/kit" "^3.2.3" + "@salesforce/ts-types" "^2.0.12" + "@salesforce/types" "^1.3.0" + fast-levenshtein "^3.0.0" + fast-xml-parser "^4.5.3" + got "^11.8.6" + graceful-fs "^4.2.11" + ignore "^5.3.2" + isbinaryfile "^5.0.2" + jszip "^3.10.1" + mime "2.6.0" + minimatch "^9.0.5" + proxy-agent "^6.4.0" + yaml "^2.7.1" + +"@salesforce/ts-types@^2.0.11", "@salesforce/ts-types@^2.0.12": + version "2.0.12" + resolved "https://registry.yarnpkg.com/@salesforce/ts-types/-/ts-types-2.0.12.tgz#60420622812a7ec7e46d220667bc29b42dc247ff" + integrity sha512-BIJyduJC18Kc8z+arUm5AZ9VkPRyw1KKAm+Tk+9LT99eOzhNilyfKzhZ4t+tG2lIGgnJpmytZfVDZ0e2kFul8g== + +"@salesforce/types@^1.3.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@salesforce/types/-/types-1.4.0.tgz#a8b8baa0b7cc9cb6718379464d9bc9e4ab834e9e" + integrity sha512-WpXzQd+JglQrwUs05ePGa1/vFFn1s7rymw2ltBbFj2Z0p/ez1ft6J39ILVlteS/mGca47Ce8JN+u3USVxfxkKA== + +"@samverschueren/stream-to-observable@^0.3.0": + version "0.3.1" + resolved 
"https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz#a21117b19ee9be70c379ec1877537ef2e1c63301" + integrity sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ== dependencies: - "@salesforce/ts-types" "^1.5.5" - sinon "5.1.1" - tslib "^1.10.0" + any-observable "^0.3.0" -"@salesforce/ts-types@^1.5.13", "@salesforce/ts-types@^1.5.20": - version "1.5.20" - resolved "https://registry.npmjs.org/@salesforce/ts-types/-/ts-types-1.5.20.tgz" - integrity sha512-Ov6um4CWd63EvkRavkHG0J/P9XYL55sdkDWPMr7+AIgqh5flHxDRz09/C4e9M94aX30rzJxW4TVX6EBf4Cu2BQ== +"@shikijs/core@1.29.2": + version "1.29.2" + resolved "https://registry.yarnpkg.com/@shikijs/core/-/core-1.29.2.tgz#9c051d3ac99dd06ae46bd96536380c916e552bf3" + integrity sha512-vju0lY9r27jJfOY4Z7+Rt/nIOjzJpZ3y+nYpqtUZInVoXQ/TJZcfGnNOGnKjFdVZb8qexiCuSlZRKcGfhhTTZQ== dependencies: - tslib "^2.2.0" + "@shikijs/engine-javascript" "1.29.2" + "@shikijs/engine-oniguruma" "1.29.2" + "@shikijs/types" "1.29.2" + "@shikijs/vscode-textmate" "^10.0.1" + "@types/hast" "^3.0.4" + hast-util-to-html "^9.0.4" -"@salesforce/ts-types@^1.5.5": - version "1.5.5" - resolved "https://registry.npmjs.org/@salesforce/ts-types/-/ts-types-1.5.5.tgz" - integrity sha512-d4YdsA3MBTJcC6ZdqHe2+yv7MWKsoYmgjTlc56SOy8sROrQ9RjJYaUnj1h1Zi1aWGgkGaNCCAOBomcrhMa4crw== +"@shikijs/engine-javascript@1.29.2": + version "1.29.2" + resolved "https://registry.yarnpkg.com/@shikijs/engine-javascript/-/engine-javascript-1.29.2.tgz#a821ad713a3e0b7798a1926fd9e80116e38a1d64" + integrity sha512-iNEZv4IrLYPv64Q6k7EPpOCE/nuvGiKl7zxdq0WFuRPF5PAE9PRo2JGq/d8crLusM59BRemJ4eOqrFrC4wiQ+A== dependencies: - tslib "^1.10.0" + "@shikijs/types" "1.29.2" + "@shikijs/vscode-textmate" "^10.0.1" + oniguruma-to-es "^2.2.0" -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz" - integrity 
sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== +"@shikijs/engine-oniguruma@1.29.2": + version "1.29.2" + resolved "https://registry.yarnpkg.com/@shikijs/engine-oniguruma/-/engine-oniguruma-1.29.2.tgz#d879717ced61d44e78feab16f701f6edd75434f1" + integrity sha512-7iiOx3SG8+g1MnlzZVDYiaeHe7Ez2Kf2HrJzdmGwkRisT7r4rak0e655AcM/tF9JG/kg5fMNYlLLKglbN7gBqA== + dependencies: + "@shikijs/types" "1.29.2" + "@shikijs/vscode-textmate" "^10.0.1" + +"@shikijs/langs@1.29.2": + version "1.29.2" + resolved "https://registry.yarnpkg.com/@shikijs/langs/-/langs-1.29.2.tgz#4f1de46fde8991468c5a68fa4a67dd2875d643cd" + integrity sha512-FIBA7N3LZ+223U7cJDUYd5shmciFQlYkFXlkKVaHsCPgfVLiO+e12FmQE6Tf9vuyEsFe3dIl8qGWKXgEHL9wmQ== + dependencies: + "@shikijs/types" "1.29.2" -"@sindresorhus/is@^4.0.0": +"@shikijs/themes@1.29.2": + version "1.29.2" + resolved "https://registry.yarnpkg.com/@shikijs/themes/-/themes-1.29.2.tgz#293cc5c83dd7df3fdc8efa25cec8223f3a6acb0d" + integrity sha512-i9TNZlsq4uoyqSbluIcZkmPL9Bfi3djVxRnofUHwvx/h6SRW3cwgBC5SML7vsDcWyukY0eCzVN980rqP6qNl9g== + dependencies: + "@shikijs/types" "1.29.2" + +"@shikijs/types@1.29.2": + version "1.29.2" + resolved "https://registry.yarnpkg.com/@shikijs/types/-/types-1.29.2.tgz#a93fdb410d1af8360c67bf5fc1d1a68d58e21c4f" + integrity sha512-VJjK0eIijTZf0QSTODEXCqinjBn0joAHQ+aPSBzrv4O2d/QSbsMw+ZeSRx03kV34Hy7NzUvV/7NqfYGRLrASmw== + dependencies: + "@shikijs/vscode-textmate" "^10.0.1" + "@types/hast" "^3.0.4" + +"@shikijs/vscode-textmate@^10.0.1": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz#a90ab31d0cc1dfb54c66a69e515bf624fa7b2224" + integrity sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg== + +"@sigstore/bundle@^2.3.2": + version "2.3.2" + resolved "https://registry.yarnpkg.com/@sigstore/bundle/-/bundle-2.3.2.tgz#ad4dbb95d665405fd4a7a02c8a073dbd01e4e95e" + integrity 
sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA== + dependencies: + "@sigstore/protobuf-specs" "^0.3.2" + +"@sigstore/core@^1.0.0", "@sigstore/core@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@sigstore/core/-/core-1.1.0.tgz#5583d8f7ffe599fa0a89f2bf289301a5af262380" + integrity sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg== + +"@sigstore/protobuf-specs@^0.3.2": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz#7dd46d68b76c322873a2ef7581ed955af6f4dcde" + integrity sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ== + +"@sigstore/sign@^2.3.2": + version "2.3.2" + resolved "https://registry.yarnpkg.com/@sigstore/sign/-/sign-2.3.2.tgz#d3d01e56d03af96fd5c3a9b9897516b1233fc1c4" + integrity sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA== + dependencies: + "@sigstore/bundle" "^2.3.2" + "@sigstore/core" "^1.0.0" + "@sigstore/protobuf-specs" "^0.3.2" + make-fetch-happen "^13.0.1" + proc-log "^4.2.0" + promise-retry "^2.0.1" + +"@sigstore/tuf@^2.3.4": + version "2.3.4" + resolved "https://registry.yarnpkg.com/@sigstore/tuf/-/tuf-2.3.4.tgz#da1d2a20144f3b87c0172920cbc8dcc7851ca27c" + integrity sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw== + dependencies: + "@sigstore/protobuf-specs" "^0.3.2" + tuf-js "^2.2.1" + +"@sigstore/verify@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@sigstore/verify/-/verify-1.2.1.tgz#c7e60241b432890dcb8bd8322427f6062ef819e1" + integrity sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g== + dependencies: + "@sigstore/bundle" "^2.3.2" + "@sigstore/core" "^1.1.0" + "@sigstore/protobuf-specs" "^0.3.2" + +"@sindresorhus/is@^4", "@sindresorhus/is@^4.0.0": version "4.6.0" 
resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== -"@sinonjs/commons@^1", "@sinonjs/commons@^1.3.0", "@sinonjs/commons@^1.7.0": - version "1.8.2" - resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.2.tgz" - integrity sha512-sruwd86RJHdsVf/AtBoijDmUqJp3B6hF/DGC23C+JaegnDHaZyewCjoVGTdg3J0uz3Zs7NnIT05OBOmML72lQw== +"@sindresorhus/is@^5.2.0": + version "5.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-5.6.0.tgz#41dd6093d34652cddb5d5bdeee04eafc33826668" + integrity sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g== + +"@sindresorhus/merge-streams@^2.1.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz#719df7fb41766bc143369eaa0dd56d8dc87c9958" + integrity sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg== + +"@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.1": + version "1.8.6" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.6.tgz#80c516a4dc264c2a69115e7578d62581ff455ed9" + integrity sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ== dependencies: type-detect "4.0.8" -"@sinonjs/formatio@^2.0.0": - version "2.0.0" - resolved "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-2.0.0.tgz" - integrity sha512-ls6CAMA6/5gG+O/IdsBcblvnd8qcO/l1TYoNeAzp3wcISOxlPXQEus0mLcdwazEkWjaBdaJ3TaxmNgCLWwvWzg== +"@sinonjs/commons@^3.0.0", "@sinonjs/commons@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.1.tgz#1029357e44ca901a615585f6d27738dbc89084cd" + integrity sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ== dependencies: - samsam 
"1.3.0" + type-detect "4.0.8" -"@sinonjs/formatio@^3.2.1": - version "3.2.2" - resolved "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.2.tgz" - integrity sha512-B8SEsgd8gArBLMD6zpRw3juQ2FVSsmdd7qlevyDqzS9WTCtvF55/gAL+h6gue8ZvPYcdiPdvueM/qm//9XzyTQ== +"@sinonjs/fake-timers@^11.2.2": + version "11.3.1" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-11.3.1.tgz#51d6e8d83ca261ff02c0ab0e68e9db23d5cd5999" + integrity sha512-EVJO7nW5M/F5Tur0Rf2z/QoMo+1Ia963RiMtapiQrEWvY0iBUvADo8Beegwjpnle5BHkyHuoxSTW3jF43H1XRA== dependencies: - "@sinonjs/commons" "^1" - "@sinonjs/samsam" "^3.1.0" + "@sinonjs/commons" "^3.0.1" -"@sinonjs/samsam@^3.1.0": - version "3.3.3" - resolved "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.3.tgz" - integrity sha512-bKCMKZvWIjYD0BLGnNrxVuw4dkWCYsLqFOUWw8VgKF/+5Y+mE7LfHWPIYoDXowH+3a9LsWDMo0uAP8YDosPvHQ== +"@sinonjs/fake-timers@^6.0.0", "@sinonjs/fake-timers@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40" + integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA== dependencies: - "@sinonjs/commons" "^1.3.0" - array-from "^2.1.1" - lodash "^4.17.15" + "@sinonjs/commons" "^1.7.0" + +"@sinonjs/samsam@^5.3.1": + version "5.3.1" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-5.3.1.tgz#375a45fe6ed4e92fca2fb920e007c48232a6507f" + integrity sha512-1Hc0b1TtyfBu8ixF/tpfSHTVWKwCBLY4QJbkgnE7HcwyvT2xArDxb4K7dMgqRm3szI+LJbzmW/s4xxEhv6hwDg== + dependencies: + "@sinonjs/commons" "^1.6.0" + lodash.get "^4.4.2" + type-detect "^4.0.8" + +"@sinonjs/samsam@^8.0.0": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-8.0.2.tgz#e4386bf668ff36c95949e55a38dc5f5892fc2689" + integrity sha512-v46t/fwnhejRSFTGqbpn9u+LQ9xJDse10gNnPgAcxgdoCDMXj/G2asWAC/8Qs+BAZDicX+MNZouXT1A7c83kVw== + dependencies: + "@sinonjs/commons" "^3.0.1" + 
lodash.get "^4.4.2" + type-detect "^4.1.0" -"@sinonjs/text-encoding@^0.7.1": - version "0.7.1" - resolved "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz" - integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== +"@sinonjs/text-encoding@^0.7.1", "@sinonjs/text-encoding@^0.7.2": + version "0.7.3" + resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz#282046f03e886e352b2d5f5da5eb755e01457f3f" + integrity sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA== -"@slack/logger@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@slack/logger/-/logger-3.0.0.tgz#b736d4e1c112c22a10ffab0c2d364620aedcb714" - integrity sha512-DTuBFbqu4gGfajREEMrkq5jBhcnskinhr4+AnfJEk48zhVeEv3XnUKGIX98B74kxhYsIMfApGGySTn7V3b5yBA== +"@slack/logger@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@slack/logger/-/logger-4.0.0.tgz#788303ff1840be91bdad7711ef66ca0cbc7073d2" + integrity sha512-Wz7QYfPAlG/DR+DfABddUZeNgoeY7d1J39OCR2jR+v7VBsB8ezulDK5szTnDDPDwLH5IWhLvXIHlCFZV7MSKgA== dependencies: - "@types/node" ">=12.0.0" + "@types/node" ">=18.0.0" -"@slack/types@^2.8.0": - version "2.8.0" - resolved "https://registry.yarnpkg.com/@slack/types/-/types-2.8.0.tgz#11ea10872262a7e6f86f54e5bcd4f91e3a41fe91" - integrity sha512-ghdfZSF0b4NC9ckBA8QnQgC9DJw2ZceDq0BIjjRSv6XAZBXJdWgxIsYz0TYnWSiqsKZGH2ZXbj9jYABZdH3OSQ== - -"@slack/web-api@^6.9.0": - version "6.9.0" - resolved "https://registry.yarnpkg.com/@slack/web-api/-/web-api-6.9.0.tgz#d829dcfef490dbce8e338912706b6f39dcde3ad2" - integrity sha512-RME5/F+jvQmZHkoP+ogrDbixq1Ms1mBmylzuWq4sf3f7GCpMPWoiZ+WqWk+sism3vrlveKWIgO9R4Qg9fiRyoQ== - dependencies: - "@slack/logger" "^3.0.0" - "@slack/types" "^2.8.0" - "@types/is-stream" "^1.1.0" - "@types/node" ">=12.0.0" - axios "^0.27.2" - eventemitter3 "^3.1.0" - form-data "^2.5.0" +"@slack/types@^2.16.0", "@slack/types@^2.9.0": + 
version "2.16.0" + resolved "https://registry.yarnpkg.com/@slack/types/-/types-2.16.0.tgz#92ba59f9e970440b524423ad694eba4fa4995a86" + integrity sha512-bICnyukvdklXhwxprR3uF1+ZFkTvWTZge4evlCS4G1H1HU6QLY68AcjqzQRymf7/5gNt6Y4OBb4NdviheyZcAg== + +"@slack/web-api@^7.10.0": + version "7.10.0" + resolved "https://registry.yarnpkg.com/@slack/web-api/-/web-api-7.10.0.tgz#e64eaa01eb0e2e7d60fc19d8789698c79afa2a90" + integrity sha512-kT+07JvOqpYH3b/ttVo3iqKIFiHV2NKmD6QUc/F7HrjCgSdSA10zxqi0euXEF2prB49OU7SfjadzQ0WhNc7tiw== + dependencies: + "@slack/logger" "^4.0.0" + "@slack/types" "^2.9.0" + "@types/node" ">=18.0.0" + "@types/retry" "0.12.0" + axios "^1.11.0" + eventemitter3 "^5.0.1" + form-data "^4.0.4" is-electron "2.2.2" - is-stream "^1.1.0" - p-queue "^6.6.1" - p-retry "^4.0.0" + is-stream "^2" + p-queue "^6" + p-retry "^4" + retry "^0.13.1" + +"@smithy/abort-controller@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-4.0.5.tgz#2872a12d0f11dfdcc4254b39566d5f24ab26a4ab" + integrity sha512-jcrqdTQurIrBbUm4W2YdLVMQDoL0sA9DTxYd2s+R/y+2U9NLOP7Xf/YqfSg1FZhlZIYEnvk2mwbyvIfdLEPo8g== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/chunked-blob-reader-native@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-4.0.0.tgz#33cbba6deb8a3c516f98444f65061784f7cd7f8c" + integrity sha512-R9wM2yPmfEMsUmlMlIgSzOyICs0x9uu7UTHoccMyt7BWw8shcGM8HqB355+BZCPBcySvbTYMs62EgEQkNxz2ig== + dependencies: + "@smithy/util-base64" "^4.0.0" + tslib "^2.6.2" + +"@smithy/chunked-blob-reader@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader/-/chunked-blob-reader-5.0.0.tgz#3f6ea5ff4e2b2eacf74cefd737aa0ba869b2e0f6" + integrity sha512-+sKqDBQqb036hh4NPaUiEkYFkTUGYzRsn3EuFhyfQfMy6oGHEUJDurLP9Ufb5dasr/XiAmPNMr6wa9afjQB+Gw== + dependencies: + tslib "^2.6.2" + +"@smithy/config-resolver@^4.1.5": + version 
"4.1.5" + resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-4.1.5.tgz#3cb7cde8d13ca64630e5655812bac9ffe8182469" + integrity sha512-viuHMxBAqydkB0AfWwHIdwf/PRH2z5KHGUzqyRtS/Wv+n3IHI993Sk76VCA7dD/+GzgGOmlJDITfPcJC1nIVIw== + dependencies: + "@smithy/node-config-provider" "^4.1.4" + "@smithy/types" "^4.3.2" + "@smithy/util-config-provider" "^4.0.0" + "@smithy/util-middleware" "^4.0.5" + tslib "^2.6.2" + +"@smithy/core@^3.8.0": + version "3.8.0" + resolved "https://registry.yarnpkg.com/@smithy/core/-/core-3.8.0.tgz#321d03564b753025b92e4476579efcd5c505ab1f" + integrity sha512-EYqsIYJmkR1VhVE9pccnk353xhs+lB6btdutJEtsp7R055haMJp2yE16eSxw8fv+G0WUY6vqxyYOP8kOqawxYQ== + dependencies: + "@smithy/middleware-serde" "^4.0.9" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-body-length-browser" "^4.0.0" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-stream" "^4.2.4" + "@smithy/util-utf8" "^4.0.0" + "@types/uuid" "^9.0.1" + tslib "^2.6.2" + uuid "^9.0.1" + +"@smithy/credential-provider-imds@^4.0.7": + version "4.0.7" + resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.7.tgz#d8bb566ffd8d9e556810b83d6e0b01b39036b810" + integrity sha512-dDzrMXA8d8riFNiPvytxn0mNwR4B3h8lgrQ5UjAGu6T9z/kRg/Xncf4tEQHE/+t25sY8IH3CowcmWi+1U5B1Gw== + dependencies: + "@smithy/node-config-provider" "^4.1.4" + "@smithy/property-provider" "^4.0.5" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + tslib "^2.6.2" + +"@smithy/eventstream-codec@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-4.0.5.tgz#e742a4badaaf985ac9abcf4283ff4c39d7e48438" + integrity sha512-miEUN+nz2UTNoRYRhRqVTJCx7jMeILdAurStT2XoS+mhokkmz1xAPp95DFW9Gxt4iF2VBqpeF9HbTQ3kY1viOA== + dependencies: + "@aws-crypto/crc32" "5.2.0" + "@smithy/types" "^4.3.2" + "@smithy/util-hex-encoding" "^4.0.0" + tslib "^2.6.2" + 
+"@smithy/eventstream-serde-browser@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.0.5.tgz#fbebe76edf542d656fe3b187ac6b1e47a63f735f" + integrity sha512-LCUQUVTbM6HFKzImYlSB9w4xafZmpdmZsOh9rIl7riPC3osCgGFVP+wwvYVw6pXda9PPT9TcEZxaq3XE81EdJQ== + dependencies: + "@smithy/eventstream-serde-universal" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/eventstream-serde-config-resolver@^4.1.3": + version "4.1.3" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.1.3.tgz#59a01611feaef9830da592bf726ee8eef4f2c11d" + integrity sha512-yTTzw2jZjn/MbHu1pURbHdpjGbCuMHWncNBpJnQAPxOVnFUAbSIUSwafiphVDjNV93TdBJWmeVAds7yl5QCkcA== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/eventstream-serde-node@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.0.5.tgz#44f962898cfb3de806725ea5d88e904c7f3955d7" + integrity sha512-lGS10urI4CNzz6YlTe5EYG0YOpsSp3ra8MXyco4aqSkQDuyZPIw2hcaxDU82OUVtK7UY9hrSvgWtpsW5D4rb4g== + dependencies: + "@smithy/eventstream-serde-universal" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/eventstream-serde-universal@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.0.5.tgz#ec34b9999c7db3e057d67acb14ec0c8627c7ae2e" + integrity sha512-JFnmu4SU36YYw3DIBVao3FsJh4Uw65vVDIqlWT4LzR6gXA0F3KP0IXFKKJrhaVzCBhAuMsrUUaT5I+/4ZhF7aw== + dependencies: + "@smithy/eventstream-codec" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/fetch-http-handler@^5.1.1": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-5.1.1.tgz#a444c99bffdf314deb447370429cc3e719f1a866" + integrity 
sha512-61WjM0PWmZJR+SnmzaKI7t7G0UkkNFboDpzIdzSoy7TByUzlxo18Qlh9s71qug4AY4hlH/CwXdubMtkcNEb/sQ== + dependencies: + "@smithy/protocol-http" "^5.1.3" + "@smithy/querystring-builder" "^4.0.5" + "@smithy/types" "^4.3.2" + "@smithy/util-base64" "^4.0.0" + tslib "^2.6.2" + +"@smithy/hash-blob-browser@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/hash-blob-browser/-/hash-blob-browser-4.0.5.tgz#f8f2857e59907c3359dc451a22c1623373115aea" + integrity sha512-F7MmCd3FH/Q2edhcKd+qulWkwfChHbc9nhguBlVjSUE6hVHhec3q6uPQ+0u69S6ppvLtR3eStfCuEKMXBXhvvA== + dependencies: + "@smithy/chunked-blob-reader" "^5.0.0" + "@smithy/chunked-blob-reader-native" "^4.0.0" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/hash-node@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-4.0.5.tgz#16cf8efe42b8b611b1f56f78464b97b27ca6a3ec" + integrity sha512-cv1HHkKhpyRb6ahD8Vcfb2Hgz67vNIXEp2vnhzfxLFGRukLCNEA5QdsorbUEzXma1Rco0u3rx5VTqbM06GcZqQ== + dependencies: + "@smithy/types" "^4.3.2" + "@smithy/util-buffer-from" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/hash-stream-node@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/hash-stream-node/-/hash-stream-node-4.0.5.tgz#823a120823de313e72c0be2cdd440925075665f8" + integrity sha512-IJuDS3+VfWB67UC0GU0uYBG/TA30w+PlOaSo0GPm9UHS88A6rCP6uZxNjNYiyRtOcjv7TXn/60cW8ox1yuZsLg== + dependencies: + "@smithy/types" "^4.3.2" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/invalid-dependency@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-4.0.5.tgz#ed88e209668266b09c4b501f9bd656728b5ece60" + integrity sha512-IVnb78Qtf7EJpoEVo7qJ8BEXQwgC4n3igeJNNKEj/MLYtapnx8A67Zt/J3RXAj2xSO1910zk0LdFiygSemuLow== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/is-array-buffer@^2.2.0": + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz#f84f0d9f9a36601a9ca9381688bd1b726fd39111" + integrity sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA== + dependencies: + tslib "^2.6.2" + +"@smithy/is-array-buffer@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz#55a939029321fec462bcc574890075cd63e94206" + integrity sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw== + dependencies: + tslib "^2.6.2" + +"@smithy/md5-js@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/md5-js/-/md5-js-4.0.5.tgz#77216159386050dbcf6b58f16f4ac14ac5183474" + integrity sha512-8n2XCwdUbGr8W/XhMTaxILkVlw2QebkVTn5tm3HOcbPbOpWg89zr6dPXsH8xbeTsbTXlJvlJNTQsKAIoqQGbdA== + dependencies: + "@smithy/types" "^4.3.2" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/middleware-content-length@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-4.0.5.tgz#c5d6e47f5a9fbba20433602bec9bffaeeb821ff3" + integrity sha512-l1jlNZoYzoCC7p0zCtBDE5OBXZ95yMKlRlftooE5jPWQn4YBPLgsp+oeHp7iMHaTGoUdFqmHOPa8c9G3gBsRpQ== + dependencies: + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/middleware-endpoint@^4.1.18": + version "4.1.18" + resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.18.tgz#81b2f85e3c72b0f1a2d8776e01b0a2968af62c0a" + integrity sha512-ZhvqcVRPZxnZlokcPaTwb+r+h4yOIOCJmx0v2d1bpVlmP465g3qpVSf7wxcq5zZdu4jb0H4yIMxuPwDJSQc3MQ== + dependencies: + "@smithy/core" "^3.8.0" + "@smithy/middleware-serde" "^4.0.9" + "@smithy/node-config-provider" "^4.1.4" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + "@smithy/url-parser" "^4.0.5" + "@smithy/util-middleware" "^4.0.5" + tslib "^2.6.2" + 
+"@smithy/middleware-retry@^4.1.19": + version "4.1.19" + resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-4.1.19.tgz#19c013c1a548e1185cc1bfabfab3f498667c9e89" + integrity sha512-X58zx/NVECjeuUB6A8HBu4bhx72EoUz+T5jTMIyeNKx2lf+Gs9TmWPNNkH+5QF0COjpInP/xSpJGJ7xEnAklQQ== + dependencies: + "@smithy/node-config-provider" "^4.1.4" + "@smithy/protocol-http" "^5.1.3" + "@smithy/service-error-classification" "^4.0.7" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-retry" "^4.0.7" + "@types/uuid" "^9.0.1" + tslib "^2.6.2" + uuid "^9.0.1" + +"@smithy/middleware-serde@^4.0.9": + version "4.0.9" + resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-4.0.9.tgz#71213158bb11c1d632829001ca3f233323fb2a7c" + integrity sha512-uAFFR4dpeoJPGz8x9mhxp+RPjo5wW0QEEIPPPbLXiRRWeCATf/Km3gKIVR5vaP8bN1kgsPhcEeh+IZvUlBv6Xg== + dependencies: + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/middleware-stack@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-4.0.5.tgz#577050d4c0afe816f1ea85f335b2ef64f73e4328" + integrity sha512-/yoHDXZPh3ocRVyeWQFvC44u8seu3eYzZRveCMfgMOBcNKnAmOvjbL9+Cp5XKSIi9iYA9PECUuW2teDAk8T+OQ== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/node-config-provider@^4.1.4": + version "4.1.4" + resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-4.1.4.tgz#42f231b7027e5a7ce003fd80180e586fe814944a" + integrity sha512-+UDQV/k42jLEPPHSn39l0Bmc4sB1xtdI9Gd47fzo/0PbXzJ7ylgaOByVjF5EeQIumkepnrJyfx86dPa9p47Y+w== + dependencies: + "@smithy/property-provider" "^4.0.5" + "@smithy/shared-ini-file-loader" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/node-http-handler@^4.1.1": + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-4.1.1.tgz#dd806d9e08b6e73125040dd0808ab56d16a178e9" + integrity sha512-RHnlHqFpoVdjSPPiYy/t40Zovf3BBHc2oemgD7VsVTFFZrU5erFFe0n52OANZZ/5sbshgD93sOh5r6I35Xmpaw== + dependencies: + "@smithy/abort-controller" "^4.0.5" + "@smithy/protocol-http" "^5.1.3" + "@smithy/querystring-builder" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/property-provider@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-4.0.5.tgz#d3b368b31d5b130f4c30cc0c91f9ebb28d9685fc" + integrity sha512-R/bswf59T/n9ZgfgUICAZoWYKBHcsVDurAGX88zsiUtOTA/xUAPyiT+qkNCPwFn43pZqN84M4MiUsbSGQmgFIQ== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/protocol-http@^5.1.3": + version "5.1.3" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-5.1.3.tgz#86855b528c0e4cb9fa6fb4ed6ba3cdf5960f88f4" + integrity sha512-fCJd2ZR7D22XhDY0l+92pUag/7je2BztPRQ01gU5bMChcyI0rlly7QFibnYHzcxDvccMjlpM/Q1ev8ceRIb48w== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/querystring-builder@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-4.0.5.tgz#158ae170f8ec2d8af6b84cdaf774205a7dfacf68" + integrity sha512-NJeSCU57piZ56c+/wY+AbAw6rxCCAOZLCIniRE7wqvndqxcKKDOXzwWjrY7wGKEISfhL9gBbAaWWgHsUGedk+A== + dependencies: + "@smithy/types" "^4.3.2" + "@smithy/util-uri-escape" "^4.0.0" + tslib "^2.6.2" + +"@smithy/querystring-parser@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-4.0.5.tgz#95706e56aa769f09dc8922d1b19ffaa06946e252" + integrity sha512-6SV7md2CzNG/WUeTjVe6Dj8noH32r4MnUeFKZrnVYsQxpGSIcphAanQMayi8jJLZAWm6pdM9ZXvKCpWOsIGg0w== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/service-error-classification@^4.0.7": + version "4.0.7" + resolved 
"https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-4.0.7.tgz#24072198a8c110d29677762162a5096e29eb4862" + integrity sha512-XvRHOipqpwNhEjDf2L5gJowZEm5nsxC16pAZOeEcsygdjv9A2jdOh3YoDQvOXBGTsaJk6mNWtzWalOB9976Wlg== + dependencies: + "@smithy/types" "^4.3.2" + +"@smithy/shared-ini-file-loader@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.5.tgz#8d8a493276cd82a7229c755bef8d375256c5ebb9" + integrity sha512-YVVwehRDuehgoXdEL4r1tAAzdaDgaC9EQvhK0lEbfnbrd0bd5+CTQumbdPryX3J2shT7ZqQE+jPW4lmNBAB8JQ== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/signature-v4@^5.1.3": + version "5.1.3" + resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-5.1.3.tgz#92a4f6e9ce66730eeb0d996cd0478c5cbaf5b3f5" + integrity sha512-mARDSXSEgllNzMw6N+mC+r1AQlEBO3meEAkR/UlfAgnMzJUB3goRBWgip1EAMG99wh36MDqzo86SfIX5Y+VEaw== + dependencies: + "@smithy/is-array-buffer" "^4.0.0" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + "@smithy/util-hex-encoding" "^4.0.0" + "@smithy/util-middleware" "^4.0.5" + "@smithy/util-uri-escape" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/smithy-client@^4.4.10": + version "4.4.10" + resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-4.4.10.tgz#c4b49c1d1ff9eb813f88f1e425a5dfac25a03180" + integrity sha512-iW6HjXqN0oPtRS0NK/zzZ4zZeGESIFcxj2FkWed3mcK8jdSdHzvnCKXSjvewESKAgGKAbJRA+OsaqKhkdYRbQQ== + dependencies: + "@smithy/core" "^3.8.0" + "@smithy/middleware-endpoint" "^4.1.18" + "@smithy/middleware-stack" "^4.0.5" + "@smithy/protocol-http" "^5.1.3" + "@smithy/types" "^4.3.2" + "@smithy/util-stream" "^4.2.4" + tslib "^2.6.2" + +"@smithy/types@^4.3.1": + version "4.3.1" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-4.3.1.tgz#c11276ea16235d798f47a68aef9f44d3dbb70dd4" + integrity 
sha512-UqKOQBL2x6+HWl3P+3QqFD4ncKq0I8Nuz9QItGv5WuKuMHuuwlhvqcZCoXGfc+P1QmfJE7VieykoYYmrOoFJxA== + dependencies: + tslib "^2.6.2" + +"@smithy/types@^4.3.2": + version "4.3.2" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-4.3.2.tgz#66ac513e7057637de262e41ac15f70cf464c018a" + integrity sha512-QO4zghLxiQ5W9UZmX2Lo0nta2PuE1sSrXUYDoaB6HMR762C0P7v/HEPHf6ZdglTVssJG1bsrSBxdc3quvDSihw== + dependencies: + tslib "^2.6.2" + +"@smithy/url-parser@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-4.0.5.tgz#1824a9c108b85322c5a31f345f608d47d06f073a" + integrity sha512-j+733Um7f1/DXjYhCbvNXABV53NyCRRA54C7bNEIxNPs0YjfRxeMKjjgm2jvTYrciZyCjsicHwQ6Q0ylo+NAUw== + dependencies: + "@smithy/querystring-parser" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/util-base64@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-4.0.0.tgz#8345f1b837e5f636e5f8470c4d1706ae0c6d0358" + integrity sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg== + dependencies: + "@smithy/util-buffer-from" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-body-length-browser@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz#965d19109a4b1e5fe7a43f813522cce718036ded" + integrity sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA== + dependencies: + tslib "^2.6.2" + +"@smithy/util-body-length-node@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz#3db245f6844a9b1e218e30c93305bfe2ffa473b3" + integrity sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg== + dependencies: + tslib "^2.6.2" + +"@smithy/util-buffer-from@^2.2.0": + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz#6fc88585165ec73f8681d426d96de5d402021e4b" + integrity sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA== + dependencies: + "@smithy/is-array-buffer" "^2.2.0" + tslib "^2.6.2" + +"@smithy/util-buffer-from@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz#b23b7deb4f3923e84ef50c8b2c5863d0dbf6c0b9" + integrity sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug== + dependencies: + "@smithy/is-array-buffer" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-config-provider@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz#e0c7c8124c7fba0b696f78f0bd0ccb060997d45e" + integrity sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w== + dependencies: + tslib "^2.6.2" + +"@smithy/util-defaults-mode-browser@^4.0.26": + version "4.0.26" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.26.tgz#fc04cd466bbb0d80e41930af8d6a8c33c48490f2" + integrity sha512-xgl75aHIS/3rrGp7iTxQAOELYeyiwBu+eEgAk4xfKwJJ0L8VUjhO2shsDpeil54BOFsqmk5xfdesiewbUY5tKQ== + dependencies: + "@smithy/property-provider" "^4.0.5" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + bowser "^2.11.0" + tslib "^2.6.2" + +"@smithy/util-defaults-mode-node@^4.0.26": + version "4.0.26" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.26.tgz#adfee8c54301ec4cbabed58cd604995a81b4a8dc" + integrity sha512-z81yyIkGiLLYVDetKTUeCZQ8x20EEzvQjrqJtb/mXnevLq2+w3XCEWTJ2pMp401b6BkEkHVfXb/cROBpVauLMQ== + dependencies: + "@smithy/config-resolver" "^4.1.5" + "@smithy/credential-provider-imds" "^4.0.7" + "@smithy/node-config-provider" "^4.1.4" + 
"@smithy/property-provider" "^4.0.5" + "@smithy/smithy-client" "^4.4.10" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/util-endpoints@^3.0.7": + version "3.0.7" + resolved "https://registry.yarnpkg.com/@smithy/util-endpoints/-/util-endpoints-3.0.7.tgz#9d52f2e7e7a1ea4814ae284270a5f1d3930b3773" + integrity sha512-klGBP+RpBp6V5JbrY2C/VKnHXn3d5V2YrifZbmMY8os7M6m8wdYFoO6w/fe5VkP+YVwrEktW3IWYaSQVNZJ8oQ== + dependencies: + "@smithy/node-config-provider" "^4.1.4" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/util-hex-encoding@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz#dd449a6452cffb37c5b1807ec2525bb4be551e8d" + integrity sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw== + dependencies: + tslib "^2.6.2" + +"@smithy/util-middleware@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-4.0.5.tgz#405caf2a66e175ce8ca6c747fa1245b3f5386879" + integrity sha512-N40PfqsZHRSsByGB81HhSo+uvMxEHT+9e255S53pfBw/wI6WKDI7Jw9oyu5tJTLwZzV5DsMha3ji8jk9dsHmQQ== + dependencies: + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/util-retry@^4.0.7": + version "4.0.7" + resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-4.0.7.tgz#3169450193e917da170a87557fcbdfe0faa86779" + integrity sha512-TTO6rt0ppK70alZpkjwy+3nQlTiqNfoXja+qwuAchIEAIoSZW8Qyd76dvBv3I5bCpE38APafG23Y/u270NspiQ== + dependencies: + "@smithy/service-error-classification" "^4.0.7" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" + +"@smithy/util-stream@^4.2.4": + version "4.2.4" + resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-4.2.4.tgz#fa9f0e2fd5a8a5adbd013066b475ea8f9d4f900f" + integrity sha512-vSKnvNZX2BXzl0U2RgCLOwWaAP9x/ddd/XobPK02pCbzRm5s55M53uwb1rl/Ts7RXZvdJZerPkA+en2FDghLuQ== + dependencies: + "@smithy/fetch-http-handler" "^5.1.1" + "@smithy/node-http-handler" "^4.1.1" + 
"@smithy/types" "^4.3.2" + "@smithy/util-base64" "^4.0.0" + "@smithy/util-buffer-from" "^4.0.0" + "@smithy/util-hex-encoding" "^4.0.0" + "@smithy/util-utf8" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-uri-escape@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz#a96c160c76f3552458a44d8081fade519d214737" + integrity sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg== + dependencies: + tslib "^2.6.2" + +"@smithy/util-utf8@^2.0.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.3.0.tgz#dd96d7640363259924a214313c3cf16e7dd329c5" + integrity sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A== + dependencies: + "@smithy/util-buffer-from" "^2.2.0" + tslib "^2.6.2" + +"@smithy/util-utf8@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-4.0.0.tgz#09ca2d9965e5849e72e347c130f2a29d5c0c863c" + integrity sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow== + dependencies: + "@smithy/util-buffer-from" "^4.0.0" + tslib "^2.6.2" + +"@smithy/util-waiter@^4.0.7": + version "4.0.7" + resolved "https://registry.yarnpkg.com/@smithy/util-waiter/-/util-waiter-4.0.7.tgz#c013cf6a5918c21f8b430b4a825dbac132163f4a" + integrity sha512-mYqtQXPmrwvUljaHyGxYUIIRI3qjBTEb/f5QFi3A6VlxhpmZd5mWXn9W+qUkf2pVE1Hv3SqxefiZOPGdxmO64A== + dependencies: + "@smithy/abort-controller" "^4.0.5" + "@smithy/types" "^4.3.2" + tslib "^2.6.2" "@supercharge/promise-pool@^3.2.0": version "3.2.0" resolved "https://registry.yarnpkg.com/@supercharge/promise-pool/-/promise-pool-3.2.0.tgz#a6ab4afdf798e453a6bb51c4ae340852e1266af8" integrity sha512-pj0cAALblTZBPtMltWOlZTQSLT07jIaFNeM8TWoJD1cQMgDB9mcMlVMoetiB35OzNJpqQ2b+QEtwiR9f20mADg== -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved 
"https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== - dependencies: - defer-to-connect "^1.0.1" - "@szmarczak/http-timer@^4.0.5": version "4.0.6" resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" @@ -1195,25 +3185,50 @@ dependencies: defer-to-connect "^2.0.0" +"@szmarczak/http-timer@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-5.0.1.tgz#c7c1bf1141cdd4751b0399c8fc7b8b664cd5be3a" + integrity sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw== + dependencies: + defer-to-connect "^2.0.1" + +"@tootallnate/quickjs-emscripten@^0.23.0": + version "0.23.0" + resolved "https://registry.yarnpkg.com/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz#db4ecfd499a9765ab24002c3b696d02e6d32a12c" + integrity sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA== + "@tsconfig/node10@^1.0.7": - version "1.0.8" - resolved "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz" - integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg== + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" + integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw== "@tsconfig/node12@^1.0.7": - version "1.0.9" - resolved "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz" - integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw== + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity 
sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== "@tsconfig/node14@^1.0.0": - version "1.0.1" - resolved "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz" - integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg== + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== "@tsconfig/node16@^1.0.2": - version "1.0.2" - resolved "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz" - integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA== + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== + +"@tufjs/canonical-json@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz#a52f61a3d7374833fca945b2549bc30a2dd40d0a" + integrity sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA== + +"@tufjs/models@2.0.1": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@tufjs/models/-/models-2.0.1.tgz#e429714e753b6c2469af3212e7f320a6973c2812" + integrity sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg== + dependencies: + "@tufjs/canonical-json" "2.0.0" + minimatch "^9.0.4" "@types/cacheable-request@^6.0.1": version "6.0.3" @@ -1230,56 +3245,97 @@ resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.5.tgz#db9468cb1b1b5a925b8f34822f1669df0c5472f5" integrity sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg== -"@types/chai@*", 
"@types/chai@^4": - version "4.2.15" - resolved "https://registry.npmjs.org/@types/chai/-/chai-4.2.15.tgz" - integrity sha512-rYff6FI+ZTKAPkJUoyz7Udq3GaoDZnxYDEvdEdFZASiA7PoErltHezDishqQiSDWrGxvxmplH304jyzQmjp0AQ== +"@types/chai@^4.3.14": + version "4.3.20" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.20.tgz#cb291577ed342ca92600430841a00329ba05cecc" + integrity sha512-/pC9HAB5I/xMlc5FP77qjCnI16ChlJfW0tGa0IUcFn38VJrTV6DeZ60NU5KZBtaOZqjdpwTWohz5HU1RrhiYxQ== -"@types/dompurify@^3.0.5": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@types/dompurify/-/dompurify-3.0.5.tgz#02069a2fcb89a163bacf1a788f73cb415dd75cb7" - integrity sha512-1Wg0g3BtQF7sSb27fJQAKck1HECM6zV1EB66j8JH9i3LCjYabJa0FSdiSgsD5K/RbrsR0SiraKacLB+T8ZVYAg== +"@types/columnify@^1.5.4": + version "1.5.4" + resolved "https://registry.yarnpkg.com/@types/columnify/-/columnify-1.5.4.tgz#519fb13a2aa545300927b6fc3aa78327e8f1808e" + integrity sha512-YPEVzmy3kJupUee1ueLuvGspy6U2JHcxt6rYvRsSCEgVC54+KdBFjQ6NG/0koZk69e1bfXwSusgChwdFhvEXMw== + +"@types/configstore@*": + version "6.0.2" + resolved "https://registry.yarnpkg.com/@types/configstore/-/configstore-6.0.2.tgz#0b3d5e176cc5db6c1636a87a333d333693243f3a" + integrity sha512-OS//b51j9uyR3zvwD04Kfs5kHpve2qalQ18JhY/ho3voGYUTPLEG90/ocfKPI48hyHH8T04f7KEEbK6Ue60oZQ== + +"@types/cosmiconfig@^6.0.3": + version "6.0.3" + resolved "https://registry.yarnpkg.com/@types/cosmiconfig/-/cosmiconfig-6.0.3.tgz#03e26eb39c438ff7949ad950f887baf49aa383b8" + integrity sha512-SiTm0f1L6hGrhqAE1bb5GSC9Ay9v9lCSA2mYUH91p6kBMdkbDfuXIyCBmhD6PmSJTiF7ZHS95BTPc0HPqlnWgw== dependencies: - "@types/trusted-types" "*" + cosmiconfig "*" -"@types/find-cache-dir@^3.2.1": - version "3.2.1" - resolved "https://registry.yarnpkg.com/@types/find-cache-dir/-/find-cache-dir-3.2.1.tgz#7b959a4b9643a1e6a1a5fe49032693cc36773501" - integrity sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw== +"@types/cross-spawn@^6.0.6": + version "6.0.6" + 
resolved "https://registry.yarnpkg.com/@types/cross-spawn/-/cross-spawn-6.0.6.tgz#0163d0b79a6f85409e0decb8dcca17147f81fd22" + integrity sha512-fXRhhUkG4H3TQk5dBhQ7m/JDdSNHKwR2BBia62lhwEIq9xGiQKLxd6LymNhn47SjXhsUEPmxi+PKw2OkW4LLjA== + dependencies: + "@types/node" "*" -"@types/fs-extra@^9.0.13": - version "9.0.13" - resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-9.0.13.tgz#7594fbae04fe7f1918ce8b3d213f74ff44ac1f45" - integrity sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA== +"@types/diff@^6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@types/diff/-/diff-6.0.0.tgz#031f27cf57564f3cce825f38fb19fdd4349ad07a" + integrity sha512-dhVCYGv3ZSbzmQaBSagrv1WJ6rXCdkyTcDyoNu1MD8JohI7pR7k8wdZEm+mvdxRKXyHVwckFzWU1vJc+Z29MlA== + +"@types/ejs@^3.1.4": + version "3.1.5" + resolved "https://registry.yarnpkg.com/@types/ejs/-/ejs-3.1.5.tgz#49d738257cc73bafe45c13cb8ff240683b4d5117" + integrity sha512-nv+GSx77ZtXiJzwKdsASqi+YQ5Z7vwHsTP0JY2SiQgjGckkBRKZnk8nIM+7oUZ1VCtuTz0+By4qVR7fqzp/Dfg== + +"@types/expect@^1.20.4": + version "1.20.4" + resolved "https://registry.yarnpkg.com/@types/expect/-/expect-1.20.4.tgz#8288e51737bf7e3ab5d7c77bfa695883745264e5" + integrity sha512-Q5Vn3yjTDyCMV50TB6VRIbQNxSE4OmZR86VSbGaNpfUolm0iePBB4KdEEHmxoY5sT2+2DIvXW0rvMDP2nHZ4Mg== + +"@types/extract-zip@^2.0.3": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/extract-zip/-/extract-zip-2.0.3.tgz#5872005fd818767734c3528ba0bf57fc80a8c848" + integrity sha512-yrO7h+0qOIGxHCmBeL5fKFzR+PBafh9LG6sOLBFFi2JuN+Hj663TAxfnqJh5vkQn963VimrhBF1GZzea3A+4Ig== dependencies: + extract-zip "*" + +"@types/fs-extra@^11.0.4": + version "11.0.4" + resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-11.0.4.tgz#e16a863bb8843fba8c5004362b5a73e17becca45" + integrity sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ== + dependencies: + "@types/jsonfile" "*" "@types/node" "*" 
-"@types/glob@^7.1.1": - version "7.1.3" - resolved "https://registry.npmjs.org/@types/glob/-/glob-7.1.3.tgz" - integrity sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w== +"@types/fs-readdir-recursive@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@types/fs-readdir-recursive/-/fs-readdir-recursive-1.1.3.tgz#a77009e049060b4c399d2bc7d5d41097d8d1c617" + integrity sha512-2v5JKYQO+14CfurtdaL1cbLrjBeFjmcLkD35zDkaaytYSY/57jb2Kz6FbfJ1k+Lx2aaS0zpTR1dwCyqLkjo9vQ== + +"@types/glob@^8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-8.1.0.tgz#b63e70155391b0584dce44e7ea25190bbc38f2fc" + integrity sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w== dependencies: - "@types/minimatch" "*" + "@types/minimatch" "^5.1.2" "@types/node" "*" -"@types/graceful-fs@^4.1.5": - version "4.1.5" - resolved "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz" - integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== +"@types/hast@^3.0.0", "@types/hast@^3.0.4": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa" + integrity sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ== dependencies: - "@types/node" "*" + "@types/unist" "*" -"@types/http-cache-semantics@*": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" - integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== +"@types/http-cache-semantics@*", "@types/http-cache-semantics@^4.0.2": + version "4.0.4" + resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz#b979ebad3919799c979b17c72621c0bc0a31c6c4" + 
integrity sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA== -"@types/is-stream@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@types/is-stream/-/is-stream-1.1.0.tgz#b84d7bb207a210f2af9bed431dc0fbe9c4143be1" - integrity sha512-jkZatu4QVbR60mpIzjINmtS1ZF4a/FqdTUTBeQDVOQ2PYyidtwFKr0B5G6ERukKwliq+7mIXvxyppwzG5EgRYg== +"@types/inquirer@^9.0.3", "@types/inquirer@^9.0.7": + version "9.0.8" + resolved "https://registry.yarnpkg.com/@types/inquirer/-/inquirer-9.0.8.tgz#09d39cc6435ab7b0597f527ff5e2d6ccdf6b14be" + integrity sha512-CgPD5kFGWsb8HJ5K7rfWlifao87m4ph8uioU7OTncJevmE/VLIqAAjfQtko578JZg7/f69K4FgqYym3gNr7DeA== dependencies: - "@types/node" "*" + "@types/through" "*" + rxjs "^7.2.0" "@types/jira-client@^7.1.9": version "7.1.9" @@ -1289,18 +3345,28 @@ "@types/node" "*" "@types/request" "*" -"@types/jsforce@^1.9.35": - version "1.9.38" - resolved "https://registry.npmjs.org/@types/jsforce/-/jsforce-1.9.38.tgz" - integrity sha512-+Iwf5jlDiK8z+zI2LAi4mzln8++5lETv2YofFEATu+dNkrP8LACB76lz2tPsXDx/a+5uW8HQhbwL/SyqIic0cg== +"@types/js-yaml@^4.0.9": + version "4.0.9" + resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.9.tgz#cd82382c4f902fed9691a2ed79ec68c5898af4c2" + integrity sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg== + +"@types/json-schema@^7.0.12", "@types/json-schema@^7.0.15": + version "7.0.15" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/jsonfile@*": + version "6.1.4" + 
resolved "https://registry.yarnpkg.com/@types/jsonfile/-/jsonfile-6.1.4.tgz#614afec1a1164e7d670b4a7ad64df3e7beb7b702" + integrity sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ== dependencies: "@types/node" "*" -"@types/json-schema@^7.0.3": - version "7.0.7" - resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.7.tgz" - integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== - "@types/keyv@^3.1.4": version "3.1.4" resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.4.tgz#3ccdb1c6751b0c7e52300bcdacd5bcbf8faa75b6" @@ -1308,39 +3374,66 @@ dependencies: "@types/node" "*" -"@types/lodash-es@^4.17.6": - version "4.17.6" - resolved "https://registry.yarnpkg.com/@types/lodash-es/-/lodash-es-4.17.6.tgz#c2ed4c8320ffa6f11b43eb89e9eaeec65966a0a0" - integrity sha512-R+zTeVUKDdfoRxpAryaQNRKk3105Rrgx2CFRClIgRGaqDTdjsm8h6IYA8ir584W3ePzkZfst5xIgDwYrlh9HLg== +"@types/linkify-it@^5": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@types/linkify-it/-/linkify-it-5.0.0.tgz#21413001973106cda1c3a9b91eedd4ccd5469d76" + integrity sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q== + +"@types/lodash-es@^4.17.9": + version "4.17.12" + resolved "https://registry.yarnpkg.com/@types/lodash-es/-/lodash-es-4.17.12.tgz#65f6d1e5f80539aa7cfbfc962de5def0cf4f341b" + integrity sha512-0NgftHUcV4v34VhXm8QBSftKVXtbkBG3ViCjs6+eJ5a6y6Mi/jiFGPc1sC7QK+9BFhWrURE3EOggmWaSxL9OzQ== dependencies: "@types/lodash" "*" "@types/lodash@*": - version "4.14.168" - resolved "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.168.tgz" - integrity sha512-oVfRvqHV/V6D1yifJbVRU3TMp8OT6o6BG+U9MkwuJ3U8/CsDHvalRpsxBqivn71ztOFZBTfJMvETbqHiaNSj7Q== + version "4.17.20" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.20.tgz#1ca77361d7363432d29f5e55950d9ec1e1c6ea93" + integrity 
sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA== -"@types/minimatch@*": - version "3.0.3" - resolved "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz" - integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== +"@types/make-fetch-happen@^10.0.4": + version "10.0.4" + resolved "https://registry.yarnpkg.com/@types/make-fetch-happen/-/make-fetch-happen-10.0.4.tgz#67441098ec2090165a8982767b41bc9f96e26f0c" + integrity sha512-jKzweQaEMMAi55ehvR1z0JF6aSVQm/h1BXBhPLOJriaeQBctjw5YbpIGs7zAx9dN0Sa2OO5bcXwCkrlgenoPEA== + dependencies: + "@types/node-fetch" "*" + "@types/retry" "*" + "@types/ssri" "*" -"@types/minimatch@^3.0.3": - version "3.0.5" - resolved "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz" - integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== +"@types/markdown-it@^14.1.1": + version "14.1.2" + resolved "https://registry.yarnpkg.com/@types/markdown-it/-/markdown-it-14.1.2.tgz#57f2532a0800067d9b934f3521429a2e8bfb4c61" + integrity sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog== + dependencies: + "@types/linkify-it" "^5" + "@types/mdurl" "^2" -"@types/mkdirp@^1.0.1": - version "1.0.2" - resolved "https://registry.npmjs.org/@types/mkdirp/-/mkdirp-1.0.2.tgz" - integrity sha512-o0K1tSO0Dx5X6xlU5F1D6625FawhC3dU3iqr25lluNv/+/QIVH8RLNEiVokgIZo+mz+87w/3Mkg/VvQS+J51fQ== +"@types/mdast@^4.0.0": + version "4.0.4" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-4.0.4.tgz#7ccf72edd2f1aa7dd3437e180c64373585804dd6" + integrity sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA== dependencies: - "@types/node" "*" + "@types/unist" "*" + +"@types/mdurl@^2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/mdurl/-/mdurl-2.0.0.tgz#d43878b5b20222682163ae6f897b20447233bdfd" + 
integrity sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg== + +"@types/minimatch@^5.1.2": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== + +"@types/minimist@^1.2.0": + version "1.2.5" + resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.5.tgz#ec10755e871497bcd83efe927e43ec46e8c0747e" + integrity sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag== -"@types/mocha@^8.2.1": - version "8.2.1" - resolved "https://registry.npmjs.org/@types/mocha/-/mocha-8.2.1.tgz" - integrity sha512-NysN+bNqj6E0Hv4CTGWSlPzMW6vTKjDpOteycDkV4IWBsO+PU48JonrPzV9ODjiI2XrjmA05KInLgF5ivZ/YGQ== +"@types/mocha@^10.0.7": + version "10.0.10" + resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-10.0.10.tgz#91f62905e8d23cbd66225312f239454a23bebfa0" + integrity sha512-xPyYSz1cMPnJQhl0CLMH68j3gprKZaTjG3s5Vi+fDgx+uhG9NOXwbVt52eFS8ECyXhyKcjDLCBEqBExKuiZb7Q== "@types/mute-stream@^0.0.4": version "0.0.4" @@ -1349,52 +3442,66 @@ dependencies: "@types/node" "*" -"@types/node-fetch@^2.6.4": - version "2.6.11" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" - integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== +"@types/node-fetch@*", "@types/node-fetch@^2.6.4": + version "2.6.12" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.12.tgz#8ab5c3ef8330f13100a7479e2cd56d3386830a03" + integrity sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA== dependencies: "@types/node" "*" form-data "^4.0.0" -"@types/node@*", "@types/node@^14.14.32": - version "14.14.32" - resolved 
"https://registry.npmjs.org/@types/node/-/node-14.14.32.tgz" - integrity sha512-/Ctrftx/zp4m8JOujM5ZhwzlWLx22nbQJiVqz8/zE15gOeEW+uly3FSX4fGFpcfEvFzXcMCJwq9lGVWgyARXhg== +"@types/node@*", "@types/node@>=13.7.0", "@types/node@>=16.18.31", "@types/node@>=18", "@types/node@>=18.0.0": + version "24.0.15" + resolved "https://registry.yarnpkg.com/@types/node/-/node-24.0.15.tgz#f34fbc973e7d64217106e0c59ed8761e6b51381e" + integrity sha512-oaeTSbCef7U/z7rDeJA138xpG3NuKc64/rZ2qmUFkFJmnMsAPaluIifqyWd8hSSMxyP9oie3dLAqYPblag9KgA== + dependencies: + undici-types "~7.8.0" -"@types/node@>=12.0.0": - version "20.6.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.6.0.tgz#9d7daa855d33d4efec8aea88cd66db1c2f0ebe16" - integrity sha512-najjVq5KN2vsH2U/xyh2opaSEz6cZMR2SetLIlxlj08nOcmPOemJmUK2o4kUzfLqfrWE0PIrNeE16XhYDd3nqg== +"@types/node@20.5.1": + version "20.5.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.5.1.tgz#178d58ee7e4834152b0e8b4d30cbfab578b9bb30" + integrity sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg== "@types/node@^14.0.1": version "14.18.63" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.63.tgz#1788fa8da838dbb5f9ea994b834278205db6ca2b" integrity sha512-fAtCfv4jJg+ExtXhvCkCqUKZ+4ok/JQk01qDKhL5BDDoS3AxKXhV5/MAVUZyQnSEd2GT92fkgZl0pz0Q0AzcIQ== -"@types/node@^18.11.18": - version "18.19.39" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.39.tgz#c316340a5b4adca3aee9dcbf05de385978590593" - integrity sha512-nPwTRDKUctxw3di5b4TfT3I0sWDiWoPQCZjXhvdkINntwr8lcoVCKsTgnXeRubKIlfnV+eN/HYk6Jb40tbcEAQ== +"@types/node@^16.18.28": + version "16.18.126" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.18.126.tgz#27875faa2926c0f475b39a8bb1e546c0176f8d4b" + integrity sha512-OTcgaiwfGFBKacvfwuHzzn1KLxH/er8mluiy8/uM3sGXHaRe73RrSIj01jow9t4kJEW633Ov+cOexXeiApTyAw== + +"@types/node@^18.11.18", "@types/node@^18.19.41": + version "18.19.120" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-18.19.120.tgz#07b3bd73875956d5281fa27e6d77a66415f7d455" + integrity sha512-WtCGHFXnVI8WHLxDAt5TbnCM4eSE+nI0QN2NJtwzcgMhht2eNz6V9evJrk+lwC8bCY8OWV5Ym8Jz7ZEyGnKnMA== dependencies: undici-types "~5.26.4" -"@types/node@^22.1.0": - version "22.4.1" - resolved "https://registry.yarnpkg.com/@types/node/-/node-22.4.1.tgz#9b595d292c65b94c20923159e2ce947731b6fdce" - integrity sha512-1tbpb9325+gPnKK0dMm+/LMriX0vKxf6RnB0SZUqfyVkQ4fMgUSySqhxE/y8Jvs4NyF1yHzTfG9KlnkIODxPKg== +"@types/node@^22.5.5": + version "22.16.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.16.5.tgz#cc46ac3994cd957000d0c11095a0b1dae2ea2368" + integrity sha512-bJFoMATwIGaxxx8VJPeM8TonI8t579oRvgAuT8zFugJsJZgzqv0Fu8Mhp68iecjzG7cnN3mO2dJQ5uUM2EFrgQ== dependencies: - undici-types "~6.19.2" + undici-types "~6.21.0" -"@types/normalize-package-data@^2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz" - integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== +"@types/normalize-package-data@^2.4.0", "@types/normalize-package-data@^2.4.3": + version "2.4.4" + resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz#56e2cc26c397c038fab0e3a917a12d5c5909e901" + integrity sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA== -"@types/parse-json@^4.0.0": - version "4.0.0" - resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/papaparse@^5.3.16": + version "5.3.16" + resolved "https://registry.yarnpkg.com/@types/papaparse/-/papaparse-5.3.16.tgz#320c8f6b8c9be898fe2d0c1b21aee51448bf9dca" + integrity sha512-T3VuKMC2H0lgsjI9buTB3uuKj3EMD2eap1MOuEQuBQ44EnDx/IkGhU6EwiTf9zG3za4SKlmwKAImdDKdNnCsXg== + 
dependencies: + "@types/node" "*" + +"@types/psl@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@types/psl/-/psl-1.1.3.tgz#c1e9febd70e7df248ac9911cdd145454643aa28f" + integrity sha512-Iu174JHfLd7i/XkXY6VDrqSlPvTDQOtQI7wNAXKKOAADJ9TduRLkNdMgjGiMxSttUIZnomv81JAbAbC0DhggxA== "@types/request@*": version "2.48.12" @@ -1407,220 +3514,418 @@ form-data "^2.5.0" "@types/responselike@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29" - integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== + version "1.0.3" + resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.3.tgz#cc29706f0a397cfe6df89debfe4bf5cea159db50" + integrity sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw== dependencies: "@types/node" "*" +"@types/retry@*": + version "0.12.5" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.5.tgz#f090ff4bd8d2e5b940ff270ab39fd5ca1834a07e" + integrity sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw== + "@types/retry@0.12.0": version "0.12.0" resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== -"@types/semver@^7.3.12": - version "7.3.13" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.13.tgz#da4bfd73f49bd541d28920ab0e2bf0ee80f71c91" - integrity sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw== +"@types/semver@^7.5.0": + version "7.7.0" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.7.0.tgz#64c441bdae033b378b6eef7d0c3d77c329b9378e" + integrity 
sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA== + +"@types/set-value@^4.0.3": + version "4.0.3" + resolved "https://registry.yarnpkg.com/@types/set-value/-/set-value-4.0.3.tgz#ac7f5f9715c95c7351e02832df672a112428e587" + integrity sha512-tSuUcLl6kMzI+l0gG7FZ04xbIcynxNIYgWFj91LPAvRcn7W3L1EveXNdVjqFDgAZPjY1qCOsm8Sb1C70SxAPHw== + +"@types/shelljs@^0.8.15": + version "0.8.17" + resolved "https://registry.yarnpkg.com/@types/shelljs/-/shelljs-0.8.17.tgz#8b21b8f77015af263a7e3e5093ff2b77320e45d2" + integrity sha512-IDksKYmQA2W9MkQjiyptbMmcQx+8+Ol6b7h6dPU5S05JyiQDSb/nZKnrMrZqGwgV6VkVdl6/SPCKPDlMRvqECg== + dependencies: + "@types/node" "*" + glob "^11.0.3" -"@types/sinon@*": - version "9.0.11" - resolved "https://registry.npmjs.org/@types/sinon/-/sinon-9.0.11.tgz" - integrity sha512-PwP4UY33SeeVKodNE37ZlOsR9cReypbMJOhZ7BVE0lB+Hix3efCOxiJWiE5Ia+yL9Cn2Ch72EjFTRze8RZsNtg== +"@types/sinon@^10.0.20": + version "10.0.20" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.20.tgz#f1585debf4c0d99f9938f4111e5479fb74865146" + integrity sha512-2APKKruFNCAZgx3daAyACGzWuJ028VVCUDk6o2rw/Z4PXT0ogwdV4KUegW0MwVs0Zu59auPXbbuBJHF12Sx1Eg== dependencies: "@types/sinonjs__fake-timers" "*" "@types/sinonjs__fake-timers@*": - version "6.0.2" - resolved "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.2.tgz" - integrity sha512-dIPoZ3g5gcx9zZEszaxLSVTvMReD3xxyyDnQUjA6IYDG9Ba2AV0otMPs+77sG9ojB4Qr2N2Vk5RnKeuA0X/0bg== + version "8.1.5" + resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.5.tgz#5fd3592ff10c1e9695d377020c033116cc2889f2" + integrity sha512-mQkU2jY8jJEF7YHjHvsQO8+3ughTL1mcnn96igfhONmR+fUPSKIkefQYpSe8bsly2Ep7oQbn/6VG5/9/0qcArQ== + +"@types/sort-array@^4.1.2": + version "4.1.2" + resolved "https://registry.yarnpkg.com/@types/sort-array/-/sort-array-4.1.2.tgz#da2f8d7eb0eccbdf1a7d7e515d5ad64f4aa62a09" + integrity 
sha512-3HGt90YX0/YvNgTOs8fff33YkyOq4tOqNZ1/dMJBpi4dlzD3nxjeRKecpaB3M3gLSwNB1kocuSGMD8cCtLe7vQ== + +"@types/ssri@*": + version "7.1.5" + resolved "https://registry.yarnpkg.com/@types/ssri/-/ssri-7.1.5.tgz#7147b5ba43957cb0f639a3309a3943fc1829d5e8" + integrity sha512-odD/56S3B51liILSk5aXJlnYt99S6Rt9EFDDqGtJM26rKHApHcwyU/UoYHrzKkdkHMAIquGWCuHtQTbes+FRQw== + dependencies: + "@types/node" "*" + +"@types/through@*": + version "0.0.33" + resolved "https://registry.yarnpkg.com/@types/through/-/through-0.0.33.tgz#14ebf599320e1c7851e7d598149af183c6b9ea56" + integrity sha512-HsJ+z3QuETzP3cswwtzt2vEIiHBk/dCcHGhbmG5X3ecnwFD/lPrMpliGXxSCg03L9AhrdwA4Oz/qfspkDW+xGQ== + dependencies: + "@types/node" "*" "@types/tough-cookie@*": version "4.0.5" resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.5.tgz#cb6e2a691b70cb177c6e3ae9c1d2e8b2ea8cd304" integrity sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA== -"@types/trusted-types@*": +"@types/trusted-types@^2.0.7": version "2.0.7" resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.7.tgz#baccb07a970b91707df3a3e8ba6896c57ead2d11" integrity sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw== +"@types/unist@*", "@types/unist@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.3.tgz#acaab0f919ce69cce629c2d4ed2eb4adc1b6c20c" + integrity sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q== + +"@types/update-notifier@^6.0.8": + version "6.0.8" + resolved "https://registry.yarnpkg.com/@types/update-notifier/-/update-notifier-6.0.8.tgz#b145580a5cf218e862ad384bede152118eba0bb0" + integrity sha512-IlDFnfSVfYQD+cKIg63DEXn3RFmd7W1iYtKQsJodcHK9R1yr8aKbKaPKfBxzPpcHCq2DU8zUq4PIPmy19Thjfg== + dependencies: + "@types/configstore" "*" + boxen "^7.1.1" + +"@types/uuid@^10.0.0": + version "10.0.0" + resolved 
"https://registry.yarnpkg.com/@types/uuid/-/uuid-10.0.0.tgz#e9c07fe50da0f53dc24970cca94d619ff03f6f6d" + integrity sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ== + +"@types/uuid@^9.0.1": + version "9.0.8" + resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-9.0.8.tgz#7545ba4fc3c003d6c756f651f3bf163d8f0f29ba" + integrity sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA== + +"@types/vinyl@^2.0.7", "@types/vinyl@^2.0.8": + version "2.0.12" + resolved "https://registry.yarnpkg.com/@types/vinyl/-/vinyl-2.0.12.tgz#17642ca9a8ae10f3db018e9f885da4188db4c6e6" + integrity sha512-Sr2fYMBUVGYq8kj3UthXFAu5UN6ZW+rYr4NACjZQJvHvj+c8lYv0CahmZ2P/r7iUkN44gGUBwqxZkrKXYPb7cw== + dependencies: + "@types/expect" "^1.20.4" + "@types/node" "*" + +"@types/which@^3.0.4": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/which/-/which-3.0.4.tgz#2c3a89be70c56a84a6957a7264639f39ae4340a1" + integrity sha512-liyfuo/106JdlgSchJzXEQCVArk0CvevqPote8F8HgWgJ3dRCcTHgJIsLDuee0kxk/mhbInzIZk3QWSZJ8R+2w== + "@types/wrap-ansi@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz#18b97a972f94f60a679fd5c796d96421b9abb9fd" integrity sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g== -"@types/ws@^7.4.0": - version "7.4.0" - resolved "https://registry.npmjs.org/@types/ws/-/ws-7.4.0.tgz" - integrity sha512-Y29uQ3Uy+58bZrFLhX36hcI3Np37nqWE7ky5tjiDoy1GDZnIwVxS0CgF+s+1bXMzjKBFy+fqaRfb708iNzdinw== +"@types/ws@^8.18.1": + version "8.18.1" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.18.1.tgz#48464e4bf2ddfd17db13d845467f6070ffea4aa9" + integrity sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg== + dependencies: + "@types/node" "*" + +"@types/xml2js@^0.4.14": + version "0.4.14" + resolved 
"https://registry.yarnpkg.com/@types/xml2js/-/xml2js-0.4.14.tgz#5d462a2a7330345e2309c6b549a183a376de8f9a" + integrity sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ== dependencies: "@types/node" "*" -"@types/yarnpkg__lockfile@^1.1.5": - version "1.1.5" - resolved "https://registry.yarnpkg.com/@types/yarnpkg__lockfile/-/yarnpkg__lockfile-1.1.5.tgz#9639020e1fb65120a2f4387db8f1e8b63efdf229" - integrity sha512-8NYnGOctzsI4W0ApsP/BIHD/LnxpJ6XaGf2AZmz4EyDYJMxtprN4279dLNI1CPZcwC9H18qYcaFv4bXi0wmokg== +"@types/yargs-parser@*": + version "21.0.3" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" + integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== + +"@types/yargs@^17.0.32": + version "17.0.33" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d" + integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA== + dependencies: + "@types/yargs-parser" "*" "@types/yauzl@^2.9.1": - version "2.9.1" - resolved "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.1.tgz" - integrity sha512-A1b8SU4D10uoPjwb0lnHmmu8wZhR9d+9o2PKBQT2jU5YPTKsxac6M2qGAdY7VcL+dHHhARVUDmeg0rOrcd9EjA== + version "2.10.3" + resolved "https://registry.yarnpkg.com/@types/yauzl/-/yauzl-2.10.3.tgz#e9b2808b4f109504a03cda958259876f61017999" + integrity sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q== dependencies: "@types/node" "*" -"@typescript-eslint/eslint-plugin@^4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.17.0.tgz" - integrity sha512-/fKFDcoHg8oNan39IKFOb5WmV7oWhQe1K6CDaAVfJaNWEhmfqlA24g+u1lqU5bMH7zuNasfMId4LaYWC5ijRLw== +"@typescript-eslint/eslint-plugin@^6.21.0": + version "6.21.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.21.0.tgz#30830c1ca81fd5f3c2714e524c4303e0194f9cd3" + integrity sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA== dependencies: - "@typescript-eslint/experimental-utils" "4.17.0" - "@typescript-eslint/scope-manager" "4.17.0" - debug "^4.1.1" - functional-red-black-tree "^1.0.1" - lodash "^4.17.15" - regexpp "^3.0.0" - semver "^7.3.2" - tsutils "^3.17.1" - -"@typescript-eslint/experimental-utils@4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-4.17.0.tgz" - integrity sha512-ZR2NIUbnIBj+LGqCFGQ9yk2EBQrpVVFOh9/Kd0Lm6gLpSAcCuLLe5lUCibKGCqyH9HPwYC0GIJce2O1i8VYmWA== - dependencies: - "@types/json-schema" "^7.0.3" - "@typescript-eslint/scope-manager" "4.17.0" - "@typescript-eslint/types" "4.17.0" - "@typescript-eslint/typescript-estree" "4.17.0" - eslint-scope "^5.0.0" - eslint-utils "^2.0.0" - -"@typescript-eslint/parser@^4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-4.17.0.tgz" - integrity sha512-KYdksiZQ0N1t+6qpnl6JeK9ycCFprS9xBAiIrw4gSphqONt8wydBw4BXJi3C11ywZmyHulvMaLjWsxDjUSDwAw== - dependencies: - "@typescript-eslint/scope-manager" "4.17.0" - "@typescript-eslint/types" "4.17.0" - "@typescript-eslint/typescript-estree" "4.17.0" - debug "^4.1.1" + "@eslint-community/regexpp" "^4.5.1" + "@typescript-eslint/scope-manager" "6.21.0" + "@typescript-eslint/type-utils" "6.21.0" + "@typescript-eslint/utils" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" + debug "^4.3.4" + graphemer "^1.4.0" + ignore "^5.2.4" + natural-compare "^1.4.0" + semver "^7.5.4" + ts-api-utils "^1.0.1" + +"@typescript-eslint/parser@^6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.21.0.tgz#af8fcf66feee2edc86bc5d1cf45e33b0630bf35b" + integrity 
sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ== + dependencies: + "@typescript-eslint/scope-manager" "6.21.0" + "@typescript-eslint/types" "6.21.0" + "@typescript-eslint/typescript-estree" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" + debug "^4.3.4" -"@typescript-eslint/scope-manager@4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-4.17.0.tgz" - integrity sha512-OJ+CeTliuW+UZ9qgULrnGpPQ1bhrZNFpfT/Bc0pzNeyZwMik7/ykJ0JHnQ7krHanFN9wcnPK89pwn84cRUmYjw== +"@typescript-eslint/scope-manager@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz#ea8a9bfc8f1504a6ac5d59a6df308d3a0630a2b1" + integrity sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg== dependencies: - "@typescript-eslint/types" "4.17.0" - "@typescript-eslint/visitor-keys" "4.17.0" + "@typescript-eslint/types" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" -"@typescript-eslint/types@4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-4.17.0.tgz" - integrity sha512-RN5z8qYpJ+kXwnLlyzZkiJwfW2AY458Bf8WqllkondQIcN2ZxQowAToGSd9BlAUZDB5Ea8I6mqL2quGYCLT+2g== +"@typescript-eslint/scope-manager@7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz#c928e7a9fc2c0b3ed92ab3112c614d6bd9951c83" + integrity sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA== + dependencies: + "@typescript-eslint/types" "7.18.0" + "@typescript-eslint/visitor-keys" "7.18.0" -"@typescript-eslint/typescript-estree@4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-4.17.0.tgz" - integrity 
sha512-lRhSFIZKUEPPWpWfwuZBH9trYIEJSI0vYsrxbvVvNyIUDoKWaklOAelsSkeh3E2VBSZiNe9BZ4E5tYBZbUczVQ== +"@typescript-eslint/type-utils@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.21.0.tgz#6473281cfed4dacabe8004e8521cee0bd9d4c01e" + integrity sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag== dependencies: - "@typescript-eslint/types" "4.17.0" - "@typescript-eslint/visitor-keys" "4.17.0" - debug "^4.1.1" - globby "^11.0.1" - is-glob "^4.0.1" - semver "^7.3.2" - tsutils "^3.17.1" + "@typescript-eslint/typescript-estree" "6.21.0" + "@typescript-eslint/utils" "6.21.0" + debug "^4.3.4" + ts-api-utils "^1.0.1" + +"@typescript-eslint/types@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.21.0.tgz#205724c5123a8fef7ecd195075fa6e85bac3436d" + integrity sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg== -"@typescript-eslint/visitor-keys@4.17.0": - version "4.17.0" - resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-4.17.0.tgz" - integrity sha512-WfuMN8mm5SSqXuAr9NM+fItJ0SVVphobWYkWOwQ1odsfC014Vdxk/92t4JwS1Q6fCA/ABfCKpa3AVtpUKTNKGQ== +"@typescript-eslint/types@7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.18.0.tgz#b90a57ccdea71797ffffa0321e744f379ec838c9" + integrity sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ== + +"@typescript-eslint/typescript-estree@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz#c47ae7901db3b8bddc3ecd73daff2d0895688c46" + integrity sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ== dependencies: - "@typescript-eslint/types" "4.17.0" - eslint-visitor-keys "^2.0.0" + 
"@typescript-eslint/types" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + minimatch "9.0.3" + semver "^7.5.4" + ts-api-utils "^1.0.1" + +"@typescript-eslint/typescript-estree@7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz#b5868d486c51ce8f312309ba79bdb9f331b37931" + integrity sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA== + dependencies: + "@typescript-eslint/types" "7.18.0" + "@typescript-eslint/visitor-keys" "7.18.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + minimatch "^9.0.4" + semver "^7.6.0" + ts-api-utils "^1.3.0" + +"@typescript-eslint/utils@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.21.0.tgz#4714e7a6b39e773c1c8e97ec587f520840cd8134" + integrity sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ== + dependencies: + "@eslint-community/eslint-utils" "^4.4.0" + "@types/json-schema" "^7.0.12" + "@types/semver" "^7.5.0" + "@typescript-eslint/scope-manager" "6.21.0" + "@typescript-eslint/types" "6.21.0" + "@typescript-eslint/typescript-estree" "6.21.0" + semver "^7.5.4" + +"@typescript-eslint/utils@^7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.18.0.tgz#bca01cde77f95fc6a8d5b0dbcbfb3d6ca4be451f" + integrity sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw== + dependencies: + "@eslint-community/eslint-utils" "^4.4.0" + "@typescript-eslint/scope-manager" "7.18.0" + "@typescript-eslint/types" "7.18.0" + "@typescript-eslint/typescript-estree" "7.18.0" + +"@typescript-eslint/visitor-keys@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz#87a99d077aa507e20e238b11d56cc26ade45fe47" 
+ integrity sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A== + dependencies: + "@typescript-eslint/types" "6.21.0" + eslint-visitor-keys "^3.4.1" + +"@typescript-eslint/visitor-keys@7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz#0564629b6124d67607378d0f0332a0495b25e7d7" + integrity sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg== + dependencies: + "@typescript-eslint/types" "7.18.0" + eslint-visitor-keys "^3.4.3" + +"@ungap/structured-clone@^1.0.0", "@ungap/structured-clone@^1.2.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.3.0.tgz#d06bbb384ebcf6c505fde1c3d0ed4ddffe0aaff8" + integrity sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g== + +"@xmldom/xmldom@^0.9.8": + version "0.9.8" + resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.9.8.tgz#1471e82bdff9e8f20ee8bbe60d4ffa8a516e78d8" + integrity sha512-p96FSY54r+WJ50FIOsCOjyj/wavs8921hG5+kVMmZgKcvIKxMXHTrjNJvRgWa/zuX3B6t2lijLNFaOyuxUH+2A== + +"@yeoman/adapter@^1.4.0": + version "1.6.0" + resolved "https://registry.yarnpkg.com/@yeoman/adapter/-/adapter-1.6.0.tgz#312a6ac7d8be23d34bb453d9bf5c057e387477ea" + integrity sha512-oJw/i6zUDdfsT5VOPuANDDEWs5NhlMq5+rmyZIVdgBTXalbCw6nUfJQOblO2Fnq+Ov6CxlyzDBTWQvL5cEjA+g== + dependencies: + "@types/inquirer" "^9.0.3" + chalk "^5.2.0" + inquirer "^9.2.2" + log-symbols "^7.0.0" + ora "^8.1.0" + p-queue "^8.0.1" + text-table "^0.2.0" -"@ungap/promise-all-settled@1.1.2": - version "1.1.2" - resolved "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz" - integrity sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q== +"@yeoman/conflicter@^2.0.0-alpha.2": + version "2.4.0" + resolved 
"https://registry.yarnpkg.com/@yeoman/conflicter/-/conflicter-2.4.0.tgz#eeb02eedb2f67150299f891026facbb43b2ec3ee" + integrity sha512-5qzY5+i16TSrcJsYshW3eKzflfYMMmVgC8gRtLCe4UxLO4u/dxpb8sN9HqQkpNVjuZV5ouIeOohSq9PSJDXC1g== + dependencies: + "@yeoman/transform" "^1.2.0" + binary-extensions "^2.3.0" + cli-table "^0.3.11" + dateformat "^5.0.3" + diff "^7.0.0" + isbinaryfile "^5.0.2" + mem-fs-editor "^11.1.2" + minimatch "^9.0.5" + p-transform "^4.1.6" + pretty-bytes "^6.1.1" + slash "^5.1.0" + textextensions "^6.11.0" + +"@yeoman/namespace@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@yeoman/namespace/-/namespace-1.0.1.tgz#8034277c4a62bdf41e05e2d890bc674bc320eb99" + integrity sha512-XGdYL0HCoPvrzW7T8bxD6RbCY/B8uvR2jpOzJc/yEwTueKHwoVhjSLjVXkokQAO0LNl8nQFLVZ1aKfr2eFWZeA== -"@xmldom/xmldom@^0.8.6": - version "0.8.6" - resolved "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.6.tgz" - integrity sha512-uRjjusqpoqfmRkTaNuLJ2VohVr67Q5YwDATW3VU7PfzTj6IRaihGrYI7zckGZjxQPBIp63nfvJbM+Yu5ICh0Bg== +"@yeoman/transform@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@yeoman/transform/-/transform-1.2.0.tgz#cad36ee95ccd6c7c79a0a774da1da31624219bf5" + integrity sha512-evb/+2XMEBoHr4BxBeFkjeVTgTS4Qe7VH8DmzZ9kgJK7C7ACPAhW/qBdsKKP1sb5MoeITSaJSVFnc8S1fjZmcw== + dependencies: + "@types/node" "^16.18.28" + minimatch "^9.0.0" + readable-stream "^4.3.0" -"@yarnpkg/lockfile@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" - integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== +"@yeoman/types@^1.1.1": + version "1.6.0" + resolved "https://registry.yarnpkg.com/@yeoman/types/-/types-1.6.0.tgz#78ad8b4367bf5d7260c5453f7f827473c25a70cc" + integrity sha512-7Deh9qpDCoOmQal7Sdicb1v/Qv0KFhLqDHnSKh9NBKvvU4EKAFXmd4MuXFvGTCLQfKhuZ6doiM9CMUd4jGp25w== -JSONStream@^1.2.1, JSONStream@^1.3.5: +JSONStream@^1.3.5: 
version "1.3.5" - resolved "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" integrity sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ== dependencies: jsonparse "^1.2.0" through ">=2.2.7 <3" +abbrev@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-2.0.0.tgz#cf59829b8b4f03f89dda2771cb7f3653828c89bf" + integrity sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ== + +abort-controller-x@^0.4.0, abort-controller-x@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/abort-controller-x/-/abort-controller-x-0.4.3.tgz#ff269788386fabd58a7b6eeaafcb6cf55c2958e0" + integrity sha512-VtUwTNU8fpMwvWGn4xE93ywbogTYsuT+AUxAXOeelbXuQVIwNmC5YLeho9sH4vZ4ITW8414TTAOG1nW6uIVHCA== + abort-controller@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== dependencies: event-target-shim "^5.0.0" -acorn-jsx@^5.3.1: - version "5.3.1" - resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.1.tgz" - integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn-walk@^8.1.1: - version "8.2.0" - resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" - integrity 
sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== - -acorn@^7.4.0: - version "7.4.1" - resolved "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + version "8.3.4" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7" + integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g== + dependencies: + acorn "^8.11.0" -acorn@^8.4.1: - version "8.7.1" - resolved "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz" - integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== +acorn@^8.11.0, acorn@^8.4.1, acorn@^8.9.0: + version "8.15.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816" + integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg== agent-base@6: version "6.0.2" - resolved "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== dependencies: debug "4" -agent-base@^7.0.2, agent-base@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434" - integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg== - dependencies: - debug "^4.3.4" +agent-base@^7.1.0, agent-base@^7.1.2: + version "7.1.4" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.4.tgz#e3cd76d4c548ee895d3c3fd8dc1f6c5b9032e7a8" + integrity 
sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ== agentkeepalive@^4.2.1: - version "4.5.0" - resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" - integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== + version "4.6.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.6.0.tgz#35f73e94b3f40bf65f105219c623ad19c136ea6a" + integrity sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ== dependencies: humanize-ms "^1.2.1" aggregate-error@^3.0.0: version "3.1.0" - resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== dependencies: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv@^6.10.0, ajv@^6.12.3, ajv@^6.12.4: +ajv@^6.12.4: version "6.12.6" - resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: fast-deep-equal "^3.1.1" @@ -1628,124 +3933,117 @@ ajv@^6.10.0, ajv@^6.12.3, ajv@^6.12.4: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^7.0.2: - version "7.2.1" - resolved "https://registry.npmjs.org/ajv/-/ajv-7.2.1.tgz" - integrity sha512-+nu0HDv7kNSOua9apAVc979qd932rrZeb3WOvoiD31A/p1mIE5/9bN2027pE2rOPYEdS3UHzsvof4hY+lM9/WQ== +ajv@^8.11.0, ajv@^8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity 
sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== dependencies: - fast-deep-equal "^3.1.1" + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" json-schema-traverse "^1.0.0" require-from-string "^2.0.2" - uri-js "^4.2.2" -ansi-align@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.0.tgz" - integrity sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw== +ansi-align@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.1.tgz#0cdf12e111ace773a86e9a1fad1225c43cb19a59" + integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w== dependencies: - string-width "^3.0.0" + string-width "^4.1.0" -ansi-colors@4.1.1, ansi-colors@^4.1.1: - version "4.1.1" - resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz" - integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== +ansi-colors@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" + integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== -ansi-escapes@^3.1.0: +ansi-escapes@^3.0.0: version "3.2.0" - resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== -ansi-escapes@^4.2.1, ansi-escapes@^4.3.2: +ansi-escapes@^4.3.2: version "4.3.2" - resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" integrity 
sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== dependencies: type-fest "^0.21.3" -ansi-escapes@^4.3.0: - version "4.3.1" - resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz" - integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA== - dependencies: - type-fest "^0.11.0" - -ansi-red@^0.1.1: - version "0.1.1" - resolved "https://registry.npmjs.org/ansi-red/-/ansi-red-0.1.1.tgz" - integrity sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw= +ansi-escapes@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-5.0.0.tgz#b6a0caf0eef0c41af190e9a749e0c00ec04bb2a6" + integrity sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA== dependencies: - ansi-wrap "0.1.0" + type-fest "^1.0.2" ansi-regex@^2.0.0: version "2.1.1" - resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA== ansi-regex@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== -ansi-regex@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.1.tgz#164daac87ab2d6f6db3a29875e2d1766582dabed" - integrity sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g== - -ansi-regex@^5.0.0, ansi-regex@^5.0.1: +ansi-regex@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" + resolved 
"https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== ansi-regex@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + version "6.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" + integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== ansi-styles@^2.2.1: version "2.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA== -ansi-styles@^3.0.0, ansi-styles@^3.2.0, ansi-styles@^3.2.1: +ansi-styles@^3.2.1: version "3.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" -ansi-styles@^4.0.0, ansi-styles@^4.1.0, ansi-styles@^4.2.0: +ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" 
-ansi-styles@^6.1.0: +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +ansi-styles@^6.1.0, ansi-styles@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== -ansi-wrap@0.1.0: - version "0.1.0" - resolved "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz" - integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768= +ansis@^3.16.0, ansis@^3.17.0, ansis@^3.3.2: + version "3.17.0" + resolved "https://registry.yarnpkg.com/ansis/-/ansis-3.17.0.tgz#fa8d9c2a93fe7d1177e0c17f9eeb562a58a832d7" + integrity sha512-0qWUglt9JEqLFr3w1I1pbrChn1grhaiAR2ocX1PP/flRmxgtwTzPFFFnfIlD6aMOLQZgSuCRlidD70lvx8yhzg== -ansicolors@~0.3.2: - version "0.3.2" - resolved "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz" - integrity sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk= +any-observable@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" + integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== -anymatch@~3.1.1: - version "3.1.1" - resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz" - integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== +anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" append-transform@^2.0.0: version 
"2.0.0" - resolved "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-2.0.0.tgz#99d9d29c7b38391e6f428d28ce136551f0b77e12" integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg== dependencies: default-require-extensions "^3.0.0" @@ -1797,259 +4095,309 @@ archiver@^5.0.0: archy@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz" - integrity sha1-+cjBN1fMHde8N5rHeyxipcKGjEA= + resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" + integrity sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw== + +are-docs-informative@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/are-docs-informative/-/are-docs-informative-0.0.2.tgz#387f0e93f5d45280373d387a59d34c96db321963" + integrity sha512-ixiS0nLNNG5jNQzgZJNoUpBKdo9yTYZMGJ+QgT2jmjR7G7+QHRCc4v6LQ3NgE7EBJq+o0ams3waJwkrlBom8Ig== arg@^4.1.0: version "4.1.3" - resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== -argparse@^1.0.10, argparse@^1.0.7: +arg@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^1.0.7: version "1.0.10" - resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js 
"~1.0.2" argparse@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - array-back@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/array-back/-/array-back-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/array-back/-/array-back-5.0.0.tgz#e196609edcec48376236d163958df76e659a0d36" integrity sha512-kgVWwJReZWmVuWOQKEOohXKJX+nD02JAZ54D1RRWlv8L0NebauKAaFxACKzB74RTclt1+WNz5KHaLRDAPZbDEw== -array-differ@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/array-differ/-/array-differ-3.0.0.tgz" - integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg== - -array-from@^2.1.1: - version "2.1.1" - resolved "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz" - integrity sha1-z+nYwmYoudxa7MYqn12PHzUsEZU= +array-back@^6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/array-back/-/array-back-6.2.2.tgz#f567d99e9af88a6d3d2f9dfcc21db6f9ba9fd157" + integrity sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw== -array-union@^1.0.1, array-union@^1.0.2: +array-buffer-byte-length@^1.0.1, array-buffer-byte-length@^1.0.2: version "1.0.2" - resolved 
"https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz" - integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= + resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz#384d12a37295aec3769ab022ad323a18a51ccf8b" + integrity sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw== dependencies: - array-uniq "^1.0.1" + call-bound "^1.0.3" + is-array-buffer "^3.0.5" + +array-differ@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-4.0.0.tgz#aa3c891c653523290c880022f45b06a42051b026" + integrity sha512-Q6VPTLMsmXZ47ENG3V+wQyZS1ZxXMxFyYzA+Z/GMrJ6yIutAIEf9wTyroTzmGjNfox9/h3GdGBCVh43GVFx4Uw== + +array-ify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece" + integrity sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng== + +array-includes@^3.1.9: + version "3.1.9" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.9.tgz#1f0ccaa08e90cdbc3eb433210f903ad0f17c3f3a" + integrity sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.4" + define-properties "^1.2.1" + es-abstract "^1.24.0" + es-object-atoms "^1.1.1" + get-intrinsic "^1.3.0" + is-string "^1.1.1" + math-intrinsics "^1.1.0" array-union@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz" - integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= +array-union@^3.0.1: + version 
"3.0.1" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-3.0.1.tgz#da52630d327f8b88cfbfb57728e2af5cd9b6b975" + integrity sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw== -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= +array.prototype.findlastindex@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz#cfa1065c81dcb64e34557c9b81d012f6a421c564" + integrity sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.4" + define-properties "^1.2.1" + es-abstract "^1.23.9" + es-errors "^1.3.0" + es-object-atoms "^1.1.1" + es-shim-unscopables "^1.1.0" + +array.prototype.flat@^1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz#534aaf9e6e8dd79fb6b9a9917f839ef1ec63afe5" + integrity sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg== + dependencies: + call-bind "^1.0.8" + define-properties "^1.2.1" + es-abstract "^1.23.5" + es-shim-unscopables "^1.0.2" + +array.prototype.flatmap@^1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz#712cc792ae70370ae40586264629e33aab5dd38b" + integrity sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg== + dependencies: + call-bind "^1.0.8" + define-properties "^1.2.1" + es-abstract "^1.23.5" + es-shim-unscopables "^1.0.2" + +arraybuffer.prototype.slice@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz#9d760d84dbdd06d0cbf92c8849615a1a7ab3183c" + integrity 
sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ== + dependencies: + array-buffer-byte-length "^1.0.1" + call-bind "^1.0.8" + define-properties "^1.2.1" + es-abstract "^1.23.5" + es-errors "^1.3.0" + get-intrinsic "^1.2.6" + is-array-buffer "^3.0.4" arrify@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz" - integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + integrity sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA== -arrify@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz" - integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== +arrify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-3.0.0.tgz#ccdefb8eaf2a1d2ab0da1ca2ce53118759fd46bc" + integrity sha512-tLkvA81vQG/XqE2mjDkGQHoOINtMHtysSnemrmoGe6PydDPMRbVugqyk4A6V/WDWEfm3l+0d8anA9r8cv/5Jaw== -asap@*, asap@~2.0.3: +asap@*: version "2.0.6" - resolved "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz" - integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== asn1@~0.2.3: - version "0.2.4" - resolved "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz" - integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== + version "0.2.6" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" + integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== dependencies: safer-buffer "~2.1.0" assert-plus@1.0.0, assert-plus@^1.0.0: version 
"1.0.0" - resolved "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" - integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== assertion-error@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= +ast-types@^0.13.4: + version "0.13.4" + resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.13.4.tgz#ee0d77b343263965ecc3fb62da16e7222b2b6782" + integrity sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w== + dependencies: + tslib "^2.0.1" -astral-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz" - integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== +async-function@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/async-function/-/async-function-1.0.0.tgz#509c9fca60eaf85034c6829838188e4e4c8ffb2b" + integrity sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA== -async@^2.6.2: - version "2.6.4" - resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221" - integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA== +async-retry@^1.3.3: + version "1.3.3" + resolved 
"https://registry.yarnpkg.com/async-retry/-/async-retry-1.3.3.tgz#0e7f36c04d8478e7a58bdbed80cedf977785f280" + integrity sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw== dependencies: - lodash "^4.17.14" - -async@^3.2.3: - version "3.2.4" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" - integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + retry "0.13.1" -async@^3.2.4: - version "3.2.5" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" - integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== +async@^3.2.3, async@^3.2.4: + version "3.2.6" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.6.tgz#1b0728e14929d51b85b449b7f06e27c1145e38ce" + integrity sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA== asynckit@^0.4.0: version "0.4.0" - resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" - integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== -at-least-node@^1.0.0: +atomic-sleep@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" + integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz" - integrity 
sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== +atomically@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/atomically/-/atomically-2.0.3.tgz#27e47bbe39994d324918491ba7c0edb7783e56cb" + integrity sha512-kU6FmrwZ3Lx7/7y3hPS5QnbJfaohcIul5fGqf7ok+4KklIEk9tJ0C2IQPdacSbVUWv6zVHXEBWoWd6NrVMT7Cw== + dependencies: + stubborn-fs "^1.2.5" + when-exit "^2.1.1" -autolinker@~0.28.0: - version "0.28.1" - resolved "https://registry.npmjs.org/autolinker/-/autolinker-0.28.1.tgz" - integrity sha1-BlK0kYgYefB3XazgzcoyM5QqTkc= +available-typed-arrays@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz#a5cc375d6a03c2efc87a553f3e0b1522def14846" + integrity sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ== dependencies: - gulp-header "^1.7.1" + possible-typed-array-names "^1.0.0" aws-sign2@~0.7.0: version "0.7.0" - resolved "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" - integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== aws4@^1.12.0: - version "1.12.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.12.0.tgz#ce1c9d143389679e253b314241ea9aa5cec980d3" - integrity sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg== + version "1.13.2" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.13.2.tgz#0aa167216965ac9474ccfa83892cfb6b3e1e52ef" + integrity sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw== -aws4@^1.8.0: - version "1.11.0" - resolved "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz" - integrity 
sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== - -axios@^0.21.1: - version "0.21.4" - resolved "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== - dependencies: - follow-redirects "^1.14.0" - -axios@^0.27.2: - version "0.27.2" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.27.2.tgz#207658cc8621606e586c85db4b41a750e756d972" - integrity sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ== - dependencies: - follow-redirects "^1.14.9" - form-data "^4.0.0" - -axios@^1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.4.tgz#4c8ded1b43683c8dd362973c393f3ede24052aa2" - integrity sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw== +axios@^1.11.0, axios@^1.12.1: + version "1.12.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.12.1.tgz#0747b39c5b615f81f93f2c138e6d82a71426937f" + integrity "sha1-B0eznFthX4H5PywTjm2CpxQmk38= sha512-Kn4kbSXpkFHCGE6rBFNwIv0GQs4AvDT80jlveJDKFxjbTYMUeB4QtsdPCv6H8Cm19Je7IU6VFtRl2zWZI0rudQ==" dependencies: follow-redirects "^1.15.6" - form-data "^4.0.0" + form-data "^4.0.4" proxy-from-env "^1.1.0" -azure-devops-node-api@^12.0.0: - version "12.0.0" - resolved "https://registry.yarnpkg.com/azure-devops-node-api/-/azure-devops-node-api-12.0.0.tgz#38b9892f88e86da46246218411920923d8dd6a52" - integrity sha512-S6Il++7dQeMlZDokBDWw7YVoPeb90tWF10pYxnoauRMnkuL91jq9M7SOYRVhtO3FUC5URPkB/qzGa7jTLft0Xw== +azure-devops-node-api@^14.1.0: + version "14.1.0" + resolved "https://registry.yarnpkg.com/azure-devops-node-api/-/azure-devops-node-api-14.1.0.tgz#ec5393de9fa146399deaab6904e41da03edce180" + integrity sha512-QhpgjH1LQ+vgDJ7oBwcmsZ3+o4ZpjLVilw0D3oJQpYpRzN+L39lk5jZDLJ464hLUgsDzWn/Ksv7zLLMKLfoBzA== dependencies: tunnel "0.0.6" - typed-rest-client "^1.8.4" + 
typed-rest-client "2.1.0" + +b4a@^1.6.4: + version "1.6.7" + resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.6.7.tgz#a99587d4ebbfbd5a6e3b21bdb5d5fa385767abe4" + integrity sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg== balanced-match@^1.0.0: version "1.0.2" - resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base64-js@^1.1.2, base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== +balanced-match@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-3.0.1.tgz#e854b098724b15076384266497392a271f4a26a0" + integrity sha512-vjtV3hiLqYDNRoiAv0zC4QaGAMPomEoq83PRmYIofPswwZurCeWR5LByXm7SyoL0Zh5+2z0+HC7jG8gSZJUh0w== -base64-url@^2.2.0: - version "2.3.3" - resolved "https://registry.npmjs.org/base64-url/-/base64-url-2.3.3.tgz" - integrity sha512-dLMhIsK7OplcDauDH/tZLvK7JmUZK3A7KiQpjNzsBrM6Etw7hzNI1tLEywqJk9NnwkgWuFKSlx/IUO7vF6Mo8Q== - -base@^0.11.1: - version "0.11.2" - resolved "https://registry.npmjs.org/base/-/base-0.11.2.tgz" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" +bare-events@^2.2.0, bare-events@^2.5.4: + version "2.6.0" + resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.6.0.tgz#11d9506da109e363a2f3af050fbb005ccdb3ee8f" + integrity 
sha512-EKZ5BTXYExaNqi3I3f9RtEsaI/xBSGjE0XZCZilPzFAV/goswFHuPd9jEZlPIZ/iNZJwDSao9qRiScySz7MbQg== -bash-glob@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/bash-glob/-/bash-glob-2.0.0.tgz#a8ef19450783403ed93fccca2dbe09f2cf6320dc" - integrity sha512-53/NJ+t2UAkEYgQPO6aFjbx1Ue8vNNXCYaA4EljNKP1SR8A9dSQQoBmYWR8BLXO0/NDRJEMSJ4BxWihi//m3Kw== +bare-fs@^4.0.1: + version "4.1.6" + resolved "https://registry.yarnpkg.com/bare-fs/-/bare-fs-4.1.6.tgz#0925521e7310f65cb1f154cab264f0b647a7cdef" + integrity sha512-25RsLF33BqooOEFNdMcEhMpJy8EoR88zSMrnOQOaM3USnOK2VmaJ1uaQEwPA6AQjrv1lXChScosN6CzbwbO9OQ== dependencies: - bash-path "^1.0.1" - component-emitter "^1.2.1" - cross-spawn "^5.1.0" - each-parallel-async "^1.0.0" - extend-shallow "^2.0.1" - is-extglob "^2.1.1" - is-glob "^4.0.0" + bare-events "^2.5.4" + bare-path "^3.0.0" + bare-stream "^2.6.4" -bash-path@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/bash-path/-/bash-path-1.0.3.tgz#dbc9efbdf18b1c11413dcb59b960e6aa56c84258" - integrity sha512-mGrYvOa6yTY/qNCiZkPFJqWmODK68y6kmVRAJ1NNbWlNoJrUrsFxu7FU2EKg7gbrer6ttrKkF2s/E/lhRy7/OA== +bare-os@^3.0.1: + version "3.6.1" + resolved "https://registry.yarnpkg.com/bare-os/-/bare-os-3.6.1.tgz#9921f6f59edbe81afa9f56910658422c0f4858d4" + integrity sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g== + +bare-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bare-path/-/bare-path-3.0.0.tgz#b59d18130ba52a6af9276db3e96a2e3d3ea52178" + integrity sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw== + dependencies: + bare-os "^3.0.1" + +bare-stream@^2.6.4: + version "2.6.5" + resolved "https://registry.yarnpkg.com/bare-stream/-/bare-stream-2.6.5.tgz#bba8e879674c4c27f7e27805df005c15d7a2ca07" + integrity sha512-jSmxKJNJmHySi6hC42zlZnq00rga4jjxcgNZjY9N5WlOe/iOoGRtdwGsHzQv2RlH2KOYMwGUXhf2zXd32BA9RA== dependencies: - arr-union "^3.1.0" - is-windows "^1.0.1" + 
streamx "^2.21.0" + +base64-js@^1.3.1, base64-js@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +base64url@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d" + integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== + +basic-ftp@^5.0.2: + version "5.0.5" + resolved "https://registry.yarnpkg.com/basic-ftp/-/basic-ftp-5.0.5.tgz#14a474f5fffecca1f4f406f1c26b18f800225ac0" + integrity sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg== bcrypt-pbkdf@^1.0.0: version "1.0.2" - resolved "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz" - integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== dependencies: tweetnacl "^0.14.3" @@ -2058,15 +4406,30 @@ before-after-hook@^2.1.0, before-after-hook@^2.2.0: resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== +before-after-hook@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-3.0.2.tgz#d5665a5fa8b62294a5aa0a499f933f4a1016195d" + integrity sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A== + big-integer@^1.6.17: version "1.6.52" resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.52.tgz#60a887f3047614a8e1bffe5d7173490a97dc8c85" 
integrity sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg== -binary-extensions@^2.0.0: - version "2.2.0" - resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== +bin-links@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/bin-links/-/bin-links-4.0.4.tgz#c3565832b8e287c85f109a02a17027d152a58a63" + integrity sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA== + dependencies: + cmd-shim "^6.0.0" + npm-normalize-package-bin "^3.0.0" + read-cmd-shim "^4.0.0" + write-file-atomic "^5.0.0" + +binary-extensions@^2.0.0, binary-extensions@^2.2.0, binary-extensions@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" + integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== binary@~0.3.0: version "0.3.0" @@ -2076,15 +4439,17 @@ binary@~0.3.0: buffers "~0.1.1" chainsaw "~0.1.0" -binaryextensions@^2.1.2: - version "2.3.0" - resolved "https://registry.npmjs.org/binaryextensions/-/binaryextensions-2.3.0.tgz" - integrity sha512-nAihlQsYGyc5Bwq6+EsubvANYGExeJKHDO3RjnvwU042fawQTQfM3Kxn7IHUXQOz4bzfwsGYYHGSvXyW4zOGLg== +binaryextensions@^6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/binaryextensions/-/binaryextensions-6.11.0.tgz#c36b3e6b5c59e621605709b099cda8dda824cc72" + integrity sha512-sXnYK/Ij80TO3lcqZVV2YgfKN5QjUWIRk/XSm2J/4bd/lPko3lvk0O4ZppH6m+6hB2/GTu+ptNwVFe1xh+QLQw== + dependencies: + editions "^6.21.0" -bitbucket@^2.11.0: - version "2.11.0" - resolved "https://registry.yarnpkg.com/bitbucket/-/bitbucket-2.11.0.tgz#21d6f27f7e6acd1fa215d957aa81b369be5d2802" - integrity sha512-YoUekxzBybCnwbh+9IzKE0sEWIp/nz1JId+yGc5qXFCFN1cZnUCDSesWYQokG1NvXXNE4oPdRezhwgyjBhIckg== 
+bitbucket@^2.12.0: + version "2.12.0" + resolved "https://registry.yarnpkg.com/bitbucket/-/bitbucket-2.12.0.tgz#bb13796502c1d3ace0143fc01777140e7e18e78b" + integrity sha512-YqaiTPEmn5mkwdU2gGcJZcQ6B8/DhCHhc3SSYqSpnef6nSTTSa/2GSBoLEgPLqAuqrQITGKq8MgYkfDMtnJPuw== dependencies: before-after-hook "^2.1.0" deepmerge "^4.2.2" @@ -2094,7 +4459,7 @@ bitbucket@^2.11.0: bl@^4.0.3, bl@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== dependencies: buffer "^5.5.0" @@ -2106,100 +4471,107 @@ bluebird@^2.6.2: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-2.11.0.tgz#534b9033c022c9579c56ba3b3e5a5caafbb650e1" integrity sha512-UfFSr22dmHPQqPP9XWHRhq+gWnHCYguQGkXQlbyPtW5qTnhFWA8/iXg765tH0cAjy7l/zPJ1aBTO0g5XgA7kvQ== +bluebird@^3.7.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + bluebird@~3.4.1: version "3.4.7" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3" integrity sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA== -boxen@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/boxen/-/boxen-5.0.0.tgz" - integrity sha512-5bvsqw+hhgUi3oYGK0Vf4WpIkyemp60WBInn7+WNfoISzAqk/HX4L7WNROq38E6UR/y3YADpv6pEm4BfkeEAdA== +bowser@^2.11.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== + +boxen@^7.1.1: + version "7.1.1" + resolved 
"https://registry.yarnpkg.com/boxen/-/boxen-7.1.1.tgz#f9ba525413c2fec9cdb88987d835c4f7cad9c8f4" + integrity sha512-2hCgjEmP8YLWQ130n2FerGv7rYpfBmnmp9Uy2Le1vge6X3gZIfSmEzP5QTDElFxcvVcXlEn8Aq6MU/PZygIOog== dependencies: - ansi-align "^3.0.0" - camelcase "^6.2.0" - chalk "^4.1.0" - cli-boxes "^2.2.1" - string-width "^4.2.0" - type-fest "^0.20.2" - widest-line "^3.1.0" - wrap-ansi "^7.0.0" + ansi-align "^3.0.1" + camelcase "^7.0.1" + chalk "^5.2.0" + cli-boxes "^3.0.0" + string-width "^5.1.2" + type-fest "^2.13.0" + widest-line "^4.0.1" + wrap-ansi "^8.1.0" + +boxen@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-8.0.1.tgz#7e9fcbb45e11a2d7e6daa8fdcebfc3242fc19fe3" + integrity sha512-F3PH5k5juxom4xktynS7MoFY+NUWH5LC4CnH11YB8NPew+HLpmBLCybSAEyb2F+4pRXhuhWqFesoQd6DAyc2hw== + dependencies: + ansi-align "^3.0.1" + camelcase "^8.0.0" + chalk "^5.3.0" + cli-boxes "^3.0.0" + string-width "^7.2.0" + type-fest "^4.21.0" + widest-line "^5.0.0" + wrap-ansi "^9.0.0" brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + version "1.1.12" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.12.tgz#ab9b454466e5a8cc3a187beaad580412a9c5b843" + integrity sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + version "2.0.2" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.2.tgz#54fc53237a613d854c7bd37463aad17df87214e7" + integrity 
sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ== dependencies: balanced-match "^1.0.0" -braces@^2.3.1: - version "2.3.2" - resolved "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== +brace-expansion@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-4.0.1.tgz#3387e13eaa2992025d05ea47308f77e4a8dedd1e" + integrity sha512-YClrbvTCXGe70pU2JiEiPLYXO9gQkyxYeKpJIQHVS/gOs6EWMQP2RYBwjFLNT322Ji8TOC3IMPfsYCedNpzKfA== dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" + balanced-match "^3.0.0" -braces@^3.0.1, braces@^3.0.2, braces@~3.0.2: +braces@^3.0.3, braces@~3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" - integrity "sha1-SQMy9AkZRSJy1VqEgK3AxEE1h4k= sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== dependencies: fill-range "^7.1.1" -brotli@^1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/brotli/-/brotli-1.3.3.tgz#7365d8cc00f12cf765d2b2c898716bcf4b604d48" - integrity sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg== - dependencies: - base64-js "^1.1.2" - -browser-stdout@1.3.1: +browser-stdout@^1.3.1: version "1.3.1" - resolved "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" + resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" integrity 
sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== -browserslist@^4.14.5: - version "4.16.6" - resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.16.6.tgz" - integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== +browserslist@^4.24.0, browserslist@^4.25.1: + version "4.25.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.25.1.tgz#ba9e8e6f298a1d86f829c9b975e07948967bb111" + integrity sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw== dependencies: - caniuse-lite "^1.0.30001219" - colorette "^1.2.2" - electron-to-chromium "^1.3.723" - escalade "^3.1.1" - node-releases "^1.1.71" + caniuse-lite "^1.0.30001726" + electron-to-chromium "^1.5.173" + node-releases "^2.0.19" + update-browserslist-db "^1.1.3" buffer-crc32@^0.2.1, buffer-crc32@^0.2.13, buffer-crc32@~0.2.3: version "0.2.13" - resolved "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz" - integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI= + resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" + integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== -buffer-equal-constant-time@1.0.1: +buffer-equal-constant-time@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz" - integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk= + resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== buffer-from@^1.0.0: - version "1.1.1" - resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz" - integrity 
sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== buffer-indexof-polyfill@~1.0.0: version "1.0.2" @@ -2208,54 +4580,112 @@ buffer-indexof-polyfill@~1.0.0: buffer@^5.2.1, buffer@^5.5.0: version "5.7.1" - resolved "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== dependencies: base64-js "^1.3.1" ieee754 "^1.1.13" +buffer@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.2.1" + buffers@~0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb" integrity sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ== -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== +builtin-modules@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== + +bundle-name@^4.1.0: + version "4.1.0" + resolved 
"https://registry.yarnpkg.com/bundle-name/-/bundle-name-4.1.0.tgz#f3b96b34160d6431a19d7688135af7cfb8797889" + integrity sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q== + dependencies: + run-applescript "^7.0.0" + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +cacache@^18.0.0, cacache@^18.0.3: + version "18.0.4" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-18.0.4.tgz#4601d7578dadb59c66044e157d02a3314682d6a5" + integrity sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ== + dependencies: + "@npmcli/fs" "^3.1.0" + fs-minipass "^3.0.0" + glob "^10.2.2" + lru-cache "^10.0.1" + minipass "^7.0.3" + minipass-collect "^2.0.1" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + p-map "^4.0.0" + ssri "^10.0.0" + tar "^6.1.11" + unique-filename "^3.0.0" + +cacache@^19.0.1: + version "19.0.1" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-19.0.1.tgz#3370cc28a758434c85c2585008bd5bdcff17d6cd" + integrity sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ== + dependencies: + "@npmcli/fs" "^4.0.0" + fs-minipass "^3.0.0" + glob "^10.2.2" + lru-cache "^10.0.1" + minipass "^7.0.3" + minipass-collect "^2.0.1" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + p-map "^7.0.2" + ssri "^12.0.0" + tar "^7.4.3" + unique-filename "^4.0.0" + +cache-point@^3.0.0, cache-point@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/cache-point/-/cache-point-3.0.1.tgz#4a1997794695be780e1d080235aa7a289161f181" + integrity sha512-itTIMLEKbh6Dw5DruXbxAgcyLnh/oPGVLBfTPqBOftASxHe8bAeXy7JkO4F0LvHqht7XqP5O/09h5UcHS2w0FA== dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value 
"^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" + array-back "^6.2.2" cacheable-lookup@^5.0.3: version "5.0.4" resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== -cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" +cacheable-lookup@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz#3476a8215d046e5a3202a9209dd13fec1f933a27" + integrity sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w== + +cacheable-request@^10.2.8: + version "10.2.14" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-10.2.14.tgz#eb915b665fda41b79652782df3f553449c406b9d" + integrity sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ== + dependencies: + "@types/http-cache-semantics" "^4.0.2" + get-stream "^6.0.1" + http-cache-semantics "^4.1.1" + keyv "^4.5.3" + mimic-response "^4.0.0" + normalize-url "^8.0.0" + responselike "^3.0.0" cacheable-request@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.2.tgz#ea0d0b889364a25854757301ca12b2da77f91d27" - integrity sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew== + version "7.0.4" + resolved 
"https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.4.tgz#7a33ebf08613178b403635be7b899d3e69bbe817" + integrity sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg== dependencies: clone-response "^1.0.2" get-stream "^5.1.0" @@ -2267,7 +4697,7 @@ cacheable-request@^7.0.2: caching-transform@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-4.0.0.tgz#00d297a4206d71e2163c39eaffa8157ac0651f0f" integrity sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA== dependencies: hasha "^5.0.0" @@ -2275,68 +4705,117 @@ caching-transform@^4.0.0: package-hash "^4.0.0" write-file-atomic "^3.0.0" -call-bind@^1.0.0: +call-bind-apply-helpers@^1.0.0, call-bind-apply-helpers@^1.0.1, call-bind-apply-helpers@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + resolved "https://registry.yarnpkg.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz#4b5428c222be985d79c3d82657479dbe0b59b2d6" + integrity sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ== dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" + es-errors "^1.3.0" + function-bind "^1.1.2" -call-me-maybe@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz" - integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= +call-bind@^1.0.7, call-bind@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.8.tgz#0736a9660f537e3388826f440d5ec45f744eaa4c" + integrity sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww== + 
dependencies: + call-bind-apply-helpers "^1.0.0" + es-define-property "^1.0.0" + get-intrinsic "^1.2.4" + set-function-length "^1.2.2" + +call-bound@^1.0.2, call-bound@^1.0.3, call-bound@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/call-bound/-/call-bound-1.0.4.tgz#238de935d2a2a692928c538c7ccfa91067fd062a" + integrity sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg== + dependencies: + call-bind-apply-helpers "^1.0.2" + get-intrinsic "^1.3.0" callsites@^3.0.0: version "3.1.0" - resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== +camel-case@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-keys@^6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0" + integrity sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg== + dependencies: + camelcase "^5.3.1" + map-obj "^4.0.0" + quick-lru "^4.0.1" + +camelcase@6, camelcase@^6.0.0, camelcase@^6.2.1: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + camelcase@^5.0.0, camelcase@^5.3.1: version "5.3.1" - resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" + resolved 
"https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^6.0.0, camelcase@^6.2.0: - version "6.2.0" - resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz" - integrity sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg== +camelcase@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-7.0.1.tgz#f02e50af9fd7782bc8b88a3558c32fd3a388f048" + integrity sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw== -caniuse-lite@^1.0.30001219: - version "1.0.30001232" - resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001232.tgz" - integrity sha512-e4Gyp7P8vqC2qV2iHA+cJNf/yqUKOShXQOJHQt81OHxlIZl/j/j3soEA0adAQi8CPUQgvOdDENyQ5kd6a6mNSg== +camelcase@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-8.0.0.tgz#c0d36d418753fb6ad9c5e0437579745c1c14a534" + integrity sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA== -capture-stack-trace@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz" - integrity sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw== +caniuse-lite@^1.0.30001726: + version "1.0.30001727" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz#22e9706422ad37aa50556af8c10e40e2d93a8b85" + integrity sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q== -cardinal@^2.1.1: - version "2.1.1" - resolved "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz" - integrity sha1-fMEFXYItISlU0HsIXeolHMe8VQU= +capital-case@^1.0.4: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669" + integrity sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A== dependencies: - ansicolors "~0.3.2" - redeyed "~2.1.0" + no-case "^3.0.4" + tslib "^2.0.3" + upper-case-first "^2.0.2" caseless@~0.12.0: version "0.12.0" - resolved "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" - integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== -chai@^4: - version "4.3.3" - resolved "https://registry.npmjs.org/chai/-/chai-4.3.3.tgz" - integrity sha512-MPSLOZwxxnA0DhLE84klnGPojWFK5KuhP7/j5dTsxpr2S3XlkqJP5WbyYl1gCTWvG2Z5N+HD4F472WsbEZL6Pw== +catharsis@^0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/catharsis/-/catharsis-0.9.0.tgz#40382a168be0e6da308c277d3a2b3eb40c7d2121" + integrity sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A== + dependencies: + lodash "^4.17.15" + +ccount@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5" + integrity sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg== + +chai@^4.3.10: + version "4.5.0" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.5.0.tgz#707e49923afdd9b13a8b0b47d33d732d13812fd8" + integrity sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw== dependencies: assertion-error "^1.1.0" - check-error "^1.0.2" - deep-eql "^3.0.1" - get-func-name "^2.0.0" + check-error "^1.0.3" + deep-eql "^4.1.3" + get-func-name "^2.0.2" + loupe "^2.3.6" pathval "^1.1.1" - type-detect "^4.0.5" + type-detect "^4.1.0" chainsaw@~0.1.0: version "0.1.0" @@ 
-2345,10 +4824,17 @@ chainsaw@~0.1.0: dependencies: traverse ">=0.3.0 <0.4" -chalk@^1.0.0: +chalk-template@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/chalk-template/-/chalk-template-0.4.0.tgz#692c034d0ed62436b9062c1707fadcd0f753204b" + integrity sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg== + dependencies: + chalk "^4.1.2" + +chalk@^1.0.0, chalk@^1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A== dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" @@ -2356,193 +4842,205 @@ chalk@^1.0.0: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2: +chalk@^2.4.1: version "2.4.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.0.0, chalk@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz" - integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^4.0.2, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity 
sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^5.0.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.1.2.tgz#d957f370038b75ac572471e83be4c5ca9f8e8c45" - integrity sha512-E5CkT4jWURs1Vy5qGJye+XwCkNj7Od3Af7CP6SujMetSMkLs8Do2RWJK5yx1wamHV/op8Rz+9rltjaTQWDnEFQ== +chalk@^5.0.0, chalk@^5.2.0, chalk@^5.3.0, chalk@^5.6.0: + version "5.6.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.6.0.tgz#a1a8d294ea3526dbb77660f12649a08490e33ab8" + integrity sha512-46QrSQFyVSEyYAgQ22hQ+zDa60YHA4fBstHmtSApj1Y5vKtG27fWowW03jCk5KcbXEWPZUIR894aARCA/G1kfQ== + +change-case@^4, change-case@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12" + integrity sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A== + dependencies: + camel-case "^4.1.2" + capital-case "^1.0.4" + constant-case "^3.0.4" + dot-case "^3.0.4" + header-case "^2.0.4" + no-case "^3.0.4" + param-case "^3.0.4" + pascal-case "^3.1.2" + path-case "^3.0.4" + sentence-case "^3.0.4" + snake-case "^3.0.4" + tslib "^2.0.3" + +character-entities-html4@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-2.1.0.tgz#1f1adb940c971a4b22ba39ddca6b618dc6e56b2b" + integrity sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA== + +character-entities-legacy@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz#76bc83a90738901d7bc223a9e93759fdd560125b" + integrity sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ== chardet@^0.7.0: version "0.7.0" - resolved "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz" + resolved 
"https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== -check-error@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" - integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII= +chardet@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-2.1.0.tgz#1007f441a1ae9f9199a4a67f6e978fb0aa9aa3fe" + integrity sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA== + +check-error@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.3.tgz#a6502e4312a7ee969f646e83bb3ddd56281bd694" + integrity sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg== + dependencies: + get-func-name "^2.0.2" -chokidar@3.5.1: - version "3.5.1" - resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz" - integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== +chokidar@^3.5.2, chokidar@^3.5.3: + version "3.6.0" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b" + integrity sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw== dependencies: - anymatch "~3.1.1" + anymatch "~3.1.2" braces "~3.0.2" - glob-parent "~5.1.0" + glob-parent "~5.1.2" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" - readdirp "~3.5.0" + readdirp "~3.6.0" optionalDependencies: - fsevents "~2.3.1" + fsevents "~2.3.2" chownr@^1.1.1: version "1.1.4" - resolved "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity 
sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== -ci-info@^2.0.0: +chownr@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" + integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== + +chownr@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" + integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== + +chrome-launcher@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/chrome-launcher/-/chrome-launcher-1.2.0.tgz#bba61f558f450aef70bbda1f011c83c31c129302" + integrity sha512-JbuGuBNss258bvGil7FT4HKdC3SC2K7UAEUqiPy3ACS3Yxo3hAW6bvFpCu2HsIJLgTqxgEX6BkujvzZfLpUD0Q== + dependencies: + "@types/node" "*" + escape-string-regexp "^4.0.0" + is-wsl "^2.2.0" + lighthouse-logger "^2.0.1" + +chromium-bidi@0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/chromium-bidi/-/chromium-bidi-0.11.0.tgz#9c3c42ee7b42d8448e9fce8d649dc8bfbcc31153" + integrity sha512-6CJWHkNRoyZyjV9Rwv2lYONZf1Xm0IuDyNq97nwSsxxP3wf5Bwy15K5rOvVKMtJ127jJBmxFUanSAOjgFRxgrA== + dependencies: + mitt "3.0.1" + zod "3.23.8" + +chromium-bidi@7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/chromium-bidi/-/chromium-bidi-7.1.1.tgz#22da11f1022f549e66cf86d213ebde427328576f" + integrity sha512-L2BKQ0rSLADgbPMIdDh3wnYHs3EiUiMay2Sq0CTolheaADmWIf6Pe+T9LJRcnh5rcMz0U7MVk0cQVvKsGRMa1g== + dependencies: + mitt "^3.0.1" + zod "^3.24.1" -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz" - integrity 
sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== +ci-info@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-4.3.0.tgz#c39b1013f8fdbd28cd78e62318357d02da160cd7" + integrity sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ== + +clean-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/clean-regexp/-/clean-regexp-1.0.0.tgz#8df7c7aae51fd36874e8f8d05b9180bc11a3fed7" + integrity sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw== dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" + escape-string-regexp "^1.0.5" clean-stack@^2.0.0: version "2.2.0" - resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -clean-stack@^3.0.0: +clean-stack@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-3.0.1.tgz#155bf0b2221bf5f4fba89528d24c5953f17fe3a8" integrity sha512-lR9wNiMRcVQjSB3a7xXGLuz4cr4wJuuXlaAEbRutGowQTmlp7R72/DOgN21e8jdwblMWl9UOJMJXarX94pzKdg== dependencies: escape-string-regexp "4.0.0" -cli-boxes@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-1.0.0.tgz" - integrity sha1-T6kXw+WclKAEzWH47lCdplFocUM= +cli-boxes@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-3.0.0.tgz#71a10c716feeba005e4504f36329ef0b17cf3145" + integrity sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g== -cli-boxes@^2.2.1: - version "2.2.1" - resolved 
"https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz" - integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== +cli-cursor@^2.0.0, cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw== + dependencies: + restore-cursor "^2.0.0" cli-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== dependencies: restore-cursor "^3.1.0" -cli-progress@^3.4.0: - version "3.9.0" - resolved "https://registry.npmjs.org/cli-progress/-/cli-progress-3.9.0.tgz" - integrity sha512-g7rLWfhAo/7pF+a/STFH/xPyosaL1zgADhI0OM83hl3c7S43iGvJWEAV2QuDOnQ8i6EMBj/u4+NTd0d5L+4JfA== +cli-cursor@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-5.0.0.tgz#24a4831ecf5a6b01ddeb32fb71a4b2088b0dce38" + integrity sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw== dependencies: - colors "^1.1.2" - string-width "^4.2.0" + restore-cursor "^5.0.0" + +cli-progress@^3.12.0: + version "3.12.0" + resolved "https://registry.yarnpkg.com/cli-progress/-/cli-progress-3.12.0.tgz#807ee14b66bcc086258e444ad0f19e7d42577942" + integrity sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A== + dependencies: + string-width "^4.2.3" cli-spinners@^2.5.0, cli-spinners@^2.9.2: version "2.9.2" resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" integrity 
sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== -cli-table@^0.3.1: - version "0.3.6" - resolved "https://registry.npmjs.org/cli-table/-/cli-table-0.3.6.tgz" - integrity sha512-ZkNZbnZjKERTY5NwC2SeMeLeifSPq/pubeRoTpdr3WchLlnZg6hEgvHkK5zL7KNFdd9PmHN8lxrENUwI3cE8vQ== +cli-table@^0.3.11: + version "0.3.11" + resolved "https://registry.yarnpkg.com/cli-table/-/cli-table-0.3.11.tgz#ac69cdecbe81dccdba4889b9a18b7da312a9d3ee" + integrity sha512-IqLQi4lO0nIB4tcdTpN4LCB9FI3uqrJZK7RC515EnhZ6qBaglkIgICb1wjeAqpdoOabm1+SuQtkXIPdYC93jhQ== dependencies: colors "1.0.3" -cli-ux@^4.9.3: - version "4.9.3" - resolved "https://registry.npmjs.org/cli-ux/-/cli-ux-4.9.3.tgz" - integrity sha512-/1owvF0SZ5Gn54cgrikJ0QskgTzeg30HGjkmjFoaHDJzAqFpuX1DBpFR8aLvsE1J5s9MgeYRENQK4BFwOag5VA== +cli-truncate@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-0.2.1.tgz#9f15cfbb0705005369216c626ac7d05ab90dd574" + integrity sha512-f4r4yJnbT++qUPI9NR4XLDLq41gQ+uqnPItWG0F5ZkehuNiTTa3EY0S4AqTSUOeJ7/zU41oWPQSNkW5BqPL9bg== dependencies: - "@oclif/errors" "^1.2.2" - "@oclif/linewrap" "^1.0.0" - "@oclif/screen" "^1.0.3" - ansi-escapes "^3.1.0" - ansi-styles "^3.2.1" - cardinal "^2.1.1" - chalk "^2.4.1" - clean-stack "^2.0.0" - extract-stack "^1.0.0" - fs-extra "^7.0.0" - hyperlinker "^1.0.0" - indent-string "^3.2.0" - is-wsl "^1.1.0" - lodash "^4.17.11" - password-prompt "^1.0.7" - semver "^5.6.0" - strip-ansi "^5.0.0" - supports-color "^5.5.0" - supports-hyperlinks "^1.0.1" - treeify "^1.1.0" - tslib "^1.9.3" - -cli-ux@^5.2.1: - version "5.5.1" - resolved "https://registry.npmjs.org/cli-ux/-/cli-ux-5.5.1.tgz" - integrity sha512-t3DT1U1C3rArLGYLpKa3m9dr/8uKZRI8HRm/rXKL7UTjm4c+Yd9zHNWg1tP8uaJkUbhmvx5SQHwb3VWpPUVdHQ== - dependencies: - "@oclif/command" "^1.6.0" - "@oclif/errors" "^1.2.1" - "@oclif/linewrap" "^1.0.0" - "@oclif/screen" "^1.0.3" - ansi-escapes "^4.3.0" - ansi-styles "^4.2.0" - cardinal "^2.1.1" - chalk "^4.1.0" - 
clean-stack "^3.0.0" - cli-progress "^3.4.0" - extract-stack "^2.0.0" - fs-extra "^8.1" - hyperlinker "^1.0.0" - indent-string "^4.0.0" - is-wsl "^2.2.0" - js-yaml "^3.13.1" - lodash "^4.17.11" - natural-orderby "^2.0.1" - object-treeify "^1.1.4" - password-prompt "^1.1.2" - semver "^7.3.2" - string-width "^4.2.0" - strip-ansi "^6.0.0" - supports-color "^7.1.0" - supports-hyperlinks "^2.1.0" - tslib "^2.0.0" - -cli-width@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz" - integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== + slice-ansi "0.0.4" + string-width "^1.0.1" cli-width@^4.1.0: version "4.1.0" @@ -2551,7 +5049,7 @@ cli-width@^4.1.0: cliui@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== dependencies: string-width "^4.2.0" @@ -2560,176 +5058,165 @@ cliui@^6.0.0: cliui@^7.0.2: version "7.0.4" - resolved "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== dependencies: string-width "^4.2.0" strip-ansi "^6.0.0" wrap-ansi "^7.0.0" -clone-buffer@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz" - integrity sha1-4+JbIHrE5wGvch4staFnksrD3Fg= - -clone-deep@^4.0.1: - version "4.0.1" - resolved "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz" - integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== +cliui@^8.0.1: + version "8.0.1" + resolved 
"https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== dependencies: - is-plain-object "^2.0.4" - kind-of "^6.0.2" - shallow-clone "^3.0.0" + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== dependencies: mimic-response "^1.0.0" -clone-stats@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz" - integrity sha1-s3gt/4u1R04Yuba/D9/ngvh3doA= - clone@^1.0.2: version "1.0.4" - resolved "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz" - integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== -clone@^2.1.1: +clone@^2.1.2: version "2.1.2" - resolved "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz" - integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== -cloneable-readable@^1.0.0: - version "1.1.3" - resolved "https://registry.npmjs.org/cloneable-readable/-/cloneable-readable-1.1.3.tgz" - integrity sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ== +cloudflare@^4.4.1: + version "4.5.0" + resolved 
"https://registry.yarnpkg.com/cloudflare/-/cloudflare-4.5.0.tgz#dd7521270382663823288fd4f36d4d804f7a4663" + integrity sha512-fPcbPKx4zF45jBvQ0z7PCdgejVAPBBCZxwqk1k7krQNfpM07Cfj97/Q6wBzvYqlWXx/zt1S9+m8vnfCe06umbQ== dependencies: - inherits "^2.0.1" - process-nextick-args "^2.0.0" - readable-stream "^2.3.5" - -cluster-key-slot@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz" - integrity sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw== + "@types/node" "^18.11.18" + "@types/node-fetch" "^2.6.4" + abort-controller "^3.0.0" + agentkeepalive "^4.2.1" + form-data-encoder "1.7.2" + formdata-node "^4.3.2" + node-fetch "^2.6.7" -co-prompt@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/co-prompt/-/co-prompt-1.0.0.tgz" - integrity sha1-+zcOntrEhXayenMv5dfyHZ/G5vY= - dependencies: - keypress "~0.2.1" +cmd-shim@^6.0.0: + version "6.0.3" + resolved "https://registry.yarnpkg.com/cmd-shim/-/cmd-shim-6.0.3.tgz#c491e9656594ba17ac83c4bd931590a9d6e26033" + integrity sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA== code-point-at@^1.0.0: version "1.1.0" - resolved "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - -coffee-script@^1.12.4: - version "1.12.7" - resolved "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz" - integrity sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw== - -coffeescript@^1.10.0: - version "1.12.7" - resolved "https://registry.npmjs.org/coffeescript/-/coffeescript-1.12.7.tgz" - integrity sha512-pLXHFxQMPklVoEekowk8b3erNynC+DVJzChxS/LCBBgR6/8AJkHivkm//zbowcfc7BTCAjryuhx6gPqPRfsFoA== - -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - 
dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA== color-convert@^1.9.0: version "1.9.3" - resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-convert@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" color-name@1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@~1.1.4: version "1.1.4" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -colorette@^1.2.2: - version "1.2.2" - resolved "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz" - integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== +colorette@^2.0.7: + 
version "2.0.20" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== colors@1.0.3: version "1.0.3" - resolved "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz" - integrity sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs= - -colors@1.4.0, colors@^1.1.2: - version "1.4.0" - resolved "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz" - integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== - -columnify@^1.5.4: - version "1.5.4" - resolved "https://registry.npmjs.org/columnify/-/columnify-1.5.4.tgz" - integrity sha1-Rzfd8ce2mop8NAVweC6UfuyOeLs= - dependencies: - strip-ansi "^3.0.0" - wcwidth "^1.0.0" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.0.3.tgz#0433f44d809680fdeb60ed260f1b0c262e82a40b" + integrity sha512-pFGrxThWcWQ2MsAz6RtgeWe4NK2kUE1WfsrvvlctdII745EW9I0yflqhe7++M5LEc7bV2c/9/5zc8sFcpL0Drw== -combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: +combined-stream@^1.0.8, combined-stream@~1.0.6: version "1.0.8" - resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" -commander@^2.11.0, commander@^2.9.0: - version "2.20.3" - resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== +comma-separated-tokens@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee" + integrity 
sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== -commander@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== +command-line-args@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/command-line-args/-/command-line-args-6.0.1.tgz#cbd1efb4f72b285dbd54bde9a8585c2d9694b070" + integrity sha512-Jr3eByUjqyK0qd8W0SGFW1nZwqCaNCtbXjRo2cRJC1OYxWl3MZ5t1US3jq+cO4sPavqgw4l9BMGX0CBe+trepg== + dependencies: + array-back "^6.2.2" + find-replace "^5.0.2" + lodash.camelcase "^4.3.0" + typical "^7.2.0" + +command-line-usage@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/command-line-usage/-/command-line-usage-7.0.3.tgz#6bce992354f6af10ecea2b631bfdf0c8b3bfaea3" + integrity sha512-PqMLy5+YGwhMh1wS04mVG44oqDsgyLRSKJBdOo1bnYhMKBW65gZF1dRp2OZRhiTjgUHljy99qkO7bsctLaw35Q== + dependencies: + array-back "^6.2.2" + chalk-template "^0.4.0" + table-layout "^4.1.0" + typical "^7.1.1" -commander@^9.4.0: - version "9.4.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-9.4.1.tgz#d1dd8f2ce6faf93147295c0df13c7c21141cfbdd" - integrity sha512-5EEkTNyHNGFPD2H+c/dXXfQZYa/scCKasxWcXJaWnNJ99pnQN9Vnmqow+p+PlFPE63Q6mThaZws1T+HxfpgtPw== +commander@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-11.1.0.tgz#62fdce76006a68e5c1ab3314dc92e800eb83d906" + integrity sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ== + +commander@^12.0.0: + version "12.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" + integrity sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA== + +comment-parser@1.4.1: + version "1.4.1" + resolved 
"https://registry.yarnpkg.com/comment-parser/-/comment-parser-1.4.1.tgz#bdafead37961ac079be11eb7ec65c4d021eaf9cc" + integrity sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg== -common-path-prefix@^3.0.0: +common-ancestor-path@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz#4f7d2d1394d91b7abdf51871c62f71eadb0182a7" + integrity sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w== + +common-sequence@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" - integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + resolved "https://registry.yarnpkg.com/common-sequence/-/common-sequence-3.0.0.tgz#d631cf0306fb2dea97e1d6669a1627950803fca1" + integrity sha512-g/CgSYk93y+a1IKm50tKl7kaT/OjjTYVQlEbUlt/49ZLV1mcKpUU7iyDiqTAeLdb4QDtQfq3ako8y8v//fzrWQ== commondir@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" - integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== +compare-func@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/compare-func/-/compare-func-2.0.0.tgz#fb65e75edbddfd2e568554e8b5b05fff7a51fcb3" + integrity sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA== + dependencies: + array-ify "^1.0.0" + dot-prop 
"^5.1.0" compress-commons@^4.1.2: version "4.1.2" @@ -2743,70 +5230,142 @@ compress-commons@^4.1.2: concat-map@0.0.1: version "0.0.1" - resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -concat-stream@^1.5.2: - version "1.6.2" - resolved "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== +config-chain@^1.1.11: + version "1.1.13" + resolved "https://registry.yarnpkg.com/config-chain/-/config-chain-1.1.13.tgz#fad0795aa6a6cdaff9ed1b68e9dff94372c232f4" + integrity sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ== dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" + ini "^1.3.4" + proto-list "~1.2.1" -concat-with-sourcemaps@*: - version "1.1.0" - resolved "https://registry.npmjs.org/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz" - integrity sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg== +config-master@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/config-master/-/config-master-3.1.0.tgz#667663590505a283bf26a484d68489d74c5485da" + integrity sha512-n7LBL1zBzYdTpF1mx5DNcZnZn05CWIdsdvtPL4MosvqbBUK3Rq6VWEtGUuF3Y0s9/CIhMejezqlSkP6TnCJ/9g== dependencies: - source-map "^0.6.1" + walk-back "^2.0.1" -configstore@^5.0.1: - version "5.0.1" - resolved "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz" - integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== +configstore@^7.0.0: + version "7.0.0" + resolved 
"https://registry.yarnpkg.com/configstore/-/configstore-7.0.0.tgz#4461561fc51cb40e5ee1161230bc0337e069cc6b" + integrity sha512-yk7/5PN5im4qwz0WFZW3PXnzHgPu9mX29Y8uZ3aefe2lBPC1FYttWZRcaW9fKkT0pBCJyuQ2HfbmPVaODi9jcQ== dependencies: - dot-prop "^5.2.0" - graceful-fs "^4.1.2" - make-dir "^3.0.0" - unique-string "^2.0.0" - write-file-atomic "^3.0.0" - xdg-basedir "^4.0.0" + atomically "^2.0.3" + dot-prop "^9.0.0" + graceful-fs "^4.2.11" + xdg-basedir "^5.1.0" + +console-table-printer@^2.12.1: + version "2.14.6" + resolved "https://registry.yarnpkg.com/console-table-printer/-/console-table-printer-2.14.6.tgz#edfe0bf311fa2701922ed509443145ab51e06436" + integrity sha512-MCBl5HNVaFuuHW6FGbL/4fB7N/ormCy+tQ+sxTrF6QtSbSNETvPuOVbkJBhzDgYhvjWGrTma4eYJa37ZuoQsPw== + dependencies: + simple-wcswidth "^1.0.1" + +constant-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" + integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + upper-case "^2.0.2" + +content-disposition@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4" + integrity sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA== content-type@^1.0.4: - version "1.0.4" - resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== -convert-source-map@^1.7.0: - version "1.7.0" - resolved 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz" - integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== +conventional-changelog-angular@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/conventional-changelog-angular/-/conventional-changelog-angular-6.0.0.tgz#a9a9494c28b7165889144fd5b91573c4aa9ca541" + integrity sha512-6qLgrBF4gueoC7AFVHu51nHL9pF9FRjXrH+ceVf7WmAfH3gs+gEYOkvxhjMPjZu57I4AGUGoNTY8V7Hrgf1uqg== dependencies: - safe-buffer "~5.1.1" + compare-func "^2.0.0" -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= +conventional-changelog-conventionalcommits@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-6.1.0.tgz#3bad05f4eea64e423d3d90fc50c17d2c8cf17652" + integrity sha512-3cS3GEtR78zTfMzk0AizXKKIdN4OvSh7ibNz6/DPbhWWQu7LqE/8+/GqSodV+sywUR2gpJAdP/1JFf4XtN7Zpw== + dependencies: + compare-func "^2.0.0" + +conventional-commits-parser@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/conventional-commits-parser/-/conventional-commits-parser-4.0.0.tgz#02ae1178a381304839bce7cea9da5f1b549ae505" + integrity sha512-WRv5j1FsVM5FISJkoYMR6tPk07fkKT0UodruX4je86V4owk451yjXAKzKAPOs9l7y59E2viHUS9eQ+dfUA9NSg== + dependencies: + JSONStream "^1.3.5" + is-text-path "^1.0.1" + meow "^8.1.2" + split2 "^3.2.2" + +convert-source-map@^1.7.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +convert-source-map@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + +core-js-compat@^3.34.0: + version "3.44.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.44.0.tgz#62b9165b97e4cbdb8bca16b14818e67428b4a0f8" + integrity sha512-JepmAj2zfl6ogy34qfWtcE7nHKAJnKsQFRn++scjVS2bZFllwptzw61BZcZFYBPpUznLfAvh0LGhxKppk04ClA== + dependencies: + browserslist "^4.25.1" -core-util-is@1.0.2, core-util-is@~1.0.0: +core-util-is@1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" - integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== -cosmiconfig@^7.0.0: - version "7.0.0" - resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.0.tgz" - integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig-typescript-loader@^4.0.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.4.0.tgz#f3feae459ea090f131df5474ce4b1222912319f9" + integrity sha512-BabizFdC3wBHhbI4kJh0VkQP9GkBfoHPydD0COMce1nJ1kJAB3F2TmJ/I7diULBKtmEWSwEbuN/KDtgnmUUVmw== + +cosmiconfig-typescript-loader@^5.0.0: + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-5.1.0.tgz#d8d02bff04e63faa2dc794d618168bd764c704be" + integrity sha512-7PtBB+6FdsOvZyJtlF3hEPpACq7RQX6BVGsgC7/lfVXnKMvNCu/XY3ykreqG5w/rBNdu2z8LCIKoF3kpHHdHlA== dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" + jiti "^1.21.6" + +cosmiconfig@*, cosmiconfig@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-9.0.0.tgz#34c3fc58287b915f3ae905ab6dc3de258b55ad9d" + integrity sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg== + dependencies: + env-paths "^2.2.1" + import-fresh "^3.3.0" + js-yaml "^4.1.0" + parse-json "^5.2.0" + +cosmiconfig@^8.0.0, cosmiconfig@^8.3.6: + version "8.3.6" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.6.tgz#060a2b871d66dba6c8538ea1118ba1ac16f5fae3" + integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA== + dependencies: + import-fresh "^3.3.0" + js-yaml "^4.1.0" + parse-json "^5.2.0" path-type "^4.0.0" - yaml "^1.10.0" crc-32@^1.2.0: version "1.2.2" @@ -2821,106 +5380,78 @@ crc32-stream@^4.0.2: crc-32 "^1.2.0" readable-stream "^3.4.0" -create-error-class@^3.0.0: - version "3.0.2" - resolved "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz" - integrity sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y= - dependencies: - capture-stack-trace "^1.0.0" - create-require@^1.1.0: version "1.1.1" - resolved "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== -cross-fetch@3.1.5: - version "3.1.5" - resolved "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz" - integrity 
sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== - dependencies: - node-fetch "2.6.7" - -cross-spawn@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - integrity sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A== - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - -cross-spawn@^6.0.0, cross-spawn@^6.0.5: - version "6.0.5" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== +cross-fetch@^3.1.5: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3" + integrity sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q== dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" + node-fetch "^2.7.0" -cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== +cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3, cross-spawn@^7.0.6: + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== dependencies: path-key "^3.1.0" shebang-command "^2.0.0" which "^2.0.1" -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" - integrity 
sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - csprng@*: version "0.1.2" - resolved "https://registry.npmjs.org/csprng/-/csprng-0.1.2.tgz" - integrity sha1-S8aPEvo2jSUqWYQcusqXSxirReI= + resolved "https://registry.yarnpkg.com/csprng/-/csprng-0.1.2.tgz#4bc68f12fa368d252a59841cbaca974b18ab45e2" + integrity sha512-D3WAbvvgUVIqSxUfdvLeGjuotsB32bvfVPd+AaaTWMtyUeC9zgCnw5xs94no89yFLVsafvY9dMZEhTwsY/ZecA== dependencies: sequin "*" -cssstyle@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-4.0.1.tgz#ef29c598a1e90125c870525490ea4f354db0660a" - integrity sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ== +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssstyle@^4.2.1: + version "4.6.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-4.6.0.tgz#ea18007024e3167f4f105315f3ec2d982bf48ed9" + integrity sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg== dependencies: - rrweb-cssom "^0.6.0" + "@asamuzakjp/css-color" "^3.2.0" + rrweb-cssom "^0.8.0" -csv-parse@^4.10.1: - version "4.15.3" - resolved "https://registry.npmjs.org/csv-parse/-/csv-parse-4.15.3.tgz" - integrity sha512-jlTqDvLdHnYMSr08ynNfk4IAUSJgJjTKy2U5CQBSu4cN9vQOJonLVZP4Qo4gKKrIgIQ5dr07UwOJdi+lRqT12w== +csv-parse@^5.5.2: + version "5.6.0" + resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-5.6.0.tgz#219beace2a3e9f28929999d2aa417d3fb3071c7f" + integrity sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q== -csv-stringify@^1.0.4: - version "1.1.2" - resolved "https://registry.npmjs.org/csv-stringify/-/csv-stringify-1.1.2.tgz" - integrity sha1-d6QVJlgbzjOA8SsA18W7rHDIK1g= - 
dependencies: - lodash.get "~4.4.2" +csv-stringify@^6.6.0: + version "6.6.0" + resolved "https://registry.yarnpkg.com/csv-stringify/-/csv-stringify-6.6.0.tgz#d384859cfb71d0a4a73c5bcc36a4daf5440cb033" + integrity sha512-YW32lKOmIBgbxtu3g5SaiqWNwa/9ISQt2EcgOq0+RAIFufFp9is6tqNnKahqE5kuKvrnYAzs28r+s6pXJR8Vcw== -csv-stringify@^5.6.1: - version "5.6.2" - resolved "https://registry.npmjs.org/csv-stringify/-/csv-stringify-5.6.2.tgz" - integrity sha512-n3rIVbX6ylm1YsX2NEug9IaPV8xRnT+9/NNZbrA/bcHgOSSeqtWla6XnI/xmyu57wIw+ASCAoX1oM6EZtqJV0A== +current-module-paths@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/current-module-paths/-/current-module-paths-1.1.2.tgz#13a2d821b2f864c3adead261b7954b068510c32f" + integrity sha512-H4s4arcLx/ugbu1XkkgSvcUZax0L6tXUqnppGniQb8l5VjUKGHoayXE5RiriiPhYDd+kjZnaok1Uig13PKtKYQ== -dargs@^6.1.0: - version "6.1.0" - resolved "https://registry.npmjs.org/dargs/-/dargs-6.1.0.tgz" - integrity sha512-5dVBvpBLBnPwSsYXqfybFyehMmC/EenKEcf23AhCTgTf48JFBbmJKqoZBsERDnjL0FyiVTYWdFsRfTLHxLyKdQ== +dargs@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc" + integrity sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg== dashdash@^1.12.0: version "1.14.1" - resolved "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz" - integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== dependencies: assert-plus "^1.0.0" -data-uri-to-buffer@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz" - integrity sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA== +data-uri-to-buffer@^6.0.2: + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz#8a58bb67384b261a38ef18bea1810cb01badd28b" + integrity sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw== data-urls@^5.0.0: version "5.0.0" @@ -2930,80 +5461,101 @@ data-urls@^5.0.0: whatwg-mimetype "^4.0.0" whatwg-url "^14.0.0" -dateformat@^3.0.3: - version "3.0.3" - resolved "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz" - integrity sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q== +data-view-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/data-view-buffer/-/data-view-buffer-1.0.2.tgz#211a03ba95ecaf7798a8c7198d79536211f88570" + integrity sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ== + dependencies: + call-bound "^1.0.3" + es-errors "^1.3.0" + is-data-view "^1.0.2" -dayjs-plugin-utc@^0.1.2: - version "0.1.2" - resolved "https://registry.npmjs.org/dayjs-plugin-utc/-/dayjs-plugin-utc-0.1.2.tgz" - integrity sha512-ExERH5o3oo6jFOdkvMP3gytTCQ9Ksi5PtylclJWghr7k7m3o2U5QrwtdiJkOxLOH4ghr0EKhpqGefzGz1VvVJg== +data-view-byte-length@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz#9e80f7ca52453ce3e93d25a35318767ea7704735" + integrity sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ== + dependencies: + call-bound "^1.0.3" + es-errors "^1.3.0" + is-data-view "^1.0.2" -dayjs@^1.8.16: - version "1.10.4" - resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.10.4.tgz" - integrity sha512-RI/Hh4kqRc1UKLOAf/T5zdMMX5DQIlDxwUe3wSyMMnEbGunnpENCdbUgM+dW7kXidZqCttBrmw7BhN4TMddkCw== +data-view-byte-offset@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz#068307f9b71ab76dbbe10291389e020856606191" + integrity 
sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ== + dependencies: + call-bound "^1.0.2" + es-errors "^1.3.0" + is-data-view "^1.0.1" -dayjs@^1.8.34: - version "1.11.10" - resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.10.tgz#68acea85317a6e164457d6d6947564029a6a16a0" - integrity sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ== +date-fns@^1.27.2: + version "1.30.1" + resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" + integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== -debug@4, debug@4.3.4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.4: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" +dateformat@^4.6.3: + version "4.6.3" + resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" + integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA== -debug@4.3.1: - version "4.3.1" - resolved "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== +dateformat@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-5.0.3.tgz#fe2223eff3cc70ce716931cb3038b59a9280696e" + integrity sha512-Kvr6HmPXUMerlLcLF+Pwq3K7apHpYmGDVqrxcDasBg86UcKeTSNWbEzU8bwdXnxnR44FtMhJAxI4Bov6Y/KUfA== + +dayjs@^1.8.34: + version "1.11.13" + resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.13.tgz#92430b0139055c3ebb60150aa13e860a4b5a366c" + integrity 
sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg== + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4, debug@^4.3.5, debug@^4.4.0, debug@^4.4.1: + version "4.4.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b" + integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ== dependencies: - ms "2.1.2" + ms "^2.1.3" -debug@^2.2.0, debug@^2.3.3: +debug@^2.6.9: version "2.6.9" - resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@^3.1.0: +debug@^3.2.7: version "3.2.7" - resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: - ms "^2.1.1" + ms "^2.1.1" + +decamelize-keys@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.1.tgz#04a2d523b2f18d80d0158a43b895d56dff8d19d8" + integrity sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg== + dependencies: + decamelize "^1.1.0" + map-obj "^1.0.0" -decamelize@^1.2.0: +decamelize@1.2.0, decamelize@^1.1.0, decamelize@^1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" - integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== 
decamelize@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-4.0.0.tgz#aa472d7bf660eb15f3494efd531cab7f2a709837" integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== -decimal.js@^10.4.3: - version "10.4.3" - resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.3.tgz#1044092884d245d1b7f65725fa4ad4c6f781cc23" - integrity sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA== +decimal.js@^10.5.0: + version "10.6.0" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.6.0.tgz#e649a43e3ab953a72192ff5983865e509f37ed9a" + integrity sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg== -decode-uri-component@^0.2.0, decode-uri-component@^0.2.2: +decode-uri-component@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" - integrity "sha1-5p2+JdN5QRcd1UDgJMREzVGI4ek= sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" - -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz" - integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= - dependencies: - mimic-response "^1.0.0" + integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== decompress-response@^6.0.0: version "6.0.0" @@ -3012,78 +5564,91 @@ decompress-response@^6.0.0: dependencies: mimic-response "^3.1.0" -deep-eql@^3.0.1: - version "3.0.1" - resolved "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz" - integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw== +deep-eql@^4.1.3: + version "4.1.4" + resolved 
"https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.1.4.tgz#d0d3912865911bb8fac5afb4e3acfa6a28dc72b7" + integrity sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg== dependencies: type-detect "^4.0.0" deep-extend@^0.6.0: version "0.6.0" - resolved "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@^0.1.3: - version "0.1.3" - resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz" - integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== deepmerge@^4.2.2: version "4.3.1" resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== +default-browser-id@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/default-browser-id/-/default-browser-id-5.0.0.tgz#a1d98bf960c15082d8a3fa69e83150ccccc3af26" + integrity sha512-A6p/pu/6fyBcA1TRz/GqWYPViplrftcW2gZC9q79ngNCKAeR/X3gcEdXQHl4KNXV+3wgIJ1CPkJQ3IHM6lcsyA== + +default-browser@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/default-browser/-/default-browser-5.2.1.tgz#7b7ba61204ff3e425b556869ae6d3e9d9f1712cf" + integrity sha512-WY/3TUME0x3KPYdRRxEJJvXRHV4PyPoUsxtZa78lwItwRQRHhd2U9xOscaT/YTf8uCXIAjeJOFBVEh/7FtD8Xg== + dependencies: + bundle-name "^4.1.0" + default-browser-id "^5.0.0" + default-require-extensions@^3.0.0: - version "3.0.0" - resolved 
"https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz" - integrity sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg== + version "3.0.1" + resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.1.tgz#bfae00feeaeada68c2ae256c62540f60b80625bd" + integrity sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw== dependencies: strip-bom "^4.0.0" defaults@^1.0.3: - version "1.0.3" - resolved "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz" - integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== dependencies: clone "^1.0.2" -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== - -defer-to-connect@^2.0.0: +defer-to-connect@^2.0.0, defer-to-connect@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== -define-lazy-prop@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" - integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== - -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= +define-data-property@^1.0.1, 
define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== dependencies: - is-descriptor "^0.1.0" + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= +define-lazy-prop@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz#dbb19adfb746d7fc6d734a06b72f4a00d021255f" + integrity sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg== + +define-properties@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== dependencies: - is-descriptor "^1.0.0" + define-data-property "^1.0.1" + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== +degenerator@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/degenerator/-/degenerator-5.0.1.tgz#9403bf297c6dad9a1ece409b37db27954f91f2f5" + integrity sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ== dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" + ast-types "^0.13.4" + escodegen "^2.1.0" + esprima "^4.0.1" delay@^5.0.0: version "5.0.0" @@ -3092,105 +5657,180 @@ delay@^5.0.0: delayed-stream@~1.0.0: version "1.0.0" 
- resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" - integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== -denque@^1.1.0: - version "1.5.1" - resolved "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz" - integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw== - -deprecation@^2.0.0, deprecation@^2.3.1: +deprecation@^2.0.0: version "2.3.1" resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== -detect-indent@^6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/detect-indent/-/detect-indent-6.0.0.tgz" - integrity sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA== +dequal@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== -devtools-protocol@0.0.969999: - version "0.0.969999" - resolved "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.969999.tgz" - integrity sha512-6GfzuDWU0OFAuOvBokXpXPLxjOJ5DZ157Ue3sGQQM3LgAamb8m0R0ruSfN0DDu+XG5XJgT50i6zZ/0o8RglreQ== +des.js@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.1.0.tgz#1d37f5766f3bbff4ee9638e871a8768c173b81da" + integrity sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg== + dependencies: + inherits "^2.0.1" + minimalistic-assert "^1.0.0" -diacritics-map@^0.1.0: - version "0.1.0" - resolved 
"https://registry.npmjs.org/diacritics-map/-/diacritics-map-0.1.0.tgz" - integrity sha1-bfwP+dAQAKLt8oZTccrDFulJd68= +detect-indent@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-7.0.1.tgz#cbb060a12842b9c4d333f1cac4aa4da1bb66bc25" + integrity sha512-Mc7QhQ8s+cLrnUfU/Ji94vG/r8M26m8f++vyres4ZoojaRDpZ1eSIh/EpzLNwlWuvzSZ3UbDFspjFvTDXe6e/g== -diff@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz" - integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== +detect-libc@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.4.tgz#f04715b8ba815e53b4d8109655b6508a6865a7e8" + integrity sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA== -diff@^3.5.0: - version "3.5.0" - resolved "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz" - integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== +detect-newline@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-4.0.1.tgz#fcefdb5713e1fb8cb2839b8b6ee22e6716ab8f23" + integrity sha512-qE3Veg1YXzGHQhlA6jzebZN2qVf6NX+A7m7qlhCGG30dJixrAQhYOsJjsnBjJkCSmuOPpCk30145fr8FV0bzog== + +devlop@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/devlop/-/devlop-1.1.0.tgz#4db7c2ca4dc6e0e834c30be70c94bbc976dc7018" + integrity sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA== + dependencies: + dequal "^2.0.0" -diff@^4.0.1: +devtools-protocol@0.0.1367902: + version "0.0.1367902" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1367902.tgz#7333bfc4466c5a54a4c6de48a9dfbcb4b811660c" + integrity sha512-XxtPuC3PGakY6PD7dG66/o8KwJ/LkH2/EKe19Dcw58w53dv4/vSQEkn/SzuyhHE2q4zPgCkxQBxus3VV4ql+Pg== + +devtools-protocol@0.0.1464554: + version "0.0.1464554" + 
resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1464554.tgz#54d88398c99ad93fc81c35569fba9473b6e223d0" + integrity sha512-CAoP3lYfwAGQTaAXYvA6JZR0fjGUb7qec1qf4mToyoH2TZgUFeIqYcjh6f9jNuhHfuZiEdH+PONHYrLhRQX6aw== + +diff@^4.0.1, diff@^4.0.2: version "4.0.2" - resolved "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== -dir-glob@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz" - integrity sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag== - dependencies: - arrify "^1.0.1" - path-type "^3.0.0" +diff@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.0.tgz#26ded047cd1179b78b9537d5ef725503ce1ae531" + integrity sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A== -dir-glob@^2.2.2: - version "2.2.2" - resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-2.2.2.tgz" - integrity sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw== - dependencies: - path-type "^3.0.0" +diff@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-7.0.0.tgz#3fb34d387cd76d803f6eebea67b921dab0182a9a" + integrity sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw== dir-glob@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: path-type "^4.0.0" +dmd@^7.1.1: + version "7.1.1" + resolved 
"https://registry.yarnpkg.com/dmd/-/dmd-7.1.1.tgz#acf791a47aec88c0a80873695bb2c1cc67a04ffe" + integrity sha512-Ap2HP6iuOek7eShReDLr9jluNJm9RMZESlt29H/Xs1qrVMkcS9X6m5h1mBC56WMxNiSo0wvjGICmZlYUSFjwZQ== + dependencies: + array-back "^6.2.2" + cache-point "^3.0.0" + common-sequence "^3.0.0" + file-set "^5.2.2" + handlebars "^4.7.8" + marked "^4.3.0" + walk-back "^5.1.1" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + doctrine@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" -dompurify@^3.0.8: - version "3.0.8" - resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-3.0.8.tgz#e0021ab1b09184bc8af7e35c7dd9063f43a8a437" - integrity sha512-b7uwreMYL2eZhrSCRC4ahLTeZcPZxSmYfmcQGXGkXiZSNW1X85v+SDM5KsWcpivIiUBH47Ji7NtyUdpLeF5JZQ== +dom-serializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved 
"https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +dompurify@^3.2.6: + version "3.2.6" + resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-3.2.6.tgz#ca040a6ad2b88e2a92dc45f38c79f84a714a1cad" + integrity sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ== + optionalDependencies: + "@types/trusted-types" "^2.0.7" + +domutils@^3.2.1: + version "3.2.2" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.2.2.tgz#edbfe2b668b0c1d97c24baf0f1062b132221bc78" + integrity sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" -dot-prop@^5.2.0: +dot-prop@^5.1.0: version "5.3.0" - resolved "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== dependencies: is-obj "^2.0.0" -download-stats@^0.3.4: - version "0.3.4" - resolved "https://registry.npmjs.org/download-stats/-/download-stats-0.3.4.tgz" - integrity sha512-ic2BigbyUWx7/CBbsfGjf71zUNZB4edBGC3oRliSzsoNmvyVx3Ycfp1w3vp2Y78Ee0eIIkjIEO5KzW0zThDGaA== +dot-prop@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-9.0.0.tgz#bae5982fe6dc6b8fddb92efef4f2ddff26779e92" + integrity 
sha512-1gxPBJpI/pcjQhKgIU91II6Wkay+dLcN3M6rf2uwP8hRur3HtQXjVrdAK3sjC0piaEuxzMwjXChcETiJl47lAQ== dependencies: - JSONStream "^1.2.1" - lazy-cache "^2.0.1" - moment "^2.15.1" + type-fest "^4.18.2" -dtrace-provider@~0.6: - version "0.6.0" - resolved "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz" - integrity sha1-CweNVReTfYcxAUUtkUZzdVe3XlE= +dotenv@^16.6.1: + version "16.6.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.6.1.tgz#773f0e69527a8315c7285d5ee73c4459d20a8020" + integrity sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow== + +dunder-proto@^1.0.0, dunder-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/dunder-proto/-/dunder-proto-1.0.1.tgz#d7ae667e1dc83482f8b70fd0f6eefc50da30f58a" + integrity sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A== dependencies: - nan "^2.0.8" + call-bind-apply-helpers "^1.0.1" + es-errors "^1.3.0" + gopd "^1.2.0" duplexer2@~0.1.4: version "0.1.4" @@ -3199,16 +5839,6 @@ duplexer2@~0.1.4: dependencies: readable-stream "^2.0.2" -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz" - integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= - -each-parallel-async@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/each-parallel-async/-/each-parallel-async-1.0.0.tgz#91783e190000c7dd588336b2d468ebaf71980f7b" - integrity sha512-P/9kLQiQj0vZNzphvKKTgRgMnlqs5cJsxeAiuog1jrUnwv0Z3hVUwJDQiP7MnLb2I9S15nR9SRUceFT9IxtqRg== - eastasianwidth@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" @@ -3216,62 +5846,61 @@ eastasianwidth@^0.2.0: ecc-jsbn@~0.1.1: version "0.1.2" - resolved "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" - integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= + resolved 
"https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== dependencies: jsbn "~0.1.0" safer-buffer "^2.1.0" ecdsa-sig-formatter@1.0.11: version "1.0.11" - resolved "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz" + resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== dependencies: safe-buffer "^5.0.1" -editions@^2.2.0: - version "2.3.1" - resolved "https://registry.npmjs.org/editions/-/editions-2.3.1.tgz" - integrity sha512-ptGvkwTvGdGfC0hfhKg0MT+TRLRKGtUiWGBInxOm5pz7ssADezahjCUaYuZ8Dr+C05FW0AECIIPt4WBxVINEhA== +editions@^6.21.0: + version "6.21.0" + resolved "https://registry.yarnpkg.com/editions/-/editions-6.21.0.tgz#8da2d85611106e0891a72619b7bee8e0c830089b" + integrity sha512-ofkXJtn7z0urokN62DI3SBo/5xAtF0rR7tn+S/bSYV79Ka8pTajIIl+fFQ1q88DQEImymmo97M4azY3WX/nUdg== dependencies: - errlop "^2.0.0" - semver "^6.3.0" + version-range "^4.13.0" -ejs@^2.6.1: - version "2.7.4" - resolved "https://registry.npmjs.org/ejs/-/ejs-2.7.4.tgz" - integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA== - -ejs@^3.1.5: +ejs@^3.1.10: version "3.1.10" resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b" - integrity "sha1-aauDWLFOiW+AzDnmIIe4hQDDrDs= sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==" + integrity sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA== dependencies: jake "^10.8.5" -electron-to-chromium@^1.3.723: - version "1.3.743" - resolved 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.743.tgz" - integrity sha512-K2wXfo9iZQzNJNx67+Pld0DRF+9bYinj62gXCdgPhcu1vidwVuLPHQPPFnCdO55njWigXXpfBiT90jGUPbw8Zg== +electron-to-chromium@^1.5.173: + version "1.5.189" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.189.tgz#a5c41d2e5c64e2e6cd11bdf4eeeebc1ec8601e08" + integrity sha512-y9D1ntS1ruO/pZ/V2FtLE+JXLQe28XoRpZ7QCCo0T8LdQladzdcOVQZH/IWLVJvCw12OGMb6hYOeOAjntCmJRQ== + +elegant-spinner@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" + integrity sha512-B+ZM+RXvRqQaAmkMlO/oSe5nMUOaUnyfGYCEHoR8wrXsZR2mA0XVibsxV1bvTwxdRWah1PkQqso2EzhILGHtEQ== email-validator@^2.0.4: version "2.0.4" - resolved "https://registry.npmjs.org/email-validator/-/email-validator-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/email-validator/-/email-validator-2.0.4.tgz#b8dfaa5d0dae28f1b03c95881d904d4e40bfe7ed" integrity sha512-gYCwo7kh5S3IDyZPLZf6hSS0MnZT8QmJFqYvbqlDZSbwdZlY6QZWxJ4i/6UhITOJ4XzyI647Bm2MXKCLqnJ4nQ== -"emoji-regex@>=6.0.0 <=6.1.1": - version "6.1.1" - resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-6.1.1.tgz" - integrity sha1-xs0OwbBkLio8Z6ETfvxeeW2k+I4= +emoji-regex-xs@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex-xs/-/emoji-regex-xs-1.0.0.tgz#e8af22e5d9dbd7f7f22d280af3d19d2aab5b0724" + integrity sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg== -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz" - integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== +emoji-regex@^10.3.0: + version "10.4.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.4.0.tgz#03553afea80b3975749cfcb36f776ca268e413d4" + integrity 
sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw== emoji-regex@^8.0.0: version "8.0.0" - resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== emoji-regex@^9.2.2: @@ -3279,200 +5908,438 @@ emoji-regex@^9.2.2: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== -end-of-stream@^1.1.0, end-of-stream@^1.4.1: - version "1.4.4" - resolved "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== +encoding@^0.1.13: + version "0.1.13" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" + integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== dependencies: - once "^1.4.0" + iconv-lite "^0.6.2" -enquirer@^2.3.5: - version "2.3.6" - resolved "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz" - integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== +end-of-stream@^1.1.0, end-of-stream@^1.4.1: + version "1.4.5" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.5.tgz#7344d711dea40e0b74abc2ed49778743ccedb08c" + integrity sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg== dependencies: - ansi-colors "^4.1.1" + once "^1.4.0" -entities@^4.4.0: +entities@^4.2.0, entities@^4.4.0: version "4.5.0" resolved 
"https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== -eol@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/eol/-/eol-0.9.1.tgz#f701912f504074be35c6117a5c4ade49cd547acd" - integrity sha512-Ds/TEoZjwggRoz/Q2O7SE3i4Jm66mqTDfmdHdq/7DKVk3bro9Q8h6WdXKdPqFLMoqxrDK5SVRzHVPOS6uuGtrg== +entities@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/entities/-/entities-6.0.1.tgz#c28c34a43379ca7f61d074130b2f5f7020a30694" + integrity sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g== -errlop@^2.0.0: - version "2.2.0" - resolved "https://registry.npmjs.org/errlop/-/errlop-2.2.0.tgz" - integrity sha512-e64Qj9+4aZzjzzFpZC7p5kmm/ccCrbLhAJplhsDXQFs87XTsXwOpH4s1Io2s90Tau/8r2j9f4l/thhDevRjzxw== +env-paths@^2.2.0, env-paths@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" + integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A== + +env-paths@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-3.0.0.tgz#2f1e89c2f6dbd3408e1b1711dd82d62e317f58da" + integrity sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A== + +err-code@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" + integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== error-ex@^1.3.1: version "1.3.2" - resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" -error@^7.0.2: - version "7.2.1" - resolved "https://registry.npmjs.org/error/-/error-7.2.1.tgz" - integrity sha512-fo9HBvWnx3NGUKMvMwB/CBCMMrfEJgbDTVDEkPygA3Bdd3lM1OyCd+rbQ8BwnpF6GdVeOLDNmyL4N5Bg80ZvdA== +es-abstract@^1.23.2, es-abstract@^1.23.5, es-abstract@^1.23.9, es-abstract@^1.24.0: + version "1.24.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.24.0.tgz#c44732d2beb0acc1ed60df840869e3106e7af328" + integrity sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg== + dependencies: + array-buffer-byte-length "^1.0.2" + arraybuffer.prototype.slice "^1.0.4" + available-typed-arrays "^1.0.7" + call-bind "^1.0.8" + call-bound "^1.0.4" + data-view-buffer "^1.0.2" + data-view-byte-length "^1.0.2" + data-view-byte-offset "^1.0.1" + es-define-property "^1.0.1" + es-errors "^1.3.0" + es-object-atoms "^1.1.1" + es-set-tostringtag "^2.1.0" + es-to-primitive "^1.3.0" + function.prototype.name "^1.1.8" + get-intrinsic "^1.3.0" + get-proto "^1.0.1" + get-symbol-description "^1.1.0" + globalthis "^1.0.4" + gopd "^1.2.0" + has-property-descriptors "^1.0.2" + has-proto "^1.2.0" + has-symbols "^1.1.0" + hasown "^2.0.2" + internal-slot "^1.1.0" + is-array-buffer "^3.0.5" + is-callable "^1.2.7" + is-data-view "^1.0.2" + is-negative-zero "^2.0.3" + is-regex "^1.2.1" + is-set "^2.0.3" + is-shared-array-buffer "^1.0.4" + is-string "^1.1.1" + is-typed-array "^1.1.15" + is-weakref "^1.1.1" + math-intrinsics "^1.1.0" + object-inspect "^1.13.4" + object-keys "^1.1.1" + object.assign "^4.1.7" + own-keys "^1.0.1" + regexp.prototype.flags "^1.5.4" + safe-array-concat "^1.1.3" + safe-push-apply "^1.0.0" + safe-regex-test "^1.1.0" + set-proto "^1.0.0" + stop-iteration-iterator "^1.1.0" + string.prototype.trim "^1.2.10" + string.prototype.trimend "^1.0.9" + string.prototype.trimstart "^1.0.8" + 
typed-array-buffer "^1.0.3" + typed-array-byte-length "^1.0.3" + typed-array-byte-offset "^1.0.4" + typed-array-length "^1.0.7" + unbox-primitive "^1.1.0" + which-typed-array "^1.1.19" + +es-define-property@^1.0.0, es-define-property@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.1.tgz#983eb2f9a6724e9303f61addf011c72e09e0b0fa" + integrity sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g== + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es-object-atoms@^1.0.0, es-object-atoms@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz#1c4f2c4837327597ce69d2ca190a7fdd172338c1" + integrity sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA== + dependencies: + es-errors "^1.3.0" + +es-set-tostringtag@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz#f31dbbe0c183b00a6d26eb6325c810c0fd18bd4d" + integrity sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA== + dependencies: + es-errors "^1.3.0" + get-intrinsic "^1.2.6" + has-tostringtag "^1.0.2" + hasown "^2.0.2" + +es-shim-unscopables@^1.0.2, es-shim-unscopables@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz#438df35520dac5d105f3943d927549ea3b00f4b5" + integrity sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw== dependencies: - string-template "~0.2.1" + hasown "^2.0.2" + +es-to-primitive@^1.3.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.3.0.tgz#96c89c82cc49fd8794a24835ba3e1ff87f214e18" + integrity sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g== + dependencies: + is-callable "^1.2.7" + is-date-object "^1.0.5" + is-symbol "^1.0.4" es6-error@^4.0.1: version "4.1.1" - resolved "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz" + resolved "https://registry.yarnpkg.com/es6-error/-/es6-error-4.1.1.tgz#9e3af407459deed47e9a91f9b885a84eb05c561d" integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg== -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.1, escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== -escape-goat@^2.0.0: - version "2.1.1" - resolved "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz" - integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== +escape-goat@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-4.0.0.tgz#9424820331b510b0666b98f7873fe11ac4aa8081" + integrity sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg== + +escape-html@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@4.0.0: +escape-string-regexp@4.0.0, escape-string-regexp@^4.0.0: version "4.0.0" - resolved 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" - integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== -eslint-scope@^5.0.0, eslint-scope@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" - integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== -eslint-utils@^2.0.0, eslint-utils@^2.1.0: +escodegen@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz" - integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.1.0.tgz#ba93bbb7a43986d29d6041f99f5262da773e2e17" + integrity sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w== dependencies: - eslint-visitor-keys "^1.1.0" + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionalDependencies: 
+ source-map "~0.6.1" -eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: - version "1.3.0" - resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz" - integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== +eslint-config-prettier@^9.1.0: + version "9.1.2" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-9.1.2.tgz#90deb4fa0259592df774b600dbd1d2249a78ce91" + integrity sha512-iI1f+D2ViGn+uvv5HuHVUamg8ll4tN+JRHGc6IJi4TP9Kl976C57fzPXgseXNs8v0iA8aSJpHsTWjDb9QJamGQ== -eslint-visitor-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz" - integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== +eslint-config-salesforce-license@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eslint-config-salesforce-license/-/eslint-config-salesforce-license-0.2.0.tgz#323193f1aa15dd33fbf108d25fc1210afc11065e" + integrity sha512-DJdBvgj82Erum82YMe+YvG/o6ukna3UA++lRl0HSTldj0VlBl3Q8hzCp97nRXZHra6JH1I912yievZzklXDw6w== + +eslint-config-salesforce-typescript@^3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/eslint-config-salesforce-typescript/-/eslint-config-salesforce-typescript-3.4.0.tgz#3542e96aa6054b3df3b7c636b3b7f5bf4238bfb3" + integrity sha512-pT+kJsmLrXIsVw1f24gWB+a2Iefan9qp02iSdx5mk4Jb/Jv68LhS+V/dfJxN5vvKhzvc86UwUPEIQBX9OCSbpQ== + dependencies: + "@typescript-eslint/eslint-plugin" "^6.21.0" + "@typescript-eslint/parser" "^6.21.0" + eslint "^8.56.0" + eslint-config-prettier "^9.1.0" + eslint-config-salesforce "^2.2.0" + eslint-config-salesforce-license "^0.2.0" + eslint-plugin-header "^3.1.1" + eslint-plugin-import "^2.29.1" + eslint-plugin-jsdoc "^46.10.1" + eslint-plugin-unicorn "^50.0.1" + +eslint-config-salesforce@^2.2.0: + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/eslint-config-salesforce/-/eslint-config-salesforce-2.2.0.tgz#04b6cf07dcbaabc32fc9edb0915860497db55c30" + integrity sha512-0zUEFJ2nNpMvVO3MgKEDUTGtaFZjL3xEIErr5h+BOft+OhGoIvZBNPnBBu12lvv29ylqIAQz5SwoVCCUzBhyPQ== + +eslint-import-resolver-node@^0.3.9: + version "0.3.9" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz#d4eaac52b8a2e7c3cd1903eb00f7e053356118ac" + integrity sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g== + dependencies: + debug "^3.2.7" + is-core-module "^2.13.0" + resolve "^1.22.4" + +eslint-module-utils@^2.12.1: + version "2.12.1" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz#f76d3220bfb83c057651359295ab5854eaad75ff" + integrity sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw== + dependencies: + debug "^3.2.7" + +eslint-plugin-header@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-header/-/eslint-plugin-header-3.1.1.tgz#6ce512432d57675265fac47292b50d1eff11acd6" + integrity sha512-9vlKxuJ4qf793CmeeSrZUvVClw6amtpghq3CuWcB5cUNnWHQhgcqy5eF8oVKFk1G3Y/CbchGfEaw3wiIJaNmVg== + +eslint-plugin-import@^2.29.1: + version "2.32.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz#602b55faa6e4caeaa5e970c198b5c00a37708980" + integrity sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA== + dependencies: + "@rtsao/scc" "^1.1.0" + array-includes "^3.1.9" + array.prototype.findlastindex "^1.2.6" + array.prototype.flat "^1.3.3" + array.prototype.flatmap "^1.3.3" + debug "^3.2.7" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.9" + eslint-module-utils "^2.12.1" + hasown "^2.0.2" + is-core-module "^2.16.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.fromentries "^2.0.8" + object.groupby "^1.0.3" + object.values 
"^1.2.1" + semver "^6.3.1" + string.prototype.trimend "^1.0.9" + tsconfig-paths "^3.15.0" + +eslint-plugin-jsdoc@^46.10.1: + version "46.10.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-46.10.1.tgz#77c871309c4ed93758a3b2fdf384dc6189cf8605" + integrity sha512-x8wxIpv00Y50NyweDUpa+58ffgSAI5sqe+zcZh33xphD0AVh+1kqr1ombaTRb7Fhpove1zfUuujlX9DWWBP5ag== + dependencies: + "@es-joy/jsdoccomment" "~0.41.0" + are-docs-informative "^0.0.2" + comment-parser "1.4.1" + debug "^4.3.4" + escape-string-regexp "^4.0.0" + esquery "^1.5.0" + is-builtin-module "^3.2.1" + semver "^7.5.4" + spdx-expression-parse "^4.0.0" + +eslint-plugin-sf-plugin@^1.20.31: + version "1.20.31" + resolved "https://registry.yarnpkg.com/eslint-plugin-sf-plugin/-/eslint-plugin-sf-plugin-1.20.31.tgz#d655ea3bf2f41376021d93612ee1e99c28415b3d" + integrity sha512-vFVhmiUVbiqvbD6wa/+O0xNHNhP+zlNMlDEHDNelKwNzEKnI8lrL5CzJKsKOw1r9aa0iEEUuaaHplRUeanflGQ== + dependencies: + "@salesforce/core" "^8.19.1" + "@typescript-eslint/utils" "^7.18.0" -eslint@^7.21.0: - version "7.21.0" - resolved "https://registry.npmjs.org/eslint/-/eslint-7.21.0.tgz" - integrity sha512-W2aJbXpMNofUp0ztQaF40fveSsJBjlSCSWpy//gzfTvwC+USs/nceBrKmlJOiM8r1bLwP2EuYkCqArn/6QTIgg== +eslint-plugin-unicorn@^50.0.1: + version "50.0.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-unicorn/-/eslint-plugin-unicorn-50.0.1.tgz#e539cdb02dfd893c603536264c4ed9505b70e3bf" + integrity sha512-KxenCZxqSYW0GWHH18okDlOQcpezcitm5aOSz6EnobyJ6BIByiPDviQRjJIUAjG/tMN11958MxaQ+qCoU6lfDA== dependencies: - "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" "^0.4.0" - ajv "^6.10.0" + "@babel/helper-validator-identifier" "^7.22.20" + "@eslint-community/eslint-utils" "^4.4.0" + "@eslint/eslintrc" "^2.1.4" + ci-info "^4.0.0" + clean-regexp "^1.0.0" + core-js-compat "^3.34.0" + esquery "^1.5.0" + indent-string "^4.0.0" + is-builtin-module "^3.2.1" + jsesc "^3.0.2" + pluralize "^8.0.0" + read-pkg-up "^7.0.1" + regexp-tree "^0.1.27" + 
regjsparser "^0.10.0" + semver "^7.5.4" + strip-indent "^3.0.0" + +eslint-scope@^7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: + version "3.4.3" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== + +eslint@^8.56.0: + version "8.57.1" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.57.1.tgz#7df109654aba7e3bbe5c8eae533c5e461d3c6ca9" + integrity sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/regexpp" "^4.6.1" + "@eslint/eslintrc" "^2.1.4" + "@eslint/js" "8.57.1" + "@humanwhocodes/config-array" "^0.13.0" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" + "@ungap/structured-clone" "^1.2.0" + ajv "^6.12.4" chalk "^4.0.0" cross-spawn "^7.0.2" - debug "^4.0.1" + debug "^4.3.2" doctrine "^3.0.0" - enquirer "^2.3.5" - eslint-scope "^5.1.1" - eslint-utils "^2.1.0" - eslint-visitor-keys "^2.0.0" - espree "^7.3.1" - esquery "^1.4.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.2.2" + eslint-visitor-keys "^3.4.3" + espree "^9.6.1" + esquery "^1.4.2" esutils "^2.0.2" + fast-deep-equal "^3.1.3" file-entry-cache "^6.0.1" - functional-red-black-tree "^1.0.1" - glob-parent "^5.0.0" - globals "^12.1.0" - ignore "^4.0.6" - import-fresh "^3.0.0" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + graphemer "^1.4.0" + ignore "^5.2.0" imurmurhash "^0.1.4" is-glob "^4.0.0" - js-yaml "^3.13.1" + 
is-path-inside "^3.0.3" + js-yaml "^4.1.0" json-stable-stringify-without-jsonify "^1.0.1" levn "^0.4.1" - lodash "^4.17.20" - minimatch "^3.0.4" + lodash.merge "^4.6.2" + minimatch "^3.1.2" natural-compare "^1.4.0" - optionator "^0.9.1" - progress "^2.0.0" - regexpp "^3.1.0" - semver "^7.2.1" - strip-ansi "^6.0.0" - strip-json-comments "^3.1.0" - table "^6.0.4" + optionator "^0.9.3" + strip-ansi "^6.0.1" text-table "^0.2.0" - v8-compile-cache "^2.0.3" -espree@^7.3.0, espree@^7.3.1: - version "7.3.1" - resolved "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz" - integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== +espree@^9.6.0, espree@^9.6.1: + version "9.6.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== dependencies: - acorn "^7.4.0" - acorn-jsx "^5.3.1" - eslint-visitor-keys "^1.3.0" + acorn "^8.9.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.4.1" -esprima@^4.0.0, esprima@~4.0.0: +esprima@^4.0.0, esprima@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -esquery@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== +esquery@^1.4.2, esquery@^1.5.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.6.0.tgz#91419234f804d852a82dceec3e16cdc22cf9dae7" + integrity sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg== dependencies: estraverse "^5.1.0" esrecurse@^4.3.0: 
version "4.3.0" - resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: estraverse "^5.2.0" -estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - estraverse@^5.1.0, estraverse@^5.2.0: - version "5.2.0" - resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz" - integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== esutils@^2.0.2: version "2.0.3" - resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== event-target-shim@^5.0.0: version "5.0.1" - resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== -eventemitter3@^3.1.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" - integrity 
sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== - eventemitter3@^4.0.4: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== +eventemitter3@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" + integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + +events@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + exceljs@^4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/exceljs/-/exceljs-4.4.0.tgz#cfb1cb8dcc82c760a9fc9faa9e52dadab66b0156" @@ -3488,22 +6355,9 @@ exceljs@^4.4.0: unzipper "^0.10.11" uuid "^8.3.0" -execa@^0.10.0: - version "0.10.0" - resolved "https://registry.npmjs.org/execa/-/execa-0.10.0.tgz" - integrity sha512-7XOMnz8Ynx1gGo/3hyV9loYNPWM94jG3+3T3Y8tsfSstFmETmENCMU/A/zj8Lyaj1lkgEepKepvd6240tBRvlw== - dependencies: - cross-spawn "^6.0.0" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^4.0.0: +execa@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a" integrity sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== dependencies: cross-spawn "^7.0.0" @@ -3516,82 +6370,90 @@ execa@^4.0.0: signal-exit "^3.0.2" strip-final-newline "^2.0.0" -expand-brackets@^2.1.4: - version "2.1.4" - resolved 
"https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" -expand-range@^1.8.1: - version "1.8.2" - resolved "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz" - integrity sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc= +execa@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-7.2.0.tgz#657e75ba984f42a70f38928cedc87d6f2d4fe4e9" + integrity sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA== dependencies: - fill-range "^2.1.0" + cross-spawn "^7.0.3" + get-stream "^6.0.1" + human-signals "^4.3.0" + is-stream "^3.0.0" + merge-stream "^2.0.0" + npm-run-path "^5.1.0" + onetime "^6.0.0" + signal-exit "^3.0.7" + strip-final-newline "^3.0.0" + +execa@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-8.0.1.tgz#51f6a5943b580f963c3ca9c6321796db8cc39b8c" + integrity sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^8.0.1" + human-signals "^5.0.0" + is-stream "^3.0.0" + merge-stream "^2.0.0" + npm-run-path "^5.1.0" + onetime "^6.0.0" + signal-exit "^4.1.0" + strip-final-newline "^3.0.0" + +expand-template@^2.0.3: + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" + integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== + +exponential-backoff@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/exponential-backoff/-/exponential-backoff-3.1.2.tgz#a8f26adb96bf78e8cd8ad1037928d5e5c0679d91" + integrity sha512-8QxYTVXUkuy7fIIoitQkPwGonB8F3Zj8eEO8Sqg9Zv/bkI7RJAzowee4gr81Hak/dUTpA2Z7VfQgoijjPNlUZA== + +expr-eval@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/expr-eval/-/expr-eval-2.0.2.tgz#fa6f044a7b0c93fde830954eb9c5b0f7fbc7e201" + integrity sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg== extend-shallow@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== dependencies: is-extendable "^0.1.0" -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - -extend@~3.0.2: +extend@^3.0.2, extend@~3.0.2: version "3.0.2" - resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -external-editor@^3.0.3, external-editor@^3.1.0: +external-editor@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz" + 
resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== dependencies: chardet "^0.7.0" iconv-lite "^0.4.24" tmp "^0.0.33" -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -extract-stack@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/extract-stack/-/extract-stack-1.0.0.tgz" - integrity sha1-uXrK+UQe6iMyUpYktzL8WhyBZfo= - -extract-stack@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/extract-stack/-/extract-stack-2.0.0.tgz" - integrity sha512-AEo4zm+TenK7zQorGK1f9mJ8L14hnTDi2ZQPR+Mub1NX8zimka1mXpV5LpH8x9HoUmFSHZCfLHqWvp0Y4FxxzQ== - -extract-zip@2.0.1, extract-zip@^2.0.1: +extract-zip@*, extract-zip@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== dependencies: debug "^4.1.1" @@ -3602,27 +6464,31 @@ extract-zip@2.0.1, extract-zip@^2.0.1: extsprintf@1.3.0: version "1.3.0" - resolved "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" - integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== 
extsprintf@^1.2.0: - version "1.4.0" - resolved "https://registry.npmjs.org/extsprintf/-/extsprintf-1.4.0.tgz" - integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= + version "1.4.1" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" + integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== -fancy-test@^1.4.3: - version "1.4.10" - resolved "https://registry.npmjs.org/fancy-test/-/fancy-test-1.4.10.tgz" - integrity sha512-AaUX6wKS7D5OP2YK2q5G7c8PGx2lgoyLUD7Bbg8z323sb9aebBqzb9UN6phzI73UgO/ViihmNfOxF3kdfZLhew== +farmhash@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/farmhash/-/farmhash-4.0.2.tgz#4b78df735316973e136b3c3316985a697e53f82e" + integrity sha512-i6FbQ0ZUPV6yhFSRI2SQBEqJzoWDiN4cnulTT2jm0f0lUIXg8/iPebACCrOY80rggd9LaSU65GFOI/xnJBdzyA== dependencies: - "@types/chai" "*" - "@types/lodash" "*" - "@types/node" "*" - "@types/sinon" "*" - lodash "^4.17.13" - mock-stdin "^1.0.0" - nock "^13.0.0" - stdout-stderr "^0.1.9" + node-addon-api "^8.4.0" + prebuild-install "^7.1.3" + +fast-content-type-parse@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz#c236124534ee2cb427c8d8e5ba35a4856947847b" + integrity sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q== + +fast-copy@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/fast-copy/-/fast-copy-3.0.2.tgz#59c68f59ccbcac82050ba992e0d5c389097c9d35" + integrity sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ== fast-csv@^4.3.1: version "4.3.6" @@ -3632,74 +6498,96 @@ fast-csv@^4.3.1: "@fast-csv/format" "4.3.5" "@fast-csv/parse" "4.3.6" -fast-deep-equal@^3.1.1: +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" - resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" + resolved 
"https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^2.0.2, fast-glob@^2.2.6: - version "2.2.7" - resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz" - integrity sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw== - dependencies: - "@mrmlnc/readdir-enhanced" "^2.2.1" - "@nodelib/fs.stat" "^1.1.2" - glob-parent "^3.1.0" - is-glob "^4.0.0" - merge2 "^1.2.3" - micromatch "^3.1.10" - -fast-glob@^3.0.3, fast-glob@^3.1.1: - version "3.2.5" - resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.5.tgz" - integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.0" - merge2 "^1.3.0" - micromatch "^4.0.2" - picomatch "^2.2.1" +fast-fifo@^1.2.0, fast-fifo@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c" + integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== -fast-glob@^3.2.11, fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== +fast-glob@^3.2.11, fast-glob@^3.2.9, fast-glob@^3.3.2, fast-glob@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.3.tgz#d06d585ce8dba90a16b0505c543c3ccfb3aeb818" + integrity sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" glob-parent "^5.1.2" merge2 
"^1.3.0" - micromatch "^4.0.4" + micromatch "^4.0.8" fast-json-stable-stringify@^2.0.0: version "2.1.0" - resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" - integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fast-levenshtein@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-3.0.0.tgz#37b899ae47e1090e40e3fd2318e4d5f0142ca912" + integrity sha512-hKKNajm46uNmTlhHSyZkmToAc56uZJwYq7yrciZjqOxnlfQwERDQJmHPUp7m1m9wx8vgOe8IaCKZ5Kv2k1DdCQ== + dependencies: + fastest-levenshtein "^1.0.7" + +fast-redact@^3.1.1: + version "3.5.0" + resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.5.0.tgz#e9ea02f7e57d0cd8438180083e93077e496285e4" + integrity sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A== + +fast-safe-stringify@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" + integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== + +fast-uri@^3.0.1: + version "3.0.6" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.6.tgz#88f130b77cfaea2378d56bf970dea21257a68748" + integrity 
sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw== + +fast-xml-parser@5.2.5: + version "5.2.5" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-5.2.5.tgz#4809fdfb1310494e341098c25cb1341a01a9144a" + integrity sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ== + dependencies: + strnum "^2.1.0" + +fast-xml-parser@^4.4.0, fast-xml-parser@^4.4.1, fast-xml-parser@^4.5.3: + version "4.5.3" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz#c54d6b35aa0f23dc1ea60b6c884340c006dc6efb" + integrity sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig== + dependencies: + strnum "^1.1.1" + +fastest-levenshtein@^1.0.7: + version "1.0.16" + resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz#210e61b6ff181de91ea9b3d1b84fdedd47e034e5" + integrity sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg== fastq@^1.6.0: - version "1.11.0" - resolved "https://registry.npmjs.org/fastq/-/fastq-1.11.0.tgz" - integrity sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g== + version "1.19.1" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.19.1.tgz#d50eaba803c8846a883c16492821ebcd2cda55f5" + integrity sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ== dependencies: reusify "^1.0.4" faye-websocket@>=0.9.1: - version "0.11.3" - resolved "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz" - integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA== + version "0.11.4" + resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity 
sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== dependencies: websocket-driver ">=0.5.1" -faye@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/faye/-/faye-1.4.0.tgz" - integrity sha512-kRrIg4be8VNYhycS2PY//hpBJSzZPr/DBbcy9VWelhZMW3KhyLkQR0HL0k0MNpmVoNFF4EdfMFkNAWjTP65g6w== +faye@^1.4.0, faye@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/faye/-/faye-1.4.1.tgz#fd2f84e21d62b1cced1bd34968925b466d44a381" + integrity sha512-Cg/khikhqlvumHO3efwx2tps2ZgQRjUMrO24G0quz7MMzRYYaEjU224YFXOeuPIvanRegIchVxj6pmHK1W0ikA== dependencies: asap "*" csprng "*" @@ -3710,65 +6598,57 @@ faye@^1.4.0: fd-slicer@~1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz" - integrity sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4= + resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" + integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== dependencies: pend "~1.2.0" -fetch-blob@^3.1.2, fetch-blob@^3.1.4: - version "3.1.4" - resolved "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.1.4.tgz" - integrity sha512-Eq5Xv5+VlSrYWEqKrusxY1C3Hm/hjeAsCGVG3ft7pZahlUAChpGZT/Ms1WmSLnEAisEXszjzu/s+ce6HZB2VHA== +fdir@^6.4.4: + version "6.4.6" + resolved "https://registry.yarnpkg.com/fdir/-/fdir-6.4.6.tgz#2b268c0232697063111bbf3f64810a2a741ba281" + integrity sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w== + +figures@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" + integrity sha512-UxKlfCRuCBxSXU4C6t9scbDyWZ4VlaFFdojKtzJuSkuOBQ5CNFum+zZXFwHjo+CxBC1t6zlYPgHIgFjL8ggoEQ== dependencies: - node-domexception "^1.0.0" - web-streams-polyfill "^3.0.3" + escape-string-regexp "^1.0.5" + object-assign "^4.1.0" -figures@^3.0.0: - version "3.2.0" - resolved 
"https://registry.npmjs.org/figures/-/figures-3.2.0.tgz" - integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA== dependencies: escape-string-regexp "^1.0.5" file-entry-cache@^6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: flat-cache "^3.0.4" -filelist@^1.0.1: - version "1.0.2" - resolved "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz" - integrity sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ== - dependencies: - minimatch "^3.0.4" - -fill-range@^2.1.0: - version "2.2.4" - resolved "https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz" - integrity sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q== +file-set@^5.2.2, file-set@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/file-set/-/file-set-5.3.0.tgz#f8ab3a930bd0912cc6fe372581c3aac71682ebfb" + integrity sha512-FKCxdjLX0J6zqTWdT0RXIxNF/n7MyXXnsSUp0syLEOCKdexvPZ02lNNv2a+gpK9E3hzUYF3+eFZe32ci7goNUg== dependencies: - is-number "^2.1.0" - isobject "^2.0.0" - randomatic "^3.0.0" - repeat-element "^1.1.2" - repeat-string "^1.5.2" + array-back "^6.2.2" + fast-glob "^3.3.2" -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= +filelist@^1.0.4: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" + minimatch "^5.0.1" fill-range@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" - integrity "sha1-RCZdPKwH4+p9wkdRY4BkN1SgUpI= sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== dependencies: to-regex-range "^5.0.1" @@ -3778,50 +6658,45 @@ filter-obj@^1.1.0: integrity sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ== find-cache-dir@^3.2.0: - version "3.3.1" - resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz" - integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ== + version "3.3.2" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== dependencies: commondir "^1.0.1" make-dir "^3.0.2" pkg-dir "^4.1.0" -find-cache-dir@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-4.0.0.tgz#a30ee0448f81a3990708f6453633c733e2f6eec2" - integrity sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg== - dependencies: - common-path-prefix "^3.0.0" - pkg-dir "^7.0.0" - find-package-json@^1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/find-package-json/-/find-package-json-1.2.0.tgz" + resolved 
"https://registry.yarnpkg.com/find-package-json/-/find-package-json-1.2.0.tgz#4057d1b943f82d8445fe52dc9cf456f6b8b58083" integrity sha512-+SOGcLGYDJHtyqHd87ysBhmaeQ95oWspDKnMXBrnQ9Eq4OkLNqejgoaD8xVWu6GPa0B6roa6KinCMEMcVeqONw== -find-up@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" - integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" +find-replace@^5.0.1, find-replace@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/find-replace/-/find-replace-5.0.2.tgz#fe27ff0be05975aef6fc679c1139bbabea564e26" + integrity sha512-Y45BAiE3mz2QsrN2fb5QEtO4qb44NcS7en/0y9PEVsg351HsLeVclP8QPMH79Le9sH3rs5RSwJu99W0WPZO43Q== -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" +find-up-simple@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/find-up-simple/-/find-up-simple-1.0.1.tgz#18fb90ad49e45252c4d7fca56baade04fa3fca1e" + integrity sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ== find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== dependencies: locate-path "^5.0.0" path-exists "^4.0.0" +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + 
path-exists "^4.0.0" + find-up@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-6.3.0.tgz#2abab3d3280b2dc7ac10199ef324c4e002c8c790" @@ -3832,97 +6707,97 @@ find-up@^6.3.0: find-yarn-workspace-root@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz#f47fb8d239c900eb78179aa81b66673eac88f7bd" integrity sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ== dependencies: micromatch "^4.0.2" -first-chunk-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/first-chunk-stream/-/first-chunk-stream-2.0.0.tgz" - integrity sha1-G97NuOCDwGZLkZRVgVd6Q6nzHXA= - dependencies: - readable-stream "^2.0.2" +first-chunk-stream@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/first-chunk-stream/-/first-chunk-stream-5.0.0.tgz#6089fd0add05b058ec0c122a21c0c1801d06d5ff" + integrity sha512-WdHo4ejd2cG2Dl+sLkW79SctU7mUQDfr4s1i26ffOZRs5mgv+BRttIM9gwcq0rDbemo0KlpVPaa3LBVLqPXzcQ== flat-cache@^3.0.4: - version "3.0.4" - resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + version "3.2.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.2.0.tgz#2c0c2d5040c99b1632771a9d105725c0115363ee" + integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== dependencies: - flatted "^3.1.0" + flatted "^3.2.9" + keyv "^4.5.3" rimraf "^3.0.2" flat@^5.0.2: version "5.0.2" - resolved "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz" + resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== 
-flatted@^3.1.0: - version "3.1.1" - resolved "https://registry.npmjs.org/flatted/-/flatted-3.1.1.tgz" - integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== +flatted@^3.2.9: + version "3.3.3" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.3.tgz#67c8fad95454a7c7abebf74bb78ee74a44023358" + integrity sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg== -follow-redirects@^1.14.0, follow-redirects@^1.14.9, follow-redirects@^1.15.6: - version "1.15.6" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" - integrity "sha1-f4FcDNpCScdP8J6V75fCO1/QOZs= sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==" +fly-import@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/fly-import/-/fly-import-0.4.1.tgz#33d0dfd67b973d50d4a8d941fbb9b59a64877c90" + integrity sha512-9gqEx0nnQ6SF0pKKOEexVNYCTBiLb3g+a5JMLkiaBIqwM/pEZH0Le83owUA/tkRLxWNwIso+sB3E+epuOCPWlw== + dependencies: + "@npmcli/arborist" "^7.2.0" + env-paths "^3.0.0" + registry-auth-token "^5.0.2" + registry-url "^6.0.1" -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= +follow-redirects@^1.15.6: + version "1.15.9" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" + integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== + +for-each@^0.3.3, for-each@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.5.tgz#d650688027826920feeb0af747ee7b9421a41d47" + integrity sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg== + dependencies: + is-callable "^1.2.7" foreground-child@^2.0.0: version 
"2.0.0" - resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53" integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA== dependencies: cross-spawn "^7.0.0" signal-exit "^3.0.2" -foreground-child@^3.1.0: - version "3.2.1" - resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.2.1.tgz#767004ccf3a5b30df39bed90718bab43fe0a59f7" - integrity sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA== +foreground-child@^3.1.0, foreground-child@^3.3.0, foreground-child@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.3.1.tgz#32e8e9ed1b68a3497befb9ac2b6adf92a638576f" + integrity sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw== dependencies: - cross-spawn "^7.0.0" + cross-spawn "^7.0.6" signal-exit "^4.0.1" forever-agent@~0.6.1: version "0.6.1" - resolved "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" - integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== form-data-encoder@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" integrity sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A== -form-data@^2.5.0: - version "2.5.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" - integrity 
sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" +form-data-encoder@^2.1.2: + version "2.1.4" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-2.1.4.tgz#261ea35d2a70d48d30ec7a9603130fa5515e9cd5" + integrity sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw== -form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== +form-data@4.0.4, form-data@^2.5.0, form-data@^4.0.0, form-data@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.4.tgz#784cdcce0669a9d68e94d11ac4eea98088edd2c4" + integrity sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" - mime-types "^2.1.12" - -form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" + es-set-tostringtag "^2.1.0" + hasown "^2.0.2" mime-types "^2.1.12" formdata-node@^4.3.2: @@ -3933,90 +6808,67 @@ formdata-node@^4.3.2: node-domexception "1.0.0" web-streams-polyfill "4.0.0-beta.3" -formdata-polyfill@^4.0.10: - version "4.0.10" - resolved "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz" - integrity sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g== - dependencies: - fetch-blob "^3.1.2" - -fragment-cache@^0.2.1: - version "0.2.1" - resolved 
"https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" +fp-ts@^2.16.8: + version "2.16.10" + resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-2.16.10.tgz#829b82a46571c2dc202bed38a9c2eeec603e38c4" + integrity sha512-vuROzbNVfCmUkZSUbnWSltR1sbheyQbTzug7LB/46fEa1c0EucLeBaCEUE0gF3ZGUGBt9lVUiziGOhhj6K1ORA== fromentries@^1.2.0: version "1.3.2" - resolved "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz" + resolved "https://registry.yarnpkg.com/fromentries/-/fromentries-1.3.2.tgz#e4bca6808816bf8f93b52750f1127f5a6fd86e3a" integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg== fs-constants@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== -fs-extra@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" - integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== +fs-extra@^11.0.0, fs-extra@^11.1.1, fs-extra@^11.3.2: + version "11.3.2" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.3.2.tgz#c838aeddc6f4a8c74dd15f85e11fe5511bfe02a4" + integrity sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A== dependencies: graceful-fs "^4.2.0" jsonfile "^6.0.1" universalify "^2.0.0" -fs-extra@^6.0.1: - version "6.0.1" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-6.0.1.tgz" - integrity sha512-GnyIkKhhzXZUWFCaJzvyDLEEgDkPfb4/TPvJCJVuS8MWZgoSsErf++QpiAlDnKFcqhRlm+tIOcencCjyJE6ZCA== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - 
universalify "^0.1.0" - -fs-extra@^7.0.0: - version "7.0.1" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz" - integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - universalify "^0.1.0" - fs-extra@^8.1: version "8.1.0" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== dependencies: graceful-fs "^4.2.0" jsonfile "^4.0.0" universalify "^0.1.0" -fs-extra@^9.1.0: - version "9.1.0" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" - integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== +fs-minipass@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" + integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== dependencies: - at-least-node "^1.0.0" - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" + minipass "^3.0.0" + +fs-minipass@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-3.0.3.tgz#79a85981c4dc120065e96f62086bf6f9dc26cc54" + integrity sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw== + dependencies: + minipass "^7.0.3" fs-readdir-recursive@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz#e32fc030a2ccee44a6b5371308da54be0b397d27" integrity 
sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA== fs.realpath@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -fsevents@~2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== +fsevents@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== fstream@^1.0.12: version "1.0.12" @@ -4028,226 +6880,267 @@ fstream@^1.0.12: mkdirp ">=0.5 0" rimraf "2" -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz" - integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +function.prototype.name@^1.1.6, function.prototype.name@^1.1.8: + version "1.1.8" + resolved 
"https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.8.tgz#e68e1df7b259a5c949eeef95cdbde53edffabb78" + integrity sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.3" + define-properties "^1.2.1" + functions-have-names "^1.2.3" + hasown "^2.0.2" + is-callable "^1.2.7" + +functions-have-names@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gaxios@^6.0.0: + version "6.7.1" + resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-6.7.1.tgz#ebd9f7093ede3ba502685e73390248bb5b7f71fb" + integrity sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ== + dependencies: + extend "^3.0.2" + https-proxy-agent "^7.0.1" + is-stream "^2.0.0" + node-fetch "^2.6.9" + uuid "^9.0.1" gensync@^1.0.0-beta.2: version "1.0.0-beta.2" - resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-caller-file@^2.0.1, get-caller-file@^2.0.5: version "2.0.5" - resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-func-name@^2.0.0: +get-east-asian-width@^1.0.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz#21b4071ee58ed04ee0db653371b55b4299875389" + integrity sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ== + +get-func-name@^2.0.1, get-func-name@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.2.tgz#0d7cf20cd13fda808669ffa88f4ffc7a3943fc41" integrity sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ== -get-intrinsic@^1.0.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.0.tgz#7ad1dc0535f3a2904bba075772763e5051f6d05f" - integrity sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.3" +get-intrinsic@^1.2.4, get-intrinsic@^1.2.5, get-intrinsic@^1.2.6, get-intrinsic@^1.2.7, get-intrinsic@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz#743f0e3b6964a93a5491ed1bffaae054d7f98d01" + integrity sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ== + dependencies: + call-bind-apply-helpers "^1.0.2" + es-define-property "^1.0.1" + es-errors "^1.3.0" + es-object-atoms "^1.1.1" + function-bind "^1.1.2" + get-proto "^1.0.1" + gopd "^1.2.0" + has-symbols "^1.1.0" + hasown "^2.0.2" + math-intrinsics "^1.1.0" get-package-type@^0.1.0: version "0.1.0" - resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-stdin@^4.0.1: - version "4.0.1" - resolved "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz" - integrity 
sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= - -get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz" - integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= +get-port@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" + integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== +get-proto@^1.0.0, get-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-proto/-/get-proto-1.0.1.tgz#150b3f2743869ef3e851ec0c49d15b1d14d00ee1" + integrity sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g== dependencies: - pump "^3.0.0" + dunder-proto "^1.0.1" + es-object-atoms "^1.0.0" + +get-stdin@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" + integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== + +get-stdin@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-9.0.0.tgz#3983ff82e03d56f1b2ea0d3e60325f39d703a575" + integrity sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA== get-stream@^5.0.0, get-stream@^5.1.0: version "5.2.0" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== dependencies: pump "^3.0.0" -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved 
"https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= +get-stream@^6.0.0, get-stream@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-stream@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-8.0.1.tgz#def9dfd71742cd7754a7761ed43749a27d02eca2" + integrity sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA== + +get-symbol-description@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.1.0.tgz#7bdd54e0befe8ffc9f3b4e203220d9f1e881b6ee" + integrity sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg== + dependencies: + call-bound "^1.0.3" + es-errors "^1.3.0" + get-intrinsic "^1.2.6" + +get-uri@^6.0.1: + version "6.0.5" + resolved "https://registry.yarnpkg.com/get-uri/-/get-uri-6.0.5.tgz#714892aa4a871db671abc5395e5e9447bc306a16" + integrity sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg== + dependencies: + basic-ftp "^5.0.2" + data-uri-to-buffer "^6.0.2" + debug "^4.3.4" getpass@^0.1.1: version "0.1.7" - resolved "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz" - integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== dependencies: assert-plus "^1.0.0" -gh-got@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/gh-got/-/gh-got-5.0.0.tgz" - integrity sha1-7pW+NxBv2HSKlvjR20uuqJ4b+oo= - dependencies: - got "^6.2.0" - is-plain-obj "^1.1.0" 
+git-hooks-list@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/git-hooks-list/-/git-hooks-list-3.2.0.tgz#ffe5d5895e29d24f930f9a98dd604b7e407d2f5f" + integrity sha512-ZHG9a1gEhUMX1TvGrLdyWb9kDopCBbTnI8z4JgRMYxsijWipgjSEYoPWqBuIB0DnRnvqlQSEeVmzpeuPm7NdFQ== -github-slugger@^1.2.1: - version "1.3.0" - resolved "https://registry.npmjs.org/github-slugger/-/github-slugger-1.3.0.tgz" - integrity sha512-gwJScWVNhFYSRDvURk/8yhcFBee6aFjye2a7Lhb2bUyRulpIoek9p0I9Kt7PT67d/nUlZbFu8L9RLiA0woQN8Q== +git-raw-commits@^2.0.11: + version "2.0.11" + resolved "https://registry.yarnpkg.com/git-raw-commits/-/git-raw-commits-2.0.11.tgz#bc3576638071d18655e1cc60d7f524920008d723" + integrity sha512-VnctFhw+xfj8Va1xtfEqCUD2XDrbAPSJx+hSrE5K7fGdjZruW7XV+QOrN7LF/RJyvspRiD2I0asWsxFp0ya26A== dependencies: - emoji-regex ">=6.0.0 <=6.1.1" + dargs "^7.0.0" + lodash "^4.17.15" + meow "^8.0.0" + split2 "^3.0.0" + through2 "^4.0.0" -github-username@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/github-username/-/github-username-3.0.0.tgz" - integrity sha1-CnciGbMTB0NCnyRW0L3T21Xc57E= - dependencies: - gh-got "^5.0.0" +github-from-package@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" + integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= +github-slugger@^2: + version "2.0.0" + resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-2.0.0.tgz#52cf2f9279a21eb6c59dd385b410f0c0adda8f1a" + integrity sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw== + +github-username@^9.0.0: + version "9.0.0" + resolved 
"https://registry.yarnpkg.com/github-username/-/github-username-9.0.0.tgz#0f165278e5db68eb038a732e9d54d3d3c06c4430" + integrity sha512-lY7+mymwQUEhRwWTLxieKkxcZkVNnUh8iAGnl30DMB1ZtYODHkMAckZk8Jx5dLQs1YKPYM2ibnzQu02aCLFcYQ== dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" + "@octokit/rest" "^21.1.1" -glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.2, glob-parent@~5.1.0: +glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" -glob-to-regexp@^0.3.0: - version "0.3.0" - resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz" - integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs= - -glob@7.1.6: - version "7.1.6" - resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== +glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" + is-glob "^4.0.3" -glob@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/glob/-/glob-11.0.0.tgz#6031df0d7b65eaa1ccb9b29b5ced16cea658e77e" - integrity sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g== +glob@^10.2.2, glob@^10.3.10: + version "10.4.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity 
sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== dependencies: foreground-child "^3.1.0" - jackspeak "^4.0.1" - minimatch "^10.0.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" minipass "^7.1.2" package-json-from-dist "^1.0.0" - path-scurry "^2.0.0" + path-scurry "^1.11.1" -glob@^6.0.1: - version "6.0.4" - resolved "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz" - integrity sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI= +glob@^11.0.0, glob@^11.0.3: + version "11.0.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-11.0.3.tgz#9d8087e6d72ddb3c4707b1d2778f80ea3eaefcd6" + integrity sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA== dependencies: - inflight "^1.0.4" - inherits "2" - minimatch "2 || 3" - once "^1.3.0" - path-is-absolute "^1.0.0" + foreground-child "^3.3.1" + jackspeak "^4.1.1" + minimatch "^10.0.3" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^2.0.0" -glob@^7.0.0, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: - version "7.2.0" - resolved "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz" - integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== +glob@^7.0.0, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.3: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "^3.0.4" + minimatch "^3.1.1" once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.2.3: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== +glob@^8.0.3, glob@^8.1.0: + version "8.1.0" + resolved 
"https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e" + integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "^3.1.1" + minimatch "^5.0.1" once "^1.3.0" - path-is-absolute "^1.0.0" -global-dirs@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz" - integrity sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA== +global-directory@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/global-directory/-/global-directory-4.0.1.tgz#4d7ac7cfd2cb73f304c53b8810891748df5e361e" + integrity sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q== dependencies: - ini "2.0.0" + ini "4.1.1" -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^12.1.0: - version "12.4.0" - resolved "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz" - integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== +global-dirs@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445" + integrity sha512-NknMLn7F2J7aflwFOlGdNIuCDpN3VGoSoB+aap3KABFWbHVn1TCgFC+np23J8W2BiZbjfEw3BFBycSMv1AFblg== dependencies: - type-fest "^0.8.1" + ini "^1.3.4" -globby@^10.0.1: - version "10.0.2" - resolved "https://registry.npmjs.org/globby/-/globby-10.0.2.tgz" - integrity sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg== +globals@^13.19.0: + version "13.24.0" + resolved 
"https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" + integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== dependencies: - "@types/glob" "^7.1.1" - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.0.3" - glob "^7.1.3" - ignore "^5.1.1" - merge2 "^1.2.3" - slash "^3.0.0" + type-fest "^0.20.2" -globby@^11.0.1, globby@^11.0.2: - version "11.0.2" - resolved "https://registry.npmjs.org/globby/-/globby-11.0.2.tgz" - integrity sha512-2ZThXDvvV8fYFRVIxnrMQBipZQDr7MxKAmQK1vujaj9/7eF0efG7BPUKJ7jP7G5SLF37xKDXvO4S/KKLj/Z0og== +globalthis@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.4.tgz#7430ed3a975d97bfb59bcce41f5cabbafa651236" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" + define-properties "^1.2.1" + gopd "^1.0.1" globby@^11.1.0: version "11.1.0" @@ -4261,45 +7154,24 @@ globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" -globby@^13.1.2: - version "13.1.2" - resolved "https://registry.yarnpkg.com/globby/-/globby-13.1.2.tgz#29047105582427ab6eca4f905200667b056da515" - integrity sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ== +globby@^14.0.0, globby@^14.0.2, globby@^14.1.0: + version "14.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-14.1.0.tgz#138b78e77cf5a8d794e327b15dce80bf1fb0a73e" + integrity sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA== dependencies: - dir-glob "^3.0.1" - fast-glob "^3.2.11" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^4.0.0" + "@sindresorhus/merge-streams" "^2.1.0" + fast-glob "^3.3.3" + ignore "^7.0.3" + path-type "^6.0.0" + slash "^5.1.0" + unicorn-magic "^0.3.0" -globby@^8.0.1: - version "8.0.2" 
- resolved "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz" - integrity sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w== - dependencies: - array-union "^1.0.1" - dir-glob "2.0.0" - fast-glob "^2.0.2" - glob "^7.1.2" - ignore "^3.3.5" - pify "^3.0.0" - slash "^1.0.0" - -globby@^9.2.0: - version "9.2.0" - resolved "https://registry.npmjs.org/globby/-/globby-9.2.0.tgz" - integrity sha512-ollPHROa5mcxDEkwg6bPt3QbEf4pDQSNtd6JPL1YvOvAo/7/0VAm9TccUeoTmarjPw4pfUthSCqcyfNB1I3ZSg== - dependencies: - "@types/glob" "^7.1.1" - array-union "^1.0.2" - dir-glob "^2.2.2" - fast-glob "^2.2.6" - glob "^7.1.3" - ignore "^4.0.3" - pify "^4.0.1" - slash "^2.0.0" +gopd@^1.0.1, gopd@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.2.0.tgz#89f56b8217bdbc8802bd299df6d7f1081d7e51a1" + integrity sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg== -got@^11.8.3: +got@^11.8.3, got@^11.8.6: version "11.8.6" resolved "https://registry.yarnpkg.com/got/-/got-11.8.6.tgz#276e827ead8772eddbcfc97170590b841823233a" integrity sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g== @@ -4316,185 +7188,212 @@ got@^11.8.3: p-cancelable "^2.0.0" responselike "^2.0.0" -got@^6.2.0: - version "6.7.1" - resolved "https://registry.npmjs.org/got/-/got-6.7.1.tgz" - integrity sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA= - dependencies: - create-error-class "^3.0.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-redirect "^1.0.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - lowercase-keys "^1.0.0" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - unzip-response "^2.0.1" - url-parse-lax "^1.0.0" - -got@^9.6.0: - version "9.6.0" - resolved "https://registry.npmjs.org/got/-/got-9.6.0.tgz" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - 
"@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - -graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.2, graceful-fs@^4.2.4: - version "4.2.6" - resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz" - integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== - -gray-matter@^2.1.0: - version "2.1.1" - resolved "https://registry.npmjs.org/gray-matter/-/gray-matter-2.1.1.tgz" - integrity sha1-MELZrewqHe1qdwep7SOA+KF6Qw4= +got@^13: + version "13.0.0" + resolved "https://registry.yarnpkg.com/got/-/got-13.0.0.tgz#a2402862cef27a5d0d1b07c0fb25d12b58175422" + integrity sha512-XfBk1CxOOScDcMr9O1yKkNaQyy865NbYs+F7dr4H0LZMVgCj2Le59k6PqbNHoL5ToeaEQUYh6c6yMfVcc6SJxA== dependencies: - ansi-red "^0.1.1" - coffee-script "^1.12.4" - extend-shallow "^2.0.1" - js-yaml "^3.8.1" - toml "^2.3.2" + "@sindresorhus/is" "^5.2.0" + "@szmarczak/http-timer" "^5.0.1" + cacheable-lookup "^7.0.0" + cacheable-request "^10.2.8" + decompress-response "^6.0.0" + form-data-encoder "^2.1.2" + get-stream "^6.0.1" + http2-wrapper "^2.1.10" + lowercase-keys "^3.0.0" + p-cancelable "^3.0.0" + responselike "^3.0.0" + +graceful-fs@4.2.10: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.2, graceful-fs@^4.2.4, graceful-fs@^4.2.6: + version "4.2.11" + resolved 
"https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +graphemer@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -grouped-queue@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/grouped-queue/-/grouped-queue-1.1.0.tgz" - integrity sha512-rZOFKfCqLhsu5VqjBjEWiwrYqJR07KxIkH4mLZlNlGDfntbb4FbMyGFP14TlvRPrU9S3Hnn/sgxbC5ZeN0no3Q== +graphql-request@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-6.1.0.tgz#f4eb2107967af3c7a5907eb3131c671eac89be4f" + integrity sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw== dependencies: - lodash "^4.17.15" + "@graphql-typed-document-node/core" "^3.2.0" + cross-fetch "^3.1.5" -growl@1.10.5: - version "1.10.5" - resolved "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz" - integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA== +graphql@^16.11.0: + version "16.11.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.11.0.tgz#96d17f66370678027fdf59b2d4c20b4efaa8a633" + integrity sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw== -gulp-header@^1.7.1: - version "1.8.12" - resolved "https://registry.npmjs.org/gulp-header/-/gulp-header-1.8.12.tgz" - integrity sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ== +gray-matter@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/gray-matter/-/gray-matter-4.0.3.tgz#e893c064825de73ea1f5f7d88c7a9f7274288798" + integrity 
sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q== dependencies: - concat-with-sourcemaps "*" - lodash.template "^4.4.0" - through2 "^2.0.0" + js-yaml "^3.13.1" + kind-of "^6.0.2" + section-matter "^1.0.0" + strip-bom-string "^1.0.0" -har-schema@^2.0.0: +grouped-queue@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" - integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= + resolved "https://registry.yarnpkg.com/grouped-queue/-/grouped-queue-2.0.0.tgz#a2c6713f2171e45db2c300a3a9d7c119d694dac8" + integrity sha512-/PiFUa7WIsl48dUeCvhIHnwNmAAzlI/eHoJl0vu3nsFA366JleY7Ff8EVTplZu5kO0MIdZjKTTnzItL61ahbnw== -har-validator@~5.1.3: - version "5.1.5" - resolved "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz" - integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== +handlebars@^4.7.8: + version "4.7.8" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.8.tgz#41c42c18b1be2365439188c77c6afae71c0cd9e9" + integrity sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ== dependencies: - ajv "^6.12.3" - har-schema "^2.0.0" + minimist "^1.2.5" + neo-async "^2.6.2" + source-map "^0.6.1" + wordwrap "^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + +hard-rejection@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" + integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== has-ansi@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity 
sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg== dependencies: ansi-regex "^2.0.0" -has-flag@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz" - integrity sha1-6CB68cx7MNRGzHC3NLXovhj4jVE= +has-bigints@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.1.0.tgz#28607e965ac967e03cd2a2c70a2636a1edad49fe" + integrity sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg== has-flag@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" - integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== has-flag@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== -has-symbols@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= +has-property-descriptors@^1.0.0, has-property-descriptors@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= + es-define-property "^1.0.0" -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= +has-proto@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.2.0.tgz#5de5a6eabd95fdffd9818b43055e8065e39fe9d5" + integrity sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ== dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" + dunder-proto "^1.0.0" -has-yarn@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz" - integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== +has-symbols@^1.0.3, has-symbols@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.1.0.tgz#fc9c6a783a084951d0b971fe1018de813707a338" + integrity sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ== -has@^1.0.3: - version "1.0.3" - resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== +has-tostringtag@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz#2cdc42d40bef2e5b4eeab7c01a73c54ce7ab5abc" + integrity sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw== dependencies: - 
function-bind "^1.1.1" + has-symbols "^1.0.3" hasha@^5.0.0: version "5.2.2" - resolved "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz" + resolved "https://registry.yarnpkg.com/hasha/-/hasha-5.2.2.tgz#a48477989b3b327aea3c04f53096d816d97522a1" integrity sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ== dependencies: is-stream "^2.0.0" type-fest "^0.8.0" -he@1.2.0, he@^1.2.0: +hasown@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +hast-util-to-html@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz#ccc673a55bb8e85775b08ac28380f72d47167005" + integrity sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + ccount "^2.0.0" + comma-separated-tokens "^2.0.0" + hast-util-whitespace "^3.0.0" + html-void-elements "^3.0.0" + mdast-util-to-hast "^13.0.0" + property-information "^7.0.0" + space-separated-tokens "^2.0.0" + stringify-entities "^4.0.0" + zwitch "^2.0.4" + +hast-util-whitespace@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz#7778ed9d3c92dd9e8c5c8f648a49c21fc51cb621" + integrity sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw== + dependencies: + "@types/hast" "^3.0.0" + +he@^1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== +header-case@^2.0.4: + version 
"2.0.4" + resolved "https://registry.yarnpkg.com/header-case/-/header-case-2.0.4.tgz#5a42e63b55177349cf405beb8d775acabb92c063" + integrity sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q== + dependencies: + capital-case "^1.0.4" + tslib "^2.0.3" + +help-me@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/help-me/-/help-me-5.0.0.tgz#b1ebe63b967b74060027c2ac61f9be12d354a6f6" + integrity sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg== + +highlight.js@^11.7.0: + version "11.11.1" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.11.1.tgz#fca06fa0e5aeecf6c4d437239135fabc15213585" + integrity sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w== + hosted-git-info@^2.1.4: version "2.8.9" - resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== -hosted-git-info@^3.0.6: - version "3.0.8" - resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.8.tgz" - integrity sha512-aXpmwoOhRBrw6X3j0h5RloK4x1OzsxMPyxqIHyNfSe2pypkVTZFpEiRoSipPEPlMrh0HW/XsjkJ5WgnCirpNUw== +hosted-git-info@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" + integrity sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA== dependencies: lru-cache "^6.0.0" +hosted-git-info@^7.0.0, hosted-git-info@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-7.0.2.tgz#9b751acac097757667f30114607ef7b661ff4f17" + integrity 
sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w== + dependencies: + lru-cache "^10.0.1" + html-encoding-sniffer@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz#696df529a7cfd82446369dc5193e590a3735b448" @@ -4504,17 +7403,32 @@ html-encoding-sniffer@^4.0.0: html-escaper@^2.0.0: version "2.0.2" - resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== -http-cache-semantics@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" - integrity "sha1-q+AvyymFRgvwMjvmZENuw0dqbVo= sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" +html-void-elements@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-3.0.0.tgz#fc9dbd84af9e747249034d4d62602def6517f1d7" + integrity sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg== -http-call@^5.1.2: +htmlparser2@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-10.0.0.tgz#77ad249037b66bf8cc99c6e286ef73b83aeb621d" + integrity sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.2.1" + entities "^6.0.0" + +http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz#205f4db64f8562b76a4ff9235aa5279839a09dd5" + integrity 
sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ== + +http-call@^5.2.2: version "5.3.0" - resolved "https://registry.npmjs.org/http-call/-/http-call-5.3.0.tgz" + resolved "https://registry.yarnpkg.com/http-call/-/http-call-5.3.0.tgz#4ded815b13f423de176eb0942d69c43b25b148db" integrity sha512-ahwimsC23ICE4kPl9xTBjKB4inbRaeLyZeRunC/1Jy/Z6X8tv22MEAjK+KBOMSVLaqXPTTmd8638waVIKLGx2w== dependencies: content-type "^1.0.4" @@ -4525,35 +7439,26 @@ http-call@^5.1.2: tunnel-agent "^0.6.0" http-parser-js@>=0.5.1: - version "0.5.3" - resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.3.tgz" - integrity sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg== + version "0.5.10" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.10.tgz#b3277bd6d7ed5588e20ea73bf724fcbe44609075" + integrity sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA== -http-proxy-agent@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.1.tgz#f1c7df4bd6c30ba90f2c713fd4b60d3989d4b3d9" - integrity sha512-My1KCEPs6A0hb4qCVzYp8iEvA8j8YqcvXLZZH8C9OFuTYpYjHE7N2dtG3mRl1HMD4+VGXpF3XcDVcxGBT7yDZQ== +http-proxy-agent@^7.0.0, http-proxy-agent@^7.0.1, http-proxy-agent@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz#9a8b1f246866c028509486585f62b8f2c18c270e" + integrity sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig== dependencies: agent-base "^7.1.0" debug "^4.3.4" -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" - integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -http-signature@~1.3.1: - version "1.3.6" - resolved 
"https://registry.yarnpkg.com/http-signature/-/http-signature-1.3.6.tgz#cb6fbfdf86d1c974f343be94e87f7fc128662cf9" - integrity sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw== +http-signature@~1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.4.0.tgz#dee5a9ba2bf49416abc544abd6d967f6a94c8c3f" + integrity sha512-G5akfn7eKbpDN+8nPS/cb57YeA1jLTVxjpCj7tmm3QKPdyDy7T+qSC40e9ptydSWvkwjSXw1VbkpyEm39ukeAg== dependencies: assert-plus "^1.0.0" jsprim "^2.0.2" - sshpk "^1.14.1" + sshpk "^1.18.0" http2-wrapper@^1.0.0-beta.5.2: version "1.0.3" @@ -4563,27 +7468,50 @@ http2-wrapper@^1.0.0-beta.5.2: quick-lru "^5.1.1" resolve-alpn "^1.0.0" -https-proxy-agent@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz" - integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== +http2-wrapper@^2.1.10: + version "2.2.1" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-2.2.1.tgz#310968153dcdedb160d8b72114363ef5fce1f64a" + integrity sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.2.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== dependencies: agent-base "6" debug "4" -https-proxy-agent@^7.0.2: - version "7.0.3" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.3.tgz#93f115f0f106a746faf364d1301b2e561cdf70de" - integrity sha512-kCnwztfX0KZJSLOBrcL0emLeFako55NWMovvyPP2AjsghNk9RB1yjSI+jVumPHYZsNXegNoqupSW9IY3afSH8w== +https-proxy-agent@^7.0.1, https-proxy-agent@^7.0.6: + version "7.0.6" + resolved 
"https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz#da8dfeac7da130b05c2ba4b59c9b6cd66611a6b9" + integrity sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw== dependencies: - agent-base "^7.0.2" + agent-base "^7.1.2" debug "4" human-signals@^1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +human-signals@^4.3.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-4.3.1.tgz#ab7f811e851fca97ffbd2c1fe9a958964de321b2" + integrity sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ== + +human-signals@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28" + integrity sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ== + humanize-ms@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" @@ -4591,12 +7519,17 @@ humanize-ms@^1.2.1: dependencies: ms "^2.0.0" -hyperlinker@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/hyperlinker/-/hyperlinker-1.0.0.tgz" - integrity sha512-Ty8UblRWFEcfSuIaajM34LdPXIhbs1ajEX/BBPv24J+enSVaEVY63xQ6lTO9VRYS5LAoghIG0IDJ+p+IPzKUQQ== +husky@^7.0.4: + version "7.0.4" + resolved 
"https://registry.yarnpkg.com/husky/-/husky-7.0.4.tgz#242048245dc49c8fb1bf0cc7cfb98dd722531535" + integrity sha512-vbaCKN2QLtP/vD4yvs6iz6hBEo6wkSzs8HpRah1Z6aGmF2KW5PdYuAd7uX5a+OyBZHBhd+TFLqgjUgytQr4RvQ== -iconv-lite@0.6.3: +hyperdyperid@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/hyperdyperid/-/hyperdyperid-1.2.0.tgz#59668d323ada92228d2a869d3e474d5a33b69e6b" + integrity sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A== + +iconv-lite@0.6.3, iconv-lite@^0.6.2, iconv-lite@^0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== @@ -4605,356 +7538,407 @@ iconv-lite@0.6.3: iconv-lite@^0.4.24: version "0.4.24" - resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" -ieee754@^1.1.13: +ieee754@^1.1.13, ieee754@^1.2.1: version "1.2.1" - resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^3.3.5: - version "3.3.10" - resolved "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz" - integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug== - -ignore@^4.0.3, ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== 
+ignore-walk@^6.0.4: + version "6.0.5" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-6.0.5.tgz#ef8d61eab7da169078723d1f82833b36e200b0dd" + integrity sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A== + dependencies: + minimatch "^9.0.0" -ignore@^5.1.1, ignore@^5.1.4: - version "5.1.8" - resolved "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz" - integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== +ignore@^5.2.0, ignore@^5.2.4, ignore@^5.3.0, ignore@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.2.tgz#3cd40e729f3643fd87cb04e50bf0eb722bc596f5" + integrity sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== -ignore@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" - integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== +ignore@^7.0.3: + version "7.0.5" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-7.0.5.tgz#4cb5f6cd7d4c7ab0365738c7aea888baa6d7efd9" + integrity sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg== immediate@~3.0.5: version "3.0.6" resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" integrity sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ== -import-fresh@^3.0.0, import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== +import-fresh@^3.0.0, import-fresh@^3.2.1, import-fresh@^3.3.0: + version "3.3.1" + resolved 
"https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.1.tgz#9cecb56503c0ada1f2741dbbd6546e4b13b57ccf" + integrity sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ== dependencies: parent-module "^1.0.0" resolve-from "^4.0.0" -import-lazy@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz" - integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM= - imurmurhash@^0.1.4: version "0.1.4" - resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" - integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== -indent-string@^3.2.0: +indent-string@^3.0.0: version "3.2.0" - resolved "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz" - integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" + integrity sha512-BYqTHXTGUIvg7t1r4sJNKcbDZkL92nkXA8YtRpbjFHRHGDL/NtUeiBJMeE60kIFN/Mg8ESaWQvftaYMGJzQZCQ== indent-string@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== +index-to-position@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/index-to-position/-/index-to-position-1.1.0.tgz#2e50bd54c8040bdd6d9b3d95ec2a8fedf86b4d44" + integrity sha512-XPdx9Dq4t9Qk1mTMbWONJqU7boCoumEH7fRET37HX5+khDUl3J2W6PdALxhILYlIYx2amlwYcRPp28p0tSiojg== + inflight@^1.0.4: version "1.0.6" - resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" - integrity 
sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.0, inherits@~2.0.3: version "2.0.4" - resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -ini@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" - integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== +ini@4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ini/-/ini-4.1.1.tgz#d95b3d843b1e906e56d6747d5447904ff50ce7a1" + integrity sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g== -ini@~1.3.0: +ini@^1.3.4, ini@~1.3.0: version "1.3.8" - resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== -inquirer@^10.1.8: - version "10.1.8" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-10.1.8.tgz#727887af8a3646d8dd3eebc098c42884dc4d7151" - integrity sha512-syxGpOzLyqVeZi1KDBjRTnCn5PiGWySGHP0BbqXbqsEK0ckkZk3egAepEWslUjZXj0rhkUapVXM/IpADWe4D6w== +ini@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/ini/-/ini-4.1.3.tgz#4c359675a6071a46985eb39b14e4a2c0ec98a795" + integrity sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg== + +inquirer@^10.2.2: + version "10.2.2" 
+ resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-10.2.2.tgz#36b866443a9fb2747151766c01ef9c8ba2d585eb" + integrity sha512-tyao/4Vo36XnUItZ7DnUXX4f1jVao2mSrleV/5IPtW/XAEA26hRVsbc68nuTEKWcr5vMP/1mVoT2O7u8H4v1Vg== dependencies: - "@inquirer/prompts" "^5.3.8" - "@inquirer/type" "^1.5.2" + "@inquirer/core" "^9.1.0" + "@inquirer/prompts" "^5.5.0" + "@inquirer/type" "^1.5.3" "@types/mute-stream" "^0.0.4" ansi-escapes "^4.3.2" mute-stream "^1.0.0" run-async "^3.0.0" rxjs "^7.8.1" -inquirer@^7.1.0: - version "7.3.3" - resolved "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz" - integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== +inquirer@^9.2.2: + version "9.3.7" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-9.3.7.tgz#0b562bf843812208844741c9aec9244c939b83d4" + integrity sha512-LJKFHCSeIRq9hanN14IlOtPSTe3lNES7TYDTE2xxdAy1LS5rYphajK1qtwvj3YmQXvvk0U2Vbmcni8P9EIQW9w== dependencies: - ansi-escapes "^4.2.1" - chalk "^4.1.0" - cli-cursor "^3.1.0" - cli-width "^3.0.0" - external-editor "^3.0.3" - figures "^3.0.0" - lodash "^4.17.19" - mute-stream "0.0.8" - run-async "^2.4.0" - rxjs "^6.6.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - through "^2.3.6" + "@inquirer/figures" "^1.0.3" + ansi-escapes "^4.3.2" + cli-width "^4.1.0" + external-editor "^3.1.0" + mute-stream "1.0.0" + ora "^5.4.1" + run-async "^3.0.0" + rxjs "^7.8.1" + string-width "^4.2.3" + strip-ansi "^6.0.1" + wrap-ansi "^6.2.0" + yoctocolors-cjs "^2.1.2" + +internal-slot@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.1.0.tgz#1eac91762947d2f7056bc838d93e13b2e9604961" + integrity sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw== + dependencies: + es-errors "^1.3.0" + hasown "^2.0.2" + side-channel "^1.1.0" interpret@^1.0.0: version "1.4.0" - resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz" + resolved 
"https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== -ioredis@~4.17.1: - version "4.17.3" - resolved "https://registry.npmjs.org/ioredis/-/ioredis-4.17.3.tgz" - integrity sha512-iRvq4BOYzNFkDnSyhx7cmJNOi1x/HWYe+A4VXHBu4qpwJaGT1Mp+D2bVGJntH9K/Z/GeOM/Nprb8gB3bmitz1Q== - dependencies: - cluster-key-slot "^1.1.0" - debug "^4.1.1" - denque "^1.1.0" - lodash.defaults "^4.2.0" - lodash.flatten "^4.4.0" - redis-commands "1.5.0" - redis-errors "^1.2.0" - redis-parser "^3.0.0" - standard-as-callback "^2.0.1" - -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= +ip-address@^9.0.5: + version "9.0.5" + resolved "https://registry.yarnpkg.com/ip-address/-/ip-address-9.0.5.tgz#117a960819b08780c3bd1f14ef3c1cc1d3f3ea5a" + integrity sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g== dependencies: - kind-of "^3.0.2" + jsbn "1.1.0" + sprintf-js "^1.1.3" -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== +is-array-buffer@^3.0.4, is-array-buffer@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.5.tgz#65742e1e687bd2cc666253068fd8707fe4d44280" + integrity sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A== dependencies: - kind-of "^6.0.0" + call-bind "^1.0.8" + call-bound "^1.0.3" + get-intrinsic "^1.2.6" is-arrayish@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" - integrity 
sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-async-function@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-async-function/-/is-async-function-2.1.1.tgz#3e69018c8e04e73b738793d020bfe884b9fd3523" + integrity sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ== + dependencies: + async-function "^1.0.0" + call-bound "^1.0.3" + get-proto "^1.0.1" + has-tostringtag "^1.0.2" + safe-regex-test "^1.1.0" + +is-bigint@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.1.0.tgz#dda7a3445df57a42583db4228682eba7c4170672" + integrity sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ== + dependencies: + has-bigints "^1.0.2" is-binary-path@~2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - -is-ci@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz" - integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== - dependencies: - ci-info "^2.0.0" - -is-core-module@^2.2.0: - version "2.2.0" - resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz" - integrity 
sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== +is-boolean-object@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.2.2.tgz#7067f47709809a393c71ff5bb3e135d8a9215d9e" + integrity sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A== dependencies: - has "^1.0.3" + call-bound "^1.0.3" + has-tostringtag "^1.0.2" -is-core-module@^2.8.0: - version "2.8.1" - resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz" - integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA== +is-builtin-module@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169" + integrity sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A== dependencies: - has "^1.0.3" + builtin-modules "^3.3.0" -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" +is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== +is-core-module@^2.13.0, is-core-module@^2.16.0, is-core-module@^2.16.1, is-core-module@^2.5.0: + version "2.16.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4" + 
integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w== dependencies: - kind-of "^6.0.0" + hasown "^2.0.2" -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== +is-data-view@^1.0.1, is-data-view@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-data-view/-/is-data-view-1.0.2.tgz#bae0a41b9688986c2188dda6657e56b8f9e63b8e" + integrity sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw== dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" + call-bound "^1.0.2" + get-intrinsic "^1.2.6" + is-typed-array "^1.1.13" -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== +is-date-object@^1.0.5, is-date-object@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.1.0.tgz#ad85541996fc7aa8b2729701d27b7319f95d82f7" + integrity sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg== dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" + call-bound "^1.0.2" + has-tostringtag "^1.0.2" is-docker@^2.0.0: - version "2.1.1" - resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.1.1.tgz" - integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== - -is-docker@^2.1.1: version "2.2.1" resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity 
sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== +is-docker@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-3.0.0.tgz#90093aa3106277d8a77a5910dbae71747e15a200" + integrity sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ== + is-electron@2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/is-electron/-/is-electron-2.2.2.tgz#3778902a2044d76de98036f5dc58089ac4d80bb9" integrity sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg== -is-extendable@^0.1.0, is-extendable@^0.1.1: +is-extendable@^0.1.0: version "0.1.1" - resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" - integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object "^2.0.4" - -is-extglob@^2.1.0, is-extglob@^2.1.1: +is-extglob@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-finalizationregistry@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz#eefdcdc6c94ddd0674d9c85887bf93f944a97c90" + integrity 
sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg== + dependencies: + call-bound "^1.0.3" is-fullwidth-code-point@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw== dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w== is-fullwidth-code-point@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== -is-glob@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz" - integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= +is-fullwidth-code-point@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz#9609efced7c2f97da7b60145ef481c787c7ba704" + integrity sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA== dependencies: - is-extglob "^2.1.0" + get-east-asian-width "^1.0.0" -is-glob@^4.0.0, 
is-glob@^4.0.1, is-glob@~4.0.1: - version "4.0.1" - resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== +is-generator-function@^1.0.10: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.1.0.tgz#bf3eeda931201394f57b5dba2800f91a238309ca" + integrity sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ== + dependencies: + call-bound "^1.0.3" + get-proto "^1.0.0" + has-tostringtag "^1.0.2" + safe-regex-test "^1.1.0" + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" -is-installed-globally@^0.4.0: - version "0.4.0" - resolved "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz" - integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ== +is-in-ci@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-in-ci/-/is-in-ci-1.0.0.tgz#9a86bbda7e42c6129902e0574c54b018fbb6ab88" + integrity sha512-eUuAjybVTHMYWm/U+vBO1sY/JOCgoPCXRxzdju0K+K0BiGW0SChEL1MLC0PoCIR1OlPo5YAp8HuQoUlsWEICwg== + +is-inside-container@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-inside-container/-/is-inside-container-1.0.0.tgz#e81fba699662eb31dbdaf26766a61d4814717ea4" + integrity sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA== + dependencies: + is-docker "^3.0.0" + +is-installed-globally@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-1.0.0.tgz#08952c43758c33d815692392f7f8437b9e436d5a" + integrity sha512-K55T22lfpQ63N4KEN57jZUAaAYqYHEe8veb/TycJRk9DdSCLLcovXz/mL6mOnhQaZsQGwPhuFopdQIlqGSEjiQ== dependencies: - global-dirs "^3.0.0" - is-path-inside "^3.0.2" + global-directory "^4.0.1" + is-path-inside "^4.0.0" is-interactive@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== -is-npm@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz" - integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA== +is-interactive@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-2.0.0.tgz#40c57614593826da1100ade6059778d597f16e90" + integrity sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ== -is-number@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz" - integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= - dependencies: - kind-of "^3.0.2" +is-lambda@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5" + integrity sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ== -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" +is-map@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.3.tgz#ede96b7fe1e270b3c4465e3a465658764926d62e" + integrity sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw== 
-is-number@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz" - integrity sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ== +is-negative-zero@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.3.tgz#ced903a027aca6381b777a5743069d7376a49747" + integrity sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw== + +is-npm@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-6.0.0.tgz#b59e75e8915543ca5d881ecff864077cba095261" + integrity sha512-JEjxbSmtPSt1c8XTkVrlujcXdKV1/tvuQ7GwKcAlyiVLeYFQ2VHat8xfrDJsIkhCdF/tZ7CiIR3sy141c6+gPQ== + +is-number-object@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.1.1.tgz#144b21e95a1bc148205dcc2814a9134ec41b2541" + integrity sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw== + dependencies: + call-bound "^1.0.3" + has-tostringtag "^1.0.2" is-number@^7.0.0: version "7.0.0" - resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-obj@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== -is-path-inside@^3.0.2: +is-observable@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" + integrity 
sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA== + dependencies: + symbol-observable "^1.1.0" + +is-path-inside@^3.0.3: version "3.0.3" - resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== +is-path-inside@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-4.0.0.tgz#805aeb62c47c1b12fc3fd13bfb3ed1e7430071db" + integrity sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA== + is-plain-obj@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz" - integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" + integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== -is-plain-obj@^2.0.0, is-plain-obj@^2.1.0: +is-plain-obj@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== -is-plain-object@^2.0.3, is-plain-object@^2.0.4: +is-plain-obj@^4.0.0, is-plain-obj@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0" + integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg== + +is-plain-object@^2.0.4: version "2.0.4" - resolved 
"https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" @@ -4964,11 +7948,6 @@ is-plain-object@^3.0.0: resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-3.0.1.tgz#662d92d24c0aa4302407b0d45d21f2251c85f85b" integrity sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g== -is-plain-object@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" - integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== - is-potential-custom-element-name@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" @@ -4976,207 +7955,290 @@ is-potential-custom-element-name@^1.0.1: is-primitive@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-3.0.1.tgz#98c4db1abff185485a657fc2905052b940524d05" integrity sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w== -is-redirect@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz" - integrity sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ= +is-promise@^2.1.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + +is-regex@^1.2.1: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/is-regex/-/is-regex-1.2.1.tgz#76d70a3ed10ef9be48eb577887d74205bf0cad22" + integrity sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g== + dependencies: + call-bound "^1.0.2" + gopd "^1.2.0" + has-tostringtag "^1.0.2" + hasown "^2.0.2" -is-retry-allowed@^1.0.0, is-retry-allowed@^1.1.0: +is-retry-allowed@^1.1.0: version "1.2.0" - resolved "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4" integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== -is-scoped@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/is-scoped/-/is-scoped-1.0.0.tgz" - integrity sha1-RJypgpnnEwOCViieyytUDcQ3yzA= +is-set@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.3.tgz#8ab209ea424608141372ded6e0cb200ef1d9d01d" + integrity sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg== + +is-shared-array-buffer@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz#9b67844bd9b7f246ba0708c3a93e34269c774f6f" + integrity sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A== dependencies: - scoped-regex "^1.0.0" + call-bound "^1.0.3" -is-stream@^1.0.0, is-stream@^1.1.0: +is-stream@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== -is-stream@^2.0.0: - version "2.0.0" - resolved 
"https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz" - integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== +is-stream@^2, is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac" + integrity sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA== + +is-string@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.1.1.tgz#92ea3f3d5c5b6e039ca8677e5ac8d07ea773cbb9" + integrity sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA== + dependencies: + call-bound "^1.0.3" + has-tostringtag "^1.0.2" + +is-symbol@^1.0.4, is-symbol@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.1.1.tgz#f47761279f532e2b05a7024a7506dbbedacd0634" + integrity sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w== + dependencies: + call-bound "^1.0.2" + has-symbols "^1.1.0" + safe-regex-test "^1.1.0" + +is-text-path@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-text-path/-/is-text-path-1.0.1.tgz#4e1aa0fb51bfbcb3e92688001397202c1775b66e" + integrity sha512-xFuJpne9oFz5qDaodwmmG08e3CawH/2ZV8Qqza1Ko7Sk8POWbkRdwIoAWVhqvq0XeUzANEhKo2n0IXUGBm7A/w== + dependencies: + text-extensions "^1.0.0" + +is-typed-array@^1.1.13, is-typed-array@^1.1.14, is-typed-array@^1.1.15: + version "1.1.15" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.15.tgz#4bfb4a45b61cee83a5a46fba778e4e8d59c0ce0b" + integrity 
sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ== + dependencies: + which-typed-array "^1.1.16" is-typedarray@^1.0.0, is-typedarray@~1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" - integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== is-unicode-supported@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== -is-utf8@^0.2.0, is-utf8@^0.2.1: +is-unicode-supported@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz#d824984b616c292a2e198207d4a609983842f714" + integrity sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ== + +is-unicode-supported@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz#09f0ab0de6d3744d48d265ebb98f65d11f2a9b3a" + integrity sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ== + +is-utf8@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz" - integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + integrity sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q== + +is-weakmap@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.2.tgz#bf72615d649dfe5f699079c54b83e47d1ae19cfd" + 
integrity sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w== + +is-weakref@^1.0.2, is-weakref@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.1.1.tgz#eea430182be8d64174bd96bffbc46f21bf3f9293" + integrity sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew== + dependencies: + call-bound "^1.0.3" + +is-weakset@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.4.tgz#c9f5deb0bc1906c6d6f1027f284ddf459249daca" + integrity sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ== + dependencies: + call-bound "^1.0.3" + get-intrinsic "^1.2.6" -is-windows@^1.0.1, is-windows@^1.0.2: +is-windows@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz" - integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= - -is-wsl@^2.1.1, is-wsl@^2.2.0: +is-wsl@^2.2.0: version "2.2.0" - resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== dependencies: is-docker "^2.0.0" -is-yarn-global@^0.3.0: - version "0.3.0" - resolved "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz" - integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== +is-wsl@^3.1.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/is-wsl/-/is-wsl-3.1.0.tgz#e1c657e39c10090afcbedec61720f6b924c3cbd2" + integrity sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw== + dependencies: + is-inside-container "^1.0.0" isarray@0.0.1: version "0.0.1" - resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== + +isarray@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== -isarray@1.0.0, isarray@~1.0.0: +isarray@~1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== -isbinaryfile@^4.0.0: - version "4.0.8" - resolved "https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-4.0.8.tgz" - integrity sha512-53h6XFniq77YdW+spoRrebh0mnmTxRPTlcuIArO57lmMdq4uBKFKaeTjnb92oYWrSn/LVL+LT+Hap2tFQj8V+w== +isbinaryfile@5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-5.0.3.tgz#d7d9451fad89d7d3e889567f00bed6d3ea950bd3" + integrity sha512-VR4gNjFaDP8csJQvzInG20JvBj8MaHYLxNOMXysxRbGM7tcsHZwCjhch3FubFtZBkuDbN55i4dUukGeIrzF+6g== + +isbinaryfile@^5.0.2: + version "5.0.4" + resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-5.0.4.tgz#2a2edefa76cafa66613fe4c1ea52f7f031017bdf" + integrity 
sha512-YKBKVkKhty7s8rxddb40oOkuP0NbaeXrQvLin6QMHL7Ypiy2RW9LwOVrVgZRyOrhQlayMd9t+D8yDy8MKFTSDQ== isexe@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" - integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" +isexe@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-3.1.1.tgz#4a407e2bd78ddfb14bea0c27c6f7072dde775f0d" + integrity sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ== -isobject@^3.0.0, isobject@^3.0.1: +isobject@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" - integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -isomorphic-dompurify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/isomorphic-dompurify/-/isomorphic-dompurify-2.3.0.tgz#bc48fbdf52f84cf7e0a63a5e8ec89052e7dbc3c5" - integrity sha512-FCoKY4/mW/jnn/+VgE7wXGC2D/RXzVCAmGYuGWEuZXtyWnwmE2100caciIv+RbHk90q9LA0OW5IBn2f+ywHtww== +isomorphic-dompurify@^2.26.0: + version "2.26.0" + resolved "https://registry.yarnpkg.com/isomorphic-dompurify/-/isomorphic-dompurify-2.26.0.tgz#ea6201953d38488445171443393855fa700ea906" + integrity sha512-nZmoK4wKdzPs5USq4JHBiimjdKSVAOm2T1KyDoadtMPNXYHxiENd19ou4iU/V4juFM6LVgYQnpxCYmxqNP4Obw== dependencies: - "@types/dompurify" "^3.0.5" - dompurify "^3.0.8" - jsdom "^24.0.0" + dompurify "^3.2.6" + jsdom 
"^26.1.0" isstream@~0.1.2: version "0.1.2" - resolved "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" - integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.0.0-alpha.1: - version "3.0.0" - resolved "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz" - integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756" + integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== istanbul-lib-hook@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz#8f84c9434888cc6b1d0a9d7092a76d239ebf0cc6" integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ== dependencies: append-transform "^2.0.0" -istanbul-lib-instrument@^4.0.0: - version "4.0.3" - resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz" - integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== +istanbul-lib-instrument@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz#fa15401df6c15874bcb2105f773325d78c666765" + integrity sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q== dependencies: - "@babel/core" 
"^7.7.5" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.0.0" - semver "^6.3.0" + "@babel/core" "^7.23.9" + "@babel/parser" "^7.23.9" + "@istanbuljs/schema" "^0.1.3" + istanbul-lib-coverage "^3.2.0" + semver "^7.5.4" istanbul-lib-processinfo@^2.0.2: - version "2.0.2" - resolved "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz" - integrity sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw== + version "2.0.3" + resolved "https://registry.yarnpkg.com/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz#366d454cd0dcb7eb6e0e419378e60072c8626169" + integrity sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg== dependencies: archy "^1.0.0" - cross-spawn "^7.0.0" - istanbul-lib-coverage "^3.0.0-alpha.1" - make-dir "^3.0.0" + cross-spawn "^7.0.3" + istanbul-lib-coverage "^3.2.0" p-map "^3.0.0" rimraf "^3.0.0" - uuid "^3.3.3" + uuid "^8.3.2" istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + version "3.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" + integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== dependencies: istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" + make-dir "^4.0.0" supports-color "^7.1.0" istanbul-lib-source-maps@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz" - integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg== + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== dependencies: debug "^4.1.1" istanbul-lib-coverage "^3.0.0" source-map "^0.6.1" istanbul-reports@^3.0.2: - version "3.0.2" - resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz" - integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + version "3.1.7" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.7.tgz#daed12b9e1dca518e15c056e1e537e741280fa0b" + integrity sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -istextorbinary@^2.5.1: - version "2.6.0" - resolved "https://registry.npmjs.org/istextorbinary/-/istextorbinary-2.6.0.tgz" - integrity sha512-+XRlFseT8B3L9KyjxxLjfXSLMuErKDsd8DBNrsaxoViABMEZlOSCstwmw0qpoFX3+U6yWU1yhLudAe6/lETGGA== - dependencies: - binaryextensions "^2.1.2" - editions "^2.2.0" - textextensions "^2.5.0" - -jackspeak@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-4.0.1.tgz#9fca4ce961af6083e259c376e9e3541431f5287b" - integrity sha512-cub8rahkh0Q/bw1+GxP7aeSe29hHHn2V4m29nnDlvCdlgU+3UGxkZp7Z53jLUdpX3jdTO0nJZUDl3xvbWc2Xog== +jackspeak@^3.1.2: + version "3.4.3" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a" + integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== dependencies: "@isaacs/cliui" "^8.0.2" optionalDependencies: "@pkgjs/parseargs" "^0.11.0" +jackspeak@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-4.1.1.tgz#96876030f450502047fc7e8c7fcf8ce8124e43ae" + integrity 
sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ== + dependencies: + "@isaacs/cliui" "^8.0.2" + jake@^10.8.5: - version "10.8.5" - resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" - integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== + version "10.9.2" + resolved "https://registry.yarnpkg.com/jake/-/jake-10.9.2.tgz#6ae487e6a69afec3a5e167628996b59f35ae2b7f" + integrity sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA== dependencies: async "^3.2.3" chalk "^4.0.2" - filelist "^1.0.1" - minimatch "^3.0.4" + filelist "^1.0.4" + minimatch "^3.1.2" jira-client@^8.2.2: version "8.2.2" @@ -5186,21 +8248,36 @@ jira-client@^8.2.2: "@babel/runtime" "^7.6.0" postman-request "^2.88.1-postman.30" +jiti@^1.21.6: + version "1.21.7" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.7.tgz#9dd81043424a3d28458b193d965f0d18a2300ba9" + integrity sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A== + +joycon@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03" + integrity sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw== + +js-md4@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/js-md4/-/js-md4-0.3.2.tgz#cd3b3dc045b0c404556c81ddb5756c23e59d7cf5" + integrity sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA== + +js-tiktoken@^1.0.12: + version "1.0.20" + resolved "https://registry.yarnpkg.com/js-tiktoken/-/js-tiktoken-1.0.20.tgz#fa2733bf147acaf1bdcf9ab8a878e79c581c95f2" + integrity sha512-Xlaqhhs8VfCd6Sh7a1cFkZHQbYTLCwVJJWiHVxBYzLPxW0XsoxBy1hitmjkdIjD3Aon5BXLHFwU5O8WUx6HH+A== + dependencies: + base64-js "^1.5.1" + js-tokens@^4.0.0: version "4.0.0" - resolved 
"https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@4.0.0, js-yaml@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz" - integrity sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q== - dependencies: - argparse "^2.0.1" - -js-yaml@^3.13.1, js-yaml@^3.8.1: +js-yaml@^3.13.1: version "3.14.1" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== dependencies: argparse "^1.0.7" @@ -5213,151 +8290,221 @@ js-yaml@^4.1.0: dependencies: argparse "^2.0.1" +js2xmlparser@^4.0.1, js2xmlparser@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/js2xmlparser/-/js2xmlparser-4.0.2.tgz#2a1fdf01e90585ef2ae872a01bc169c6a8d5e60a" + integrity sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA== + dependencies: + xmlcreate "^2.0.4" + +jsbn@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-1.1.0.tgz#b01307cb29b618a1ed26ec79e911f803c4da0040" + integrity sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A== + jsbn@~0.1.0: version "0.1.1" - resolved "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz" - integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== + +jsdoc-api@^9.3.5: + version "9.3.5" + resolved 
"https://registry.yarnpkg.com/jsdoc-api/-/jsdoc-api-9.3.5.tgz#1f5683b8db9cdc5d8afb606ec0041f3c1b7bbd0f" + integrity sha512-TQwh1jA8xtCkIbVwm/XA3vDRAa5JjydyKx1cC413Sh3WohDFxcMdwKSvn4LOsq2xWyAmOU/VnSChTQf6EF0R8g== + dependencies: + array-back "^6.2.2" + cache-point "^3.0.1" + current-module-paths "^1.1.2" + file-set "^5.3.0" + jsdoc "^4.0.4" + object-to-spawn-args "^2.0.1" + walk-back "^5.1.1" + +jsdoc-parse@^6.2.4: + version "6.2.4" + resolved "https://registry.yarnpkg.com/jsdoc-parse/-/jsdoc-parse-6.2.4.tgz#e770fbb7aea3ad76c852fabf77178b896cbe5959" + integrity sha512-MQA+lCe3ioZd0uGbyB3nDCDZcKgKC7m/Ivt0LgKZdUoOlMJxUWJQ3WI6GeyHp9ouznKaCjlp7CU9sw5k46yZTw== + dependencies: + array-back "^6.2.2" + find-replace "^5.0.1" + lodash.omit "^4.5.0" + sort-array "^5.0.0" + +jsdoc-to-markdown@^9.1.1: + version "9.1.2" + resolved "https://registry.yarnpkg.com/jsdoc-to-markdown/-/jsdoc-to-markdown-9.1.2.tgz#8af7e5b457bb5d8f9211f5dc27ef939f55bd9d75" + integrity sha512-0rhxIZeolCJzQ1SPIqmdtPd4VsK8Jt22sKUnnjHpFaXPDkhmdEuZhkrUQKuQidXGi+j3otleQyqn2BEYhxOpYA== + dependencies: + array-back "^6.2.2" + command-line-args "^6.0.1" + command-line-usage "^7.0.3" + config-master "^3.1.0" + dmd "^7.1.1" + jsdoc-api "^9.3.5" + jsdoc-parse "^6.2.4" + walk-back "^5.1.1" + +jsdoc-type-pratt-parser@~4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-4.0.0.tgz#136f0571a99c184d84ec84662c45c29ceff71114" + integrity sha512-YtOli5Cmzy3q4dP26GraSOeAhqecewG04hoO8DY56CH4KJ9Fvv5qKWUCCo3HZob7esJQHCv6/+bnTy72xZZaVQ== + +jsdoc@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/jsdoc/-/jsdoc-4.0.4.tgz#86565a9e39cc723a3640465b3fb189a22d1206ca" + integrity sha512-zeFezwyXeG4syyYHbvh1A967IAqq/67yXtXvuL5wnqCkFZe8I0vKfm+EO+YEvLguo6w9CDUbrAXVtJSHh2E8rw== + dependencies: + "@babel/parser" "^7.20.15" + "@jsdoc/salty" "^0.2.1" + "@types/markdown-it" "^14.1.1" + bluebird "^3.7.2" + catharsis "^0.9.0" + escape-string-regexp "^2.0.0" + 
js2xmlparser "^4.0.2" + klaw "^3.0.0" + markdown-it "^14.1.0" + markdown-it-anchor "^8.6.7" + marked "^4.0.10" + mkdirp "^1.0.4" + requizzle "^0.2.3" + strip-json-comments "^3.1.0" + underscore "~1.13.2" -jsdom@^24.0.0: - version "24.0.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-24.0.0.tgz#e2dc04e4c79da368481659818ee2b0cd7c39007c" - integrity sha512-UDS2NayCvmXSXVP6mpTj+73JnNQadZlr9N68189xib2tx5Mls7swlTNao26IoHv46BZJFvXygyRtyXd1feAk1A== +jsdom@^26.1.0: + version "26.1.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-26.1.0.tgz#ab5f1c1cafc04bd878725490974ea5e8bf0c72b3" + integrity sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg== dependencies: - cssstyle "^4.0.1" + cssstyle "^4.2.1" data-urls "^5.0.0" - decimal.js "^10.4.3" - form-data "^4.0.0" + decimal.js "^10.5.0" html-encoding-sniffer "^4.0.0" - http-proxy-agent "^7.0.0" - https-proxy-agent "^7.0.2" + http-proxy-agent "^7.0.2" + https-proxy-agent "^7.0.6" is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.7" - parse5 "^7.1.2" - rrweb-cssom "^0.6.0" + nwsapi "^2.2.16" + parse5 "^7.2.1" + rrweb-cssom "^0.8.0" saxes "^6.0.0" symbol-tree "^3.2.4" - tough-cookie "^4.1.3" + tough-cookie "^5.1.1" w3c-xmlserializer "^5.0.0" webidl-conversions "^7.0.0" whatwg-encoding "^3.1.1" whatwg-mimetype "^4.0.0" - whatwg-url "^14.0.0" - ws "^8.16.0" + whatwg-url "^14.1.1" + ws "^8.18.0" xml-name-validator "^5.0.0" -jsen@0.6.6: - version "0.6.6" - resolved "https://registry.npmjs.org/jsen/-/jsen-0.6.6.tgz" - integrity sha1-AkDBjPETUKwCFFb0in6xO9Z+BCA= - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsforce@^1.11.0: - version "1.11.0" - resolved "https://registry.npmjs.org/jsforce/-/jsforce-1.11.0.tgz" - integrity 
sha512-vYNXJXXdz9ZQNdfRqq/MCJ/zU7JGA7iEduwafQDzChR9FeqXgTNfHTppLVbw9mIniKkQZemmxSOtl7N04lj/5Q== - dependencies: - base64-url "^2.2.0" - co-prompt "^1.0.0" - coffeescript "^1.10.0" - commander "^2.9.0" - csv-parse "^4.10.1" - csv-stringify "^1.0.4" - faye "^1.4.0" - inherits "^2.0.1" - lodash "^4.17.19" - multistream "^2.0.5" - opn "^5.3.0" - promise "^7.1.1" - readable-stream "^2.1.0" - request "^2.72.0" - xml2js "^0.4.16" - -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz" - integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= +jsesc@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.1.0.tgz#74d335a234f67ed19907fdadfac7ccf9d409825d" + integrity sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== json-buffer@3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== json-parse-better-errors@^1.0.1: version "1.0.2" - resolved "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== json-parse-even-better-errors@^2.3.0: version "2.3.1" - resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" + 
resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== +json-parse-even-better-errors@^3.0.0, json-parse-even-better-errors@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz#b43d35e89c0f3be6b5fbbe9dc6c82467b30c28da" + integrity sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ== + json-schema-traverse@^0.4.1: version "0.4.1" - resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema-traverse@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" - integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= - -json-schema@0.4.0: +json-schema@0.4.0, json-schema@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved 
"https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" - integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json-stringify-nice@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz#2c937962b80181d3f317dd39aa323e14f5a60a67" + integrity sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw== -json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1: +json-stringify-safe@~5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" - integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== + +json5@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== + dependencies: + minimist "^1.2.0" -json5@^2.1.2: +json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" - integrity "sha1-eM1vGhm9wStz21rQxh79ZsHikoM= sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonc-parser@^3.0.0: + version "3.3.1" + resolved 
"https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.3.1.tgz#f2a524b4f7fd11e3d791e559977ad60b98b798b4" + integrity sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ== jsonfile@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz" - integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg== optionalDependencies: graceful-fs "^4.1.6" jsonfile@^6.0.1: version "6.1.0" - resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== dependencies: universalify "^2.0.0" optionalDependencies: graceful-fs "^4.1.6" -jsonparse@^1.2.0: +jsonparse@^1.2.0, jsonparse@^1.3.1: version "1.3.1" - resolved "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz" - integrity sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA= + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" + integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== -jsonwebtoken@8.5.0: - version "8.5.0" - resolved "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.0.tgz" - integrity sha512-IqEycp0znWHNA11TpYi77bVgyBO/pGESDh7Ajhas+u0ttkGkKYIIAjniL4Bw5+oVejVF+SYkaI7XKfwCCyeTuA== +jsonpointer@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== + +jsonwebtoken@9.0.2: + version "9.0.2" + resolved 
"https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3" + integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ== dependencies: - jws "^3.2.1" + jws "^3.2.2" lodash.includes "^4.3.0" lodash.isboolean "^3.0.3" lodash.isinteger "^4.0.4" @@ -5366,17 +8513,7 @@ jsonwebtoken@8.5.0: lodash.isstring "^4.0.1" lodash.once "^4.0.0" ms "^2.1.1" - semver "^5.6.0" - -jsprim@^1.2.2: - version "1.4.1" - resolved "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz" - integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= - dependencies: - assert-plus "1.0.0" - extsprintf "1.3.0" - json-schema "0.2.3" - verror "1.10.0" + semver "^7.5.4" jsprim@^2.0.2: version "2.0.2" @@ -5388,7 +8525,7 @@ jsprim@^2.0.2: json-schema "0.4.0" verror "1.10.0" -jszip@^3.10.1: +jszip@3.10.1, jszip@^3.10.1: version "3.10.1" resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.10.1.tgz#34aee70eb18ea1faec2f589208a157d1feb091c2" integrity sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g== @@ -5398,91 +8535,113 @@ jszip@^3.10.1: readable-stream "~2.3.6" setimmediate "^1.0.5" +just-diff-apply@^5.2.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/just-diff-apply/-/just-diff-apply-5.5.0.tgz#771c2ca9fa69f3d2b54e7c3f5c1dfcbcc47f9f0f" + integrity sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw== + +just-diff@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/just-diff/-/just-diff-5.2.0.tgz#60dca55891cf24cd4a094e33504660692348a241" + integrity sha512-6ufhP9SHjb7jibNFrNxyFZ6od3g+An6Ai9mhGRvcYe8UJlH0prseN64M+6ZBBUoKYHZsitDP42gAJ8+eVWr3lw== + +just-diff@^6.0.0: + version "6.0.2" + resolved "https://registry.yarnpkg.com/just-diff/-/just-diff-6.0.2.tgz#03b65908543ac0521caf6d8eb85035f7d27ea285" + integrity sha512-S59eriX5u3/QhMNq3v/gm8Kd0w8OS6Tz2FS1NG4blv+z0MuQcBRJyFWjdovM0Rad4/P4aUPFtnkNjMjyMlMSYA== + 
just-extend@^4.0.2: - version "4.1.1" - resolved "https://registry.npmjs.org/just-extend/-/just-extend-4.1.1.tgz" - integrity sha512-aWgeGFW67BP3e5181Ep1Fv2v8z//iBJfrvyTnq8wG86vEESwmonn1zPBJ0VfmT9CJq2FIT0VsETtrNFm2a+SHA== + version "4.2.1" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" + integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== + +just-extend@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-6.2.0.tgz#b816abfb3d67ee860482e7401564672558163947" + integrity sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw== jwa@^1.4.1: - version "1.4.1" - resolved "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz" - integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + version "1.4.2" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.2.tgz#16011ac6db48de7b102777e57897901520eec7b9" + integrity sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw== dependencies: - buffer-equal-constant-time "1.0.1" + buffer-equal-constant-time "^1.0.1" ecdsa-sig-formatter "1.0.11" safe-buffer "^5.0.1" -jws@^3.2.1: - version "3.2.2" - resolved "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz" - integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== - dependencies: - jwa "^1.4.1" - safe-buffer "^5.0.1" - -keypress@~0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/keypress/-/keypress-0.2.1.tgz" - integrity sha1-HoBFQlABjbrUw/6USX1uZ7YmnHc= - -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== - dependencies: - json-buffer "3.0.0" - -keyv@^4.0.0: - version "4.5.2" - resolved 
"https://registry.yarnpkg.com/keyv/-/keyv-4.5.2.tgz#0e310ce73bf7851ec702f2eaf46ec4e3805cce56" - integrity sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g== +jws@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== dependencies: - json-buffer "3.0.1" + jwa "^1.4.1" + safe-buffer "^5.0.1" -keyv@^4.0.3: - version "4.0.3" - resolved "https://registry.npmjs.org/keyv/-/keyv-4.0.3.tgz" - integrity sha512-zdGa2TOpSZPq5mU6iowDARnMBZgtCqJ11dJROFi6tg6kTn4nuUdU09lFyLFSaHrWqpIJ+EBq4E8/Dc0Vx5vLdA== +keyv@^4.0.0, keyv@^4.5.3: + version "4.5.4" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== dependencies: json-buffer "3.0.1" -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" +kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= +klaw@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/klaw/-/klaw-3.0.0.tgz#b11bec9cf2492f06756d6e809ab73a2910259146" + integrity sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g== dependencies: - is-buffer "^1.1.5" + graceful-fs "^4.1.9" -kind-of@^5.0.0: - version 
"5.1.0" - resolved "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== +ky@^1.2.0: + version "1.8.2" + resolved "https://registry.yarnpkg.com/ky/-/ky-1.8.2.tgz#161cba76e4a86b43829565b9a9c8d21d03e602db" + integrity sha512-XybQJ3d4Ea1kI27DoelE5ZCT3bSJlibYTtQuMsyzKox3TMyayw1asgQdl54WroAm+fIA3ZCr8zXW2RpR7qWVpA== -latest-version@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz" - integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== +"langchain@>=0.2.3 <0.3.0 || >=0.3.4 <0.4.0", langchain@^0.3.34: + version "0.3.34" + resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.3.34.tgz#b888c22275b75699d4b17c4475f4b67949cb5572" + integrity sha512-OADHLQYRX+36EqQBxIoryCdMKfHex32cJBSWveadIIeRhygqivacIIDNwVjX51Y++c80JIdR0jaQHWn2r3H1iA== dependencies: - package-json "^6.3.0" + "@langchain/openai" ">=0.1.0 <0.7.0" + "@langchain/textsplitters" ">=0.0.0 <0.2.0" + js-tiktoken "^1.0.12" + js-yaml "^4.1.0" + jsonpointer "^5.0.1" + langsmith "^0.3.67" + openapi-types "^12.1.3" + p-retry "4" + uuid "^10.0.0" + yaml "^2.2.1" + zod "^3.25.32" + +langsmith@^0.3.29, langsmith@^0.3.46, langsmith@^0.3.67: + version "0.3.69" + resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.3.69.tgz#ae9f3db6e11cac8c337401ead5d05171002fdc54" + integrity 
sha512-YKzu92YAP2o+d+1VmR38xqFX0RIRLKYj1IqdflVEY83X0FoiVlrWO3xDLXgnu7vhZ2N2M6jx8VO9fVF8yy9gHA== + dependencies: + "@types/uuid" "^10.0.0" + chalk "^4.1.2" + console-table-printer "^2.12.1" + p-queue "^6.6.2" + p-retry "4" + semver "^7.6.3" + uuid "^10.0.0" -lazy-cache@^2.0.1, lazy-cache@^2.0.2: - version "2.0.2" - resolved "https://registry.npmjs.org/lazy-cache/-/lazy-cache-2.0.2.tgz" - integrity sha1-uRkKT5EzVGlIQIWfio9whNiCImQ= +latest-version@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-9.0.0.tgz#e91ed216e7a4badc6f73b66c65adb46c58ec6ba1" + integrity sha512-7W0vV3rqv5tokqkBAFV1LbR7HPOWzXQDpDgEuib/aJ1jsZZx6x3c2mBI+TJhJzOhkGeaLbCKEHXEXLfirtG2JA== dependencies: - set-getter "^0.1.0" + package-json "^10.0.0" lazystream@^1.0.0: version "1.0.1" @@ -5493,7 +8652,7 @@ lazystream@^1.0.0: levn@^0.4.1: version "0.4.1" - resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: prelude-ls "^1.2.1" @@ -5511,62 +8670,114 @@ lie@~3.3.0: dependencies: immediate "~3.0.5" +lighthouse-logger@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/lighthouse-logger/-/lighthouse-logger-2.0.1.tgz#48895f639b61cca89346bb6f47f7403a3895fa02" + integrity sha512-ioBrW3s2i97noEmnXxmUq7cjIcVRjT5HBpAYy8zE11CxU9HqlWHHeRxfeN1tn8F7OEMVPIC9x1f8t3Z7US9ehQ== + dependencies: + debug "^2.6.9" + marky "^1.2.2" + +lilconfig@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.3.tgz#a1bcfd6257f9585bf5ae14ceeebb7b559025e4c4" + integrity sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw== + lines-and-columns@^1.1.6: - version "1.1.6" - resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz" - integrity 
sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== -list-item@^1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/list-item/-/list-item-1.1.1.tgz" - integrity sha1-DGXQDih8tmPMs8s4Sad+iewmilY= - dependencies: - expand-range "^1.8.1" - extend-shallow "^2.0.1" - is-number "^2.1.0" - repeat-string "^1.5.2" +linkify-it@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-5.0.0.tgz#9ef238bfa6dc70bd8e7f9572b52d369af569b421" + integrity sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ== + dependencies: + uc.micro "^2.0.0" + +linkinator@^6.1.1: + version "6.1.4" + resolved "https://registry.yarnpkg.com/linkinator/-/linkinator-6.1.4.tgz#9a2fb961ad38c7041d907ecd72d34d32a5b06e8e" + integrity sha512-7DXjwFiJ6rqye8OawwWi/CyDdKdIb69HLCbPhRI6tGSNnGruWFw8qucNsoWFXybel/I960UujFHefjvprhhvYA== + dependencies: + chalk "^5.0.0" + escape-html "^1.0.3" + gaxios "^6.0.0" + glob "^10.3.10" + htmlparser2 "^10.0.0" + marked "^13.0.0" + meow "^13.0.0" + mime "^4.0.0" + server-destroy "^1.0.1" + srcset "^5.0.0" listenercount@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/listenercount/-/listenercount-1.0.1.tgz#84c8a72ab59c4725321480c975e6508342e70937" integrity sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ== -load-json-file@^6.2.0: - version "6.2.0" - resolved "https://registry.npmjs.org/load-json-file/-/load-json-file-6.2.0.tgz" - integrity sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ== - dependencies: - graceful-fs "^4.1.15" - parse-json "^5.0.0" - strip-bom "^4.0.0" - type-fest "^0.6.0" +listr-silent-renderer@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" + integrity sha512-L26cIFm7/oZeSNVhWB6faeorXhMg4HNlb/dS/7jHhr708jxlXrtrBWo4YUxZQkc6dGoxEAe6J/D3juTRBUzjtA== -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== +listr-update-renderer@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" + integrity sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA== + dependencies: + chalk "^1.1.3" + cli-truncate "^0.2.1" + elegant-spinner "^1.0.1" + figures "^1.7.0" + indent-string "^3.0.0" + log-symbols "^1.0.2" + log-update "^2.3.0" + strip-ansi "^3.0.1" + +listr-verbose-renderer@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" + integrity sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw== dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" + chalk "^2.4.1" + cli-cursor "^2.1.0" + date-fns "^1.27.2" + figures "^2.0.0" + +listr@^0.14.3: + version "0.14.3" + resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" + integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== + dependencies: + "@samverschueren/stream-to-observable" "^0.3.0" + is-observable "^1.1.0" + is-promise "^2.1.0" + is-stream "^1.1.0" + listr-silent-renderer "^1.1.1" + listr-update-renderer "^0.5.0" + listr-verbose-renderer "^0.5.0" + p-map "^2.0.0" + rxjs "^6.3.3" locate-path@^5.0.0: version "5.0.0" - resolved 
"https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== dependencies: p-locate "^4.1.0" locate-path@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: p-locate "^5.0.0" -locate-path@^7.1.0: - version "7.1.1" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-7.1.1.tgz#8e1e5a75c7343770cef02ff93c4bf1f0aa666374" - integrity sha512-vJXaRMJgRVD3+cUZs3Mncj2mxpt5mP0EmNOsxRSZRMlbqjvxzDEOIUWXGmavo0ZC9+tNZCBLQ66reA11nbpHZg== +locate-path@^7.1.0, locate-path@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-7.2.0.tgz#69cb1779bd90b35ab1e771e1f2f89a202c2a8a8a" + integrity sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA== dependencies: p-locate "^6.0.0" @@ -5575,15 +8786,15 @@ lodash-es@^4.17.21: resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== -lodash._reinterpolate@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz" - integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= +lodash.camelcase@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + integrity 
sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== lodash.defaults@^4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz" - integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw= + resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" + integrity sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ== lodash.difference@^4.5.0: version "4.5.0" @@ -5597,18 +8808,18 @@ lodash.escaperegexp@^4.1.2: lodash.flatten@^4.4.0: version "4.4.0" - resolved "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= + resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" + integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g== lodash.flattendeep@^4.4.0: version "4.4.0" - resolved "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz" - integrity sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI= + resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2" + integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ== -lodash.get@^4.4.2, lodash.get@~4.4.2: +lodash.get@^4.4.2: version "4.4.2" - resolved "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz" - integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== lodash.groupby@^4.6.0: version "4.6.0" @@ -5617,13 +8828,13 @@ lodash.groupby@^4.6.0: lodash.includes@^4.3.0: version "4.3.0" - resolved 
"https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz" - integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= + resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== lodash.isboolean@^3.0.3: version "3.0.3" - resolved "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz" - integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY= + resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" + integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== lodash.isequal@^4.5.0: version "4.5.0" @@ -5637,8 +8848,8 @@ lodash.isfunction@^3.0.9: lodash.isinteger@^4.0.4: version "4.0.4" - resolved "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz" - integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M= + resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== lodash.isnil@^4.0.0: version "4.0.0" @@ -5647,48 +8858,63 @@ lodash.isnil@^4.0.0: lodash.isnumber@^3.0.3: version "3.0.3" - resolved "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz" - integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w= + resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== lodash.isplainobject@^4.0.6: version "4.0.6" - resolved "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz" - integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= + resolved 
"https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== lodash.isstring@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz" - integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE= + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== lodash.isundefined@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash.isundefined/-/lodash.isundefined-3.0.1.tgz#23ef3d9535565203a66cefd5b830f848911afb48" integrity sha512-MXB1is3s899/cD8jheYYE2V9qTHwKvt+npCwpD+1Sxm3Q3cECXCiYHjeHWXNwr6Q0SOBPrYUDxendrO6goVTEA== -lodash.once@^4.0.0: +lodash.kebabcase@^4.1.1: version "4.1.1" - resolved "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz" - integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= + resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" + integrity sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g== -lodash.set@^4.3.2: - version "4.3.2" - resolved "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23" - integrity "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM= sha512-4hNPN5jlm/N/HLMCO43v8BXKq9Z7QdAGc/VGrRD61w8gN9g/6jF9A4L1pbUgBLCffi0w9VsXfTOij5x8iTyFvg==" +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash.template@^4.4.0: +lodash.mergewith@^4.6.2: + version "4.6.2" + resolved 
"https://registry.yarnpkg.com/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz#617121f89ac55f59047c7aec1ccd6654c6590f55" + integrity sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ== + +lodash.omit@^4.5.0: version "4.5.0" - resolved "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz" - integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== - dependencies: - lodash._reinterpolate "^3.0.0" - lodash.templatesettings "^4.0.0" + resolved "https://registry.yarnpkg.com/lodash.omit/-/lodash.omit-4.5.0.tgz#6eb19ae5a1ee1dd9df0b969e66ce0b7fa30b5e60" + integrity sha512-XeqSp49hNGmlkj2EJlfrQFIzQ6lXdNro9sddtQzcJY8QaoC2GO0DT7xaIokHeyM+mIT0mPMlPvkYzg2xCuHdZg== -lodash.templatesettings@^4.0.0: - version "4.2.0" - resolved "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz" - integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== - dependencies: - lodash._reinterpolate "^3.0.0" +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + +lodash.snakecase@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" + integrity sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + +lodash.startcase@^4.4.0: + version "4.4.0" + resolved 
"https://registry.yarnpkg.com/lodash.startcase/-/lodash.startcase-4.4.0.tgz#9436e34ed26093ed7ffae1936144350915d9add8" + integrity sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg== lodash.union@^4.6.0: version "4.6.0" @@ -5700,24 +8926,22 @@ lodash.uniq@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== -lodash@4.17.21, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: +lodash.upperfirst@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz#1365edf431480481ef0d1c68957a5ed99d49f7ce" + integrity sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg== + +lodash@^4.17.15, lodash@^4.17.21: version "4.17.21" - resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -log-symbols@4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz" - integrity sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA== - dependencies: - chalk "^4.0.0" - -log-symbols@^2.2.0: - version "2.2.0" - resolved "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz" - integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== +log-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" + integrity 
sha512-mmPrW0Fh2fxOzdBbFv4g1m6pR72haFLPJ2G5SJEELf1y+iaQrDG6cWCPjy54RHYbZAt7X+ls690Kw62AdWXBzQ== dependencies: - chalk "^2.0.1" + chalk "^1.0.0" log-symbols@^4.1.0: version "4.1.0" @@ -5727,243 +8951,430 @@ log-symbols@^4.1.0: chalk "^4.1.0" is-unicode-supported "^0.1.0" -lolex@^2.4.2: - version "2.7.5" - resolved "https://registry.npmjs.org/lolex/-/lolex-2.7.5.tgz" - integrity sha512-l9x0+1offnKKIzYVjyXU2SiwhXDLekRzKyhnbyldPHvC7BvLPVpdNUNR2KeMAiCN2D/kLNttZgQD5WjSxuBx3Q== +log-symbols@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-6.0.0.tgz#bb95e5f05322651cac30c0feb6404f9f2a8a9439" + integrity sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw== + dependencies: + chalk "^5.3.0" + is-unicode-supported "^1.3.0" -lolex@^5.0.1: - version "5.1.2" - resolved "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz" - integrity sha512-h4hmjAvHTmd+25JSwrtTIuwbKdwg5NzZVRMLn9saij4SZaepCrTCxPr35H/3bjwfMJtN+t3CX8672UIkglz28A== +log-symbols@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-7.0.1.tgz#f52e68037d96f589fc572ff2193dc424d48c195b" + integrity sha512-ja1E3yCr9i/0hmBVaM0bfwDjnGy8I/s6PP4DFp+yP+a+mrHO4Rm7DtmnqROTUkHIkqffC84YY7AeqX6oFk0WFg== dependencies: - "@sinonjs/commons" "^1.7.0" + is-unicode-supported "^2.0.0" + yoctocolors "^2.1.1" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== +log-update@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" + integrity sha512-vlP11XfFGyeNQlmEn9tJ66rEW1coA/79m5z6BCkudjbAGE83uhAcGYrBFwfs3AdLiLzGRusRPAbSPK9xZteCmg== + dependencies: + ansi-escapes "^3.0.0" + cli-cursor "^2.0.0" + wrap-ansi "^3.0.1" + +long@^5.0.0, 
long@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/long/-/long-5.3.2.tgz#1d84463095999262d7d7b7f8bfd4a8cc55167f83" + integrity sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA== + +loupe@^2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.7.tgz#6e69b7d4db7d3ab436328013d37d1c8c3540c697" + integrity sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA== + dependencies: + get-func-name "^2.0.1" + +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" lowercase-keys@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lowercase-keys@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" + integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ== + +lru-cache@^10.0.1, lru-cache@^10.2.0, lru-cache@^10.2.2, lru-cache@^10.4.3: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + lru-cache@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-11.0.0.tgz#15d93a196f189034d7166caf9fe55e7384c98a21" - integrity 
sha512-Qv32eSV1RSCfhY3fpPE2GNZ8jgM9X7rdAfemLWqTUxwiyIC4jJ6Sy0fZ8H+oLWevO6i4/bizg7c8d8i6bxrzbA== + version "11.1.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-11.1.0.tgz#afafb060607108132dbc1cf8ae661afb69486117" + integrity sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A== -lru-cache@^4.0.1: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" + yallist "^3.0.2" lru-cache@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" +lru-cache@^7.14.1: + version "7.18.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89" + integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== + +lunr@^2.3.9: + version "2.3.9" + resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== + make-dir@^3.0.0, make-dir@^3.0.2: version "3.1.0" - resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" + resolved 
"https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== dependencies: semver "^6.0.0" +make-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== + dependencies: + semver "^7.5.3" + make-error@^1.1.1: version "1.3.6" - resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= +make-fetch-happen@^13.0.0, make-fetch-happen@^13.0.1: + version "13.0.1" + resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz#273ba2f78f45e1f3a6dca91cede87d9fa4821e36" + integrity sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA== + dependencies: + "@npmcli/agent" "^2.0.0" + cacache "^18.0.0" + http-cache-semantics "^4.1.1" + is-lambda "^1.0.1" + minipass "^7.0.2" + minipass-fetch "^3.0.0" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + negotiator "^0.6.3" + proc-log "^4.2.0" + promise-retry "^2.0.1" + ssri "^10.0.0" + +make-fetch-happen@^14.0.3: + version "14.0.3" + resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz#d74c3ecb0028f08ab604011e0bc6baed483fcdcd" + integrity sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ== + dependencies: + "@npmcli/agent" "^3.0.0" + cacache "^19.0.1" + 
http-cache-semantics "^4.1.1" + minipass "^7.0.2" + minipass-fetch "^4.0.0" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + negotiator "^1.0.0" + proc-log "^5.0.0" + promise-retry "^2.0.1" + ssri "^12.0.0" + +map-obj@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" + integrity sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg== -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" +map-obj@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" + integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ== -markdown-link@^0.1.1: - version "0.1.1" - resolved "https://registry.npmjs.org/markdown-link/-/markdown-link-0.1.1.tgz" - integrity sha1-MsXGUZmmRXMWMi0eQinRNAfIx88= +markdown-it-anchor@^8.6.7: + version "8.6.7" + resolved "https://registry.yarnpkg.com/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz#ee6926daf3ad1ed5e4e3968b1740eef1c6399634" + integrity sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA== -markdown-toc@^1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/markdown-toc/-/markdown-toc-1.2.0.tgz" - integrity sha512-eOsq7EGd3asV0oBfmyqngeEIhrbkc7XVP63OwcJBIhH2EpG2PzFcbZdhy1jutXSlRBBVMNXHvMtSr5LAxSUvUg== - dependencies: - concat-stream "^1.5.2" - diacritics-map "^0.1.0" - gray-matter "^2.1.0" - lazy-cache "^2.0.2" - list-item "^1.1.1" - markdown-link "^0.1.1" - minimist "^1.2.0" - mixin-deep "^1.1.3" - object.pick "^1.2.0" - remarkable "^1.7.1" - repeat-string "^1.6.1" - strip-color "^0.1.0" +markdown-it@^14.1.0: + version "14.1.0" + resolved 
"https://registry.yarnpkg.com/markdown-it/-/markdown-it-14.1.0.tgz#3c3c5992883c633db4714ccb4d7b5935d98b7d45" + integrity sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg== + dependencies: + argparse "^2.0.1" + entities "^4.4.0" + linkify-it "^5.0.0" + mdurl "^2.0.0" + punycode.js "^2.3.1" + uc.micro "^2.1.0" + +marked@^13.0.0: + version "13.0.3" + resolved "https://registry.yarnpkg.com/marked/-/marked-13.0.3.tgz#5c5b4a5d0198060c7c9bc6ef9420a7fed30f822d" + integrity sha512-rqRix3/TWzE9rIoFGIn8JmsVfhiuC8VIQ8IdX5TfzmeBucdY05/0UlzKaw0eVtpcN/OdVFpBk7CjKGo9iHJ/zA== + +marked@^14.1.4: + version "14.1.4" + resolved "https://registry.yarnpkg.com/marked/-/marked-14.1.4.tgz#5e348de817bfa35a11e71de7e41dd7c3fdb186a9" + integrity sha512-vkVZ8ONmUdPnjCKc5uTRvmkRbx4EAi2OkTOXmfTDhZz3OFqMNBM1oTTWwTr4HY4uAEojhzPf+Fy8F1DWa3Sndg== + +marked@^4.0.10, marked@^4.2.12, marked@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3" + integrity sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A== -marked@^12.0.0: - version "12.0.0" - resolved "https://registry.yarnpkg.com/marked/-/marked-12.0.0.tgz#051ea8c8c7f65148a63003df1499515a2c6de716" - integrity sha512-Vkwtq9rLqXryZnWaQc86+FHLC6tr/fycMfYAhiOIXkrNmeGAyhSxjqu0Rs1i0bBqw5u0S7+lV9fdH2ZSVaoa0w== +marky@^1.2.2: + version "1.3.0" + resolved "https://registry.yarnpkg.com/marky/-/marky-1.3.0.tgz#422b63b0baf65022f02eda61a238eccdbbc14997" + integrity sha512-ocnPZQLNpvbedwTy9kNrQEsknEfgvcLMvOtz3sFeWApDq1MXH1TqkCIx58xlpESsfwQOnuBO9beyQuNGzVvuhQ== -math-random@^1.0.1: - version "1.0.4" - resolved "https://registry.npmjs.org/math-random/-/math-random-1.0.4.tgz" - integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A== +math-intrinsics@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9" + integrity sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g== -mega-linter-runner@^4.39.0: - version "4.39.0" - resolved "https://registry.npmjs.org/mega-linter-runner/-/mega-linter-runner-4.39.0.tgz" - integrity sha512-QImjrVAHb6/7t0vijRbJpk8C/BmT66IwCL/CEJ0Jw4foKRmhnWhrAHBckFjxsVqMSH02dkD31QLNQ67n6r0bSg== +md-to-pdf@^5.2.4: + version "5.2.4" + resolved "https://registry.yarnpkg.com/md-to-pdf/-/md-to-pdf-5.2.4.tgz#a81fa2e07fa36756828d0f84f7eeb59d3078fb05" + integrity sha512-s6080i4ZSBfvn7qplSVjdqP9eWi9rIWj+t5pucEKnpAHan89VLQuQPhVx06vWIM+6JX2HrJ1wNRFMiwLlS01vg== dependencies: - chalk "^4.1.0" - find-package-json "^1.2.0" - optionator "^0.9.1" - which "^2.0.2" - yeoman-environment "^2.10.3" - yeoman-generator "^4.12.0" - yosay "^2.0.2" + arg "^5.0.2" + chalk "^4.1.2" + chokidar "^3.5.2" + get-port "^5.1.1" + get-stdin "^8.0.0" + gray-matter "^4.0.3" + highlight.js "^11.7.0" + iconv-lite "^0.6.3" + listr "^0.14.3" + marked "^4.2.12" + puppeteer ">=8.0.0" + semver "^7.3.7" + serve-handler "^6.1.3" + +mdast-util-to-hast@^13.0.0: + version "13.2.0" + resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz#5ca58e5b921cc0a3ded1bc02eed79a4fe4fe41f4" + integrity sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@ungap/structured-clone" "^1.0.0" + devlop "^1.0.0" + micromark-util-sanitize-uri "^2.0.0" + trim-lines "^3.0.0" + unist-util-position "^5.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + +mdurl@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-2.0.0.tgz#80676ec0433025dd3e17ee983d0fe8de5a2237e0" + integrity sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w== -mem-fs-editor@^6.0.0: - 
version "6.0.0" - resolved "https://registry.npmjs.org/mem-fs-editor/-/mem-fs-editor-6.0.0.tgz" - integrity sha512-e0WfJAMm8Gv1mP5fEq/Blzy6Lt1VbLg7gNnZmZak7nhrBTibs+c6nQ4SKs/ZyJYHS1mFgDJeopsLAv7Ow0FMFg== - dependencies: - commondir "^1.0.1" - deep-extend "^0.6.0" - ejs "^2.6.1" - glob "^7.1.4" - globby "^9.2.0" - isbinaryfile "^4.0.0" - mkdirp "^0.5.0" - multimatch "^4.0.0" - rimraf "^2.6.3" - through2 "^3.0.1" - vinyl "^2.2.0" - -mem-fs-editor@^7.0.1: - version "7.1.0" - resolved "https://registry.npmjs.org/mem-fs-editor/-/mem-fs-editor-7.1.0.tgz" - integrity sha512-BH6QEqCXSqGeX48V7zu+e3cMwHU7x640NB8Zk8VNvVZniz+p4FK60pMx/3yfkzo6miI6G3a8pH6z7FeuIzqrzA== +mega-linter-runner@^8.8.0: + version "8.8.0" + resolved "https://registry.yarnpkg.com/mega-linter-runner/-/mega-linter-runner-8.8.0.tgz#ed33b9907d5bdcbd5b13d3f5e4c4e5013269338d" + integrity sha512-ntFdcIZPNl5QvSOwU+ADbKfyyuOG9oyh1TehsGEQ+bcCfi2DNNgOeeDHwir+Ame4APPAC1FozZC8xm0UYo+W+A== dependencies: + chalk "^5.3.0" + find-package-json "^1.2.0" + fs-extra "^11.1.1" + glob "^11.0.0" + mem-fs "^4.0.0" + open "^10.0.2" + optionator "^0.9.3" + prompts "^2.4.2" + uuid "^11.0.0" + which "^5.0.0" + yeoman-environment "^4.0.0" + yeoman-generator "^7.1.0" + +mem-fs-editor@^11.0.0, mem-fs-editor@^11.0.1, mem-fs-editor@^11.1.2: + version "11.1.4" + resolved "https://registry.yarnpkg.com/mem-fs-editor/-/mem-fs-editor-11.1.4.tgz#ddfe705f3b0110cb96a40a3cad290ca21f0d1900" + integrity sha512-Z4QX14Ev6eOVTuVSayS5rdiOua6C3gHcFw+n9Qc7WiaVTbC+H8b99c32MYGmbQN9UFHJeI/p3lf3LAxiIzwEmA== + dependencies: + "@types/ejs" "^3.1.4" + "@types/node" ">=18" + binaryextensions "^6.11.0" commondir "^1.0.1" deep-extend "^0.6.0" - ejs "^3.1.5" - glob "^7.1.4" - globby "^9.2.0" - isbinaryfile "^4.0.0" - mkdirp "^1.0.0" - multimatch "^4.0.0" - rimraf "^3.0.0" - through2 "^3.0.2" - vinyl "^2.2.1" + ejs "^3.1.10" + globby "^14.0.2" + isbinaryfile "5.0.3" + minimatch "^9.0.3" + multimatch "^7.0.0" + normalize-path "^3.0.0" + textextensions "^6.11.0" + 
vinyl "^3.0.0" -mem-fs@^1.1.0: - version "1.2.0" - resolved "https://registry.npmjs.org/mem-fs/-/mem-fs-1.2.0.tgz" - integrity sha512-b8g0jWKdl8pM0LqAPdK9i8ERL7nYrzmJfRhxMiWH2uYdfYnb7uXnmwVb0ZGe7xyEl4lj+nLIU3yf4zPUT+XsVQ== - dependencies: - through2 "^3.0.0" - vinyl "^2.0.1" - vinyl-file "^3.0.0" +mem-fs@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/mem-fs/-/mem-fs-4.1.2.tgz#9c09d1344d88968e940b0e3366048ed8973bb78a" + integrity sha512-CMwusHK+Kz0tu1qjgbd0rwcJxzgg76jlkPpqK+pDTv8Hth8JyM7JlgxNWaAFRKe969HATPTz/sp8T63QflyI+w== + dependencies: + "@types/node" ">=18" + "@types/vinyl" "^2.0.8" + vinyl "^3.0.0" + vinyl-file "^5.0.0" + +memfs@^4.30.1: + version "4.36.0" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-4.36.0.tgz#b9fa8d97ddda3cb8c06908bceec956560c33d979" + integrity sha512-mfBfzGUdoEw5AZwG8E965ej3BbvW2F9LxEWj4uLxF6BEh1dO2N9eS3AGu9S6vfenuQYrVjsbUOOZK7y3vz4vyQ== + dependencies: + "@jsonjoy.com/json-pack" "^1.0.3" + "@jsonjoy.com/util" "^1.3.0" + tree-dump "^1.0.1" + tslib "^2.0.0" + +meow@^13.0.0: + version "13.2.0" + resolved "https://registry.yarnpkg.com/meow/-/meow-13.2.0.tgz#6b7d63f913f984063b3cc261b6e8800c4cd3474f" + integrity sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA== + +meow@^8.0.0, meow@^8.1.2: + version "8.1.2" + resolved "https://registry.yarnpkg.com/meow/-/meow-8.1.2.tgz#bcbe45bda0ee1729d350c03cffc8395a36c4e897" + integrity sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q== + dependencies: + "@types/minimist" "^1.2.0" + camelcase-keys "^6.2.2" + decamelize-keys "^1.1.0" + hard-rejection "^2.1.0" + minimist-options "4.1.0" + normalize-package-data "^3.0.0" + read-pkg-up "^7.0.1" + redent "^3.0.0" + trim-newlines "^3.0.0" + type-fest "^0.18.0" + yargs-parser "^20.2.3" merge-stream@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -merge2@^1.2.3, merge2@^1.3.0, merge2@^1.4.1: +merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" - resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== -micromatch@^3.1.10: - version "3.1.10" - resolved "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.2" +micromark-util-character@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-2.1.1.tgz#2f987831a40d4c510ac261e89852c4e9703ccda6" + integrity sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q== + dependencies: + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromatch@^4.0.2: - version "4.0.2" - resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz" - integrity sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q== +micromark-util-encode@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz#0d51d1c095551cfaac368326963cf55f15f540b8" + integrity 
sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw== + +micromark-util-sanitize-uri@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz#ab89789b818a58752b73d6b55238621b7faa8fd7" + integrity sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ== dependencies: - braces "^3.0.1" - picomatch "^2.0.5" + micromark-util-character "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-symbol "^2.0.0" -micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== +micromark-util-symbol@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz#e5da494e8eb2b071a0d08fb34f6cefec6c0a19b8" + integrity sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q== + +micromark-util-types@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-2.0.2.tgz#f00225f5f5a0ebc3254f96c36b6605c4b393908e" + integrity sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA== + +micromatch@^4.0.2, micromatch@^4.0.5, micromatch@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: - braces "^3.0.2" + braces "^3.0.3" picomatch "^2.3.1" -mime-db@1.46.0: - version "1.46.0" - resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz" - integrity 
sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ== - mime-db@1.52.0: version "1.52.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -mime-types@^2.1.12, mime-types@~2.1.19: - version "2.1.29" - resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.29.tgz" - integrity sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ== +mime-db@~1.33.0: + version "1.33.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db" + integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== + +mime-types@2.1.18: + version "2.1.18" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8" + integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ== dependencies: - mime-db "1.46.0" + mime-db "~1.33.0" -mime-types@^2.1.35: +mime-types@^2.1.12, mime-types@^2.1.35: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" +mime@2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" + integrity sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== + mime@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== +mime@^4.0.0: + 
version "4.0.7" + resolved "https://registry.yarnpkg.com/mime/-/mime-4.0.7.tgz#0b7a98b08c63bd3c10251e797d67840c9bde9f13" + integrity sha512-2OfDPL+e03E0LrXaGYOtTFIYhiuzep94NSsuhrNULq+stylcJedcHdzHtz0atMUuGwJfFYs0YL5xeC/Ca2x0eQ== + +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + mimic-fn@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -mimic-response@^1.0.0, mimic-response@^1.0.1: +mimic-fn@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc" + integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw== + +mimic-function@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/mimic-function/-/mimic-function-5.0.1.tgz#acbe2b3349f99b9deaca7fb70e48b83e94e67076" + integrity sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA== + +mimic-response@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== mimic-response@^3.1.0: @@ -5971,67 +9382,162 @@ mimic-response@^3.1.0: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" integrity 
sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== -"minimatch@2 || 3", minimatch@^3.0.4, minimatch@^3.1.1: +mimic-response@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-4.0.0.tgz#35468b19e7c75d10f5165ea25e75a5ceea7cf70f" + integrity sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg== + +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity "sha1-Gc0ZS/0+Qo8EmnCBfAONiatL41s= sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" -minimatch@3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== +minimatch@9.0.3: + version "9.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + integrity 
sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== dependencies: - brace-expansion "^1.1.7" + brace-expansion "^2.0.1" -minimatch@^10.0.0: - version "10.0.1" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-10.0.1.tgz#ce0521856b453c86e25f2c4c0d03e6ff7ddc440b" - integrity sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ== +minimatch@^10.0.1, minimatch@^10.0.3: + version "10.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-10.0.3.tgz#cf7a0314a16c4d9ab73a7730a0e8e3c3502d47aa" + integrity sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw== dependencies: - brace-expansion "^2.0.1" + "@isaacs/brace-expansion" "^5.0.0" -minimatch@^5.1.0: +minimatch@^5.0.1, minimatch@^5.1.0, minimatch@^5.1.6: version "5.1.6" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== dependencies: brace-expansion "^2.0.1" -minimist@^1.1.0, minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5: - version "1.2.6" - resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== +minimatch@^9.0.0, minimatch@^9.0.3, minimatch@^9.0.4, minimatch@^9.0.5: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + +minimist-options@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" + integrity 
sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A== + dependencies: + arrify "^1.0.1" + is-plain-obj "^1.1.0" + kind-of "^6.0.3" -minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -minipass@^7.1.2: +minipass-collect@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-2.0.1.tgz#1621bc77e12258a12c60d34e2276ec5c20680863" + integrity sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw== + dependencies: + minipass "^7.0.3" + +minipass-fetch@^3.0.0: + version "3.0.5" + resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-3.0.5.tgz#f0f97e40580affc4a35cc4a1349f05ae36cb1e4c" + integrity sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg== + dependencies: + minipass "^7.0.3" + minipass-sized "^1.0.3" + minizlib "^2.1.2" + optionalDependencies: + encoding "^0.1.13" + +minipass-fetch@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-4.0.1.tgz#f2d717d5a418ad0b1a7274f9b913515d3e78f9e5" + integrity sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ== + dependencies: + minipass "^7.0.3" + minipass-sized "^1.0.3" + minizlib "^3.0.1" + optionalDependencies: + encoding "^0.1.13" + +minipass-flush@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" + integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== + dependencies: + minipass "^3.0.0" + +minipass-pipeline@^1.2.4: + 
version "1.2.4" + resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" + integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== + dependencies: + minipass "^3.0.0" + +minipass-sized@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/minipass-sized/-/minipass-sized-1.0.3.tgz#70ee5a7c5052070afacfbc22977ea79def353b70" + integrity sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g== + dependencies: + minipass "^3.0.0" + +minipass@^3.0.0: + version "3.3.6" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" + integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== + dependencies: + yallist "^4.0.0" + +minipass@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" + integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== + +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.2, minipass@^7.0.3, minipass@^7.0.4, minipass@^7.1.2: version "7.1.2" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== -mixin-deep@^1.1.3, mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== +minizlib@^2.1.1, minizlib@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" + integrity 
sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== + dependencies: + minipass "^3.0.0" + yallist "^4.0.0" + +minizlib@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-3.0.2.tgz#f33d638eb279f664439aa38dc5f91607468cb574" + integrity sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA== dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" + minipass "^7.1.2" + +mitt@3.0.1, mitt@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mitt/-/mitt-3.0.1.tgz#ea36cf0cc30403601ae074c8f77b7092cdab36d1" + integrity sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw== -mkdirp-classic@^0.5.2: +mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: version "0.5.3" - resolved "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz" + resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== -mkdirp@1.0.4, mkdirp@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - "mkdirp@>=0.5 0": version "0.5.6" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" @@ -6039,230 +9545,245 @@ mkdirp@1.0.4, mkdirp@^1.0.0: dependencies: minimist "^1.2.6" -mkdirp@^0.5.0, mkdirp@~0.5.1: - version "0.5.5" - resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz" - integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== - dependencies: - minimist "^1.2.5" - -mocha@^8.3.1: - version "8.3.1" - resolved "https://registry.npmjs.org/mocha/-/mocha-8.3.1.tgz" - integrity 
sha512-5SBMxANWqOv5bw3Hx+HVgaWlcWcFEQDUdaUAr1AUU+qwtx6cowhn7gEDT/DwQP7uYxnvShdUOVLbTYAHOEGfDQ== - dependencies: - "@ungap/promise-all-settled" "1.1.2" - ansi-colors "4.1.1" - browser-stdout "1.3.1" - chokidar "3.5.1" - debug "4.3.1" - diff "5.0.0" - escape-string-regexp "4.0.0" - find-up "5.0.0" - glob "7.1.6" - growl "1.10.5" - he "1.2.0" - js-yaml "4.0.0" - log-symbols "4.0.0" - minimatch "3.0.4" - ms "2.1.3" - nanoid "3.1.20" - serialize-javascript "5.0.1" - strip-json-comments "3.1.1" - supports-color "8.1.1" - which "2.0.2" - wide-align "1.1.3" - workerpool "6.1.0" - yargs "16.2.0" - yargs-parser "20.2.4" - yargs-unparser "2.0.0" - -mock-stdin@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/mock-stdin/-/mock-stdin-1.0.0.tgz" - integrity sha512-tukRdb9Beu27t6dN+XztSRHq9J0B/CoAOySGzHfn8UTfmqipA5yNT/sDUEyYdAV3Hpka6Wx6kOMxuObdOex60Q== +mkdirp@^1.0.3, mkdirp@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -moment@^2.15.1, moment@^2.24.0: - version "2.29.4" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" - integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + +mocha@^10.7.0: + version "10.8.2" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.8.2.tgz#8d8342d016ed411b12a429eb731b825f961afb96" + integrity sha512-VZlYo/WE8t1tstuRmqgeyBgCbJc/lEdopaa+axcKzTBJ+UIdlAB9XnmvTCAH4pwR4ElNInaedhEBmZD8iCSVEg== + dependencies: + ansi-colors "^4.1.3" + browser-stdout "^1.3.1" + chokidar "^3.5.3" + debug 
"^4.3.5" + diff "^5.2.0" + escape-string-regexp "^4.0.0" + find-up "^5.0.0" + glob "^8.1.0" + he "^1.2.0" + js-yaml "^4.1.0" + log-symbols "^4.1.0" + minimatch "^5.1.6" + ms "^2.1.3" + serialize-javascript "^6.0.2" + strip-json-comments "^3.1.1" + supports-color "^8.1.1" + workerpool "^6.5.1" + yargs "^16.2.0" + yargs-parser "^20.2.9" + yargs-unparser "^2.0.0" moment@^2.30.1: version "2.30.1" resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae" integrity sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how== +mri@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b" + integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA== + ms@2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== -ms@2.1.3, ms@^2.0.0, ms@^2.1.1: +ms@^2.0.0, ms@^2.1.1, ms@^2.1.3: version "2.1.3" - resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== -multimatch@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/multimatch/-/multimatch-4.0.0.tgz" - integrity 
sha512-lDmx79y1z6i7RNx0ZGCPq1bzJ6ZoDDKbvh7jxr9SJcWLkShMzXrHbYVpTdnhNM5MXpDUxCQ4DgqVttVXlBgiBQ== +multimatch@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-7.0.0.tgz#d0a1bf144db9106b8d19e3cb8cabec1a8986c27f" + integrity sha512-SYU3HBAdF4psHEL/+jXDKHO95/m5P2RvboHT2Y0WtTttvJLP4H/2WS9WlQPFvF6C8d6SpLw8vjCnQOnVIVOSJQ== dependencies: - "@types/minimatch" "^3.0.3" - array-differ "^3.0.0" - array-union "^2.1.0" - arrify "^2.0.1" - minimatch "^3.0.4" + array-differ "^4.0.0" + array-union "^3.0.1" + minimatch "^9.0.3" -multistream@^2.0.5: - version "2.1.1" - resolved "https://registry.npmjs.org/multistream/-/multistream-2.1.1.tgz" - integrity sha512-xasv76hl6nr1dEy3lPvy7Ej7K/Lx3O/FCvwge8PeVJpciPPoNCbaANcNiBug3IpdvTveZUcAV0DJzdnUDMesNQ== +multistream@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/multistream/-/multistream-3.1.0.tgz#49c382bc0bb355e34d15ba3a9fc1cf0f66b9fded" + integrity sha512-zBgD3kn8izQAN/TaL1PCMv15vYpf+Vcrsfub06njuYVYlzUldzpopTlrEZ53pZVEbfn3Shtv7vRFoOv6LOV87Q== dependencies: inherits "^2.0.1" - readable-stream "^2.0.5" + readable-stream "^3.4.0" + +mustache@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/mustache/-/mustache-4.2.0.tgz#e5892324d60a12ec9c2a73359edca52972bf6f64" + integrity sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ== + +mute-stream@1.0.0, mute-stream@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e" + integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA== + +mute-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-2.0.0.tgz#a5446fc0c512b71c83c44d908d5c7b7b4c493b2b" + integrity sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA== + +napi-build-utils@^2.0.0: + version "2.0.0" + 
resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-2.0.0.tgz#13c22c0187fcfccce1461844136372a47ddc027e" + integrity sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +natural-orderby@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/natural-orderby/-/natural-orderby-3.0.2.tgz#1b874d685fbd68beab2c6e7d14f298e03d631ec3" + integrity sha512-x7ZdOwBxZCEm9MM7+eQCjkrNLrW3rkBKNHVr78zbtqnMGVNlnDi6C/eUEYgxHNrcbu0ymvjzcwIL/6H1iHri9g== -mute-stream@0.0.8: - version "0.0.8" - resolved "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz" - integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== +negotiator@^0.6.3: + version "0.6.4" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.4.tgz#777948e2452651c570b712dd01c23e262713fff7" + integrity sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w== -mute-stream@^1.0.0: +negotiator@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e" - integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA== + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-1.0.0.tgz#b6c91bb47172d69f93cfd7c357bbb529019b5f6a" + integrity sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg== -mv@~2: - version "2.1.1" - resolved "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz" - integrity sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI= - dependencies: - mkdirp "~0.5.1" - ncp "~2.0.0" - rimraf "~2.4.0" - 
-nan@^2.0.8: - version "2.14.2" - resolved "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz" - integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ== - -nanoid@3.1.20: - version "3.1.20" - resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz" - integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw== - -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" - integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= +netmask@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/netmask/-/netmask-2.0.2.tgz#8b01a07644065d536383835823bc52004ebac5e7" + integrity sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg== -natural-orderby@^2.0.1: - version "2.0.3" - resolved "https://registry.npmjs.org/natural-orderby/-/natural-orderby-2.0.3.tgz" - integrity sha512-p7KTHxU0CUrcOXe62Zfrb5Z13nLvPhSWR/so3kFulUQU0sgUll2Z0LwpsLN351eOOD+hRGu/F1g+6xDfPeD++Q== +nice-grpc-client-middleware-retry@^3.1.11: + version "3.1.11" + resolved 
"https://registry.yarnpkg.com/nice-grpc-client-middleware-retry/-/nice-grpc-client-middleware-retry-3.1.11.tgz#4fc0128b891d184b6c98af3bfd6aca1b608a3fd1" + integrity sha512-xW/imz/kNG2g0DwTfH2eYEGrg1chSLrXtvGp9fg2qkhTgGFfAS/Pq3+t+9G8KThcC4hK/xlEyKvZWKk++33S6A== + dependencies: + abort-controller-x "^0.4.0" + nice-grpc-common "^2.0.2" -ncp@~2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" - integrity sha1-GVoh1sRuNh0vsSgbo4uR6d9727M= +nice-grpc-common@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/nice-grpc-common/-/nice-grpc-common-2.0.2.tgz#e6aeebb2bd19d87114b351e291e30d79dd38acf7" + integrity sha512-7RNWbls5kAL1QVUOXvBsv1uO0wPQK3lHv+cY1gwkTzirnG1Nop4cBJZubpgziNbaVc/bl9QJcyvsf/NQxa3rjQ== + dependencies: + ts-error "^1.0.6" -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== +nice-grpc@^2.1.12: + version "2.1.12" + resolved "https://registry.yarnpkg.com/nice-grpc/-/nice-grpc-2.1.12.tgz#56ffdcc4d5bc3d0271c176210680c4bd10c5149b" + integrity sha512-J1n4Wg+D3IhRhGQb+iqh2OpiM0GzTve/kf2lnlW4S+xczmIEd0aHUDV1OsJ5a3q8GSTqJf+s4Rgg1M8uJltarw== + dependencies: + "@grpc/grpc-js" "^1.13.1" + abort-controller-x "^0.4.0" + nice-grpc-common "^2.0.2" -nise@^1.3.3: - version "1.5.3" - resolved "https://registry.npmjs.org/nise/-/nise-1.5.3.tgz" - integrity sha512-Ymbac/94xeIrMf59REBPOv0thr+CJVFMhrlAkW/gjCIE58BGQdCj0x7KRCb3yz+Ga2Rz3E9XXSvUyyxqqhjQAQ== +nise@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/nise/-/nise-4.1.0.tgz#8fb75a26e90b99202fa1e63f448f58efbcdedaf6" + integrity sha512-eQMEmGN/8arp0xsvGoQ+B1qvSkR73B1nWSCh7nOt5neMCtwcQVYQGdzQMhcNscktTsWB54xnlSQFzOAPJD8nXA== dependencies: - "@sinonjs/formatio" "^3.2.1" + "@sinonjs/commons" "^1.7.0" + "@sinonjs/fake-timers" "^6.0.0" "@sinonjs/text-encoding" "^0.7.1" just-extend "^4.0.2" - lolex "^5.0.1" 
path-to-regexp "^1.7.0" -nmtree@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/nmtree/-/nmtree-1.0.6.tgz#953e057ad545e9e627f1275bd25fea4e92c1cf63" - integrity sha512-SUPCoyX5w/lOT6wD/PZEymR+J899984tYEOYjuDqQlIOeX5NSb1MEsCcT0az+dhZD0MLAj5hGBZEpKQxuDdniA== +nise@^5.1.9: + version "5.1.9" + resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.9.tgz#0cb73b5e4499d738231a473cd89bd8afbb618139" + integrity sha512-qOnoujW4SV6e40dYxJOb3uvuoPHtmLzIk4TFo+j0jPJoC+5Z9xja5qH5JZobEPsa8+YYphMrOSwnrshEhG2qww== dependencies: - commander "^2.11.0" + "@sinonjs/commons" "^3.0.0" + "@sinonjs/fake-timers" "^11.2.2" + "@sinonjs/text-encoding" "^0.7.2" + just-extend "^6.2.0" + path-to-regexp "^6.2.1" -nock@^13.0.0: - version "13.0.11" - resolved "https://registry.npmjs.org/nock/-/nock-13.0.11.tgz" - integrity sha512-sKZltNkkWblkqqPAsjYW0bm3s9DcHRPiMOyKO/PkfJ+ANHZ2+LA2PLe22r4lLrKgXaiSaDQwW3qGsJFtIpQIeQ== +no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== dependencies: - debug "^4.1.0" - json-stringify-safe "^5.0.1" - lodash.set "^4.3.2" - propagate "^2.0.0" + lower-case "^2.0.2" + tslib "^2.0.3" + +node-abi@^3.3.0: + version "3.75.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.75.0.tgz#2f929a91a90a0d02b325c43731314802357ed764" + integrity sha512-OhYaY5sDsIka7H7AtijtI9jwGYLyl29eQn/W623DiN/MIv5sUqc4g7BIDThX+gb7di9f6xK02nkp8sdfFWZLTg== + dependencies: + semver "^7.3.5" -node-domexception@1.0.0, node-domexception@^1.0.0: +node-addon-api@^8.4.0: + version "8.5.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-8.5.0.tgz#c91b2d7682fa457d2e1c388150f0dff9aafb8f3f" + integrity sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A== + +node-domexception@1.0.0: version "1.0.0" - resolved 
"https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== - dependencies: - whatwg-url "^5.0.0" - -node-fetch@^2.6.7: +node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.7.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== dependencies: whatwg-url "^5.0.0" -node-fetch@^3.2.10: - version "3.2.10" - resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-3.2.10.tgz" - integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA== - dependencies: - data-uri-to-buffer "^4.0.0" - fetch-blob "^3.1.4" - formdata-polyfill "^4.0.10" +node-gyp@^10.0.0: + version "10.3.1" + resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-10.3.1.tgz#1dd1a1a1c6c5c59da1a76aea06a062786b2c8a1a" + integrity sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ== + dependencies: + env-paths "^2.2.0" + exponential-backoff "^3.1.1" + glob "^10.3.10" + graceful-fs "^4.2.6" + make-fetch-happen "^13.0.0" + nopt "^7.0.0" + proc-log "^4.1.0" + semver "^7.3.5" + tar "^6.2.1" + which "^4.0.0" node-preload@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz" + resolved 
"https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301" integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ== dependencies: process-on-spawn "^1.0.0" -node-releases@^1.1.71: - version "1.1.72" - resolved "https://registry.npmjs.org/node-releases/-/node-releases-1.1.72.tgz" - integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== +node-releases@^2.0.19: + version "2.0.19" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.19.tgz#9e445a52950951ec4d177d843af370b411caf314" + integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw== + +nopt@^7.0.0, nopt@^7.2.1: + version "7.2.1" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-7.2.1.tgz#1cac0eab9b8e97c9093338446eddd40b2c8ca1e7" + integrity sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w== + dependencies: + abbrev "^2.0.0" normalize-package-data@^2.5.0: version "2.5.0" - resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" @@ -6271,70 +9792,127 @@ normalize-package-data@^2.5.0: validate-npm-package-license "^3.0.1" normalize-package-data@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.0.tgz" - integrity sha512-6lUjEI0d3v6kFrtgA/lOx4zHCWULXsFNIjHolnZCKCTLA6m/G625cdn3O7eNmT0iD3jfo6HZ9cdImGZwf21prw== + version "3.0.3" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-3.0.3.tgz#dbcc3e2da59509a0983422884cd172eefdfa525e" + 
integrity sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA== dependencies: - hosted-git-info "^3.0.6" - resolve "^1.17.0" - semver "^7.3.2" + hosted-git-info "^4.0.1" + is-core-module "^2.5.0" + semver "^7.3.4" validate-npm-package-license "^3.0.1" +normalize-package-data@^6, normalize-package-data@^6.0.0: + version "6.0.2" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-6.0.2.tgz#a7bc22167fe24025412bcff0a9651eb768b03506" + integrity sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g== + dependencies: + hosted-git-info "^7.0.0" + semver "^7.3.5" + validate-npm-package-license "^3.0.4" + normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== - normalize-url@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== -npm-api@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/npm-api/-/npm-api-1.0.1.tgz" - integrity sha512-4sITrrzEbPcr0aNV28QyOmgn6C9yKiF8k92jn4buYAK8wmA5xo1qL3II5/gT1r7wxbXBflSduZ2K3FbtOrtGkA== +normalize-url@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-8.0.2.tgz#3b343a42f837e4dae2b01917c04e8de3782e9170" + integrity 
sha512-Ee/R3SyN4BuynXcnTaekmaVdbDAEiNrHqjQIA37mHU8G9pf7aaAD4ZX3XjBLo6rsdcxA/gtkcNYZLt30ACgynw== + +npm-bundled@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-3.0.1.tgz#cca73e15560237696254b10170d8f86dad62da25" + integrity sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ== dependencies: - JSONStream "^1.3.5" - clone-deep "^4.0.1" - download-stats "^0.3.4" - moment "^2.24.0" - node-fetch "^2.6.0" - paged-request "^2.0.1" + npm-normalize-package-bin "^3.0.0" -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= +npm-install-checks@^6.0.0, npm-install-checks@^6.2.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/npm-install-checks/-/npm-install-checks-6.3.0.tgz#046552d8920e801fa9f919cad569545d60e826fe" + integrity sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw== + dependencies: + semver "^7.1.1" + +npm-normalize-package-bin@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz#25447e32a9a7de1f51362c61a559233b89947832" + integrity sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ== + +npm-package-arg@^11.0.0, npm-package-arg@^11.0.2: + version "11.0.3" + resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-11.0.3.tgz#dae0c21199a99feca39ee4bfb074df3adac87e2d" + integrity sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw== + dependencies: + hosted-git-info "^7.0.0" + proc-log "^4.0.0" + semver "^7.3.5" + validate-npm-package-name "^5.0.0" + +npm-packlist@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-8.0.2.tgz#5b8d1d906d96d21c85ebbeed2cf54147477c8478" + integrity 
sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA== + dependencies: + ignore-walk "^6.0.4" + +npm-pick-manifest@^9.0.0, npm-pick-manifest@^9.0.1: + version "9.1.0" + resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz#83562afde52b0b07cb6244361788d319ce7e8636" + integrity sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA== dependencies: - path-key "^2.0.0" + npm-install-checks "^6.0.0" + npm-normalize-package-bin "^3.0.0" + npm-package-arg "^11.0.0" + semver "^7.3.5" -npm-run-path@^4.0.0: +npm-registry-fetch@^17.0.0, npm-registry-fetch@^17.0.1: + version "17.1.0" + resolved "https://registry.yarnpkg.com/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz#fb69e8e762d456f08bda2f5f169f7638fb92beb1" + integrity sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA== + dependencies: + "@npmcli/redact" "^2.0.0" + jsonparse "^1.3.1" + make-fetch-happen "^13.0.0" + minipass "^7.0.2" + minipass-fetch "^3.0.0" + minizlib "^2.1.2" + npm-package-arg "^11.0.0" + proc-log "^4.0.0" + +npm-run-path@^4.0.0, npm-run-path@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== dependencies: path-key "^3.0.0" +npm-run-path@^5.1.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.3.0.tgz#e23353d0ebb9317f174e93417e4a4d82d0249e9f" + integrity sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ== + dependencies: + path-key "^4.0.0" + number-is-nan@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz" - integrity 
sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ== -nwsapi@^2.2.7: - version "2.2.7" - resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.7.tgz#738e0707d3128cb750dddcfe90e4610482df0f30" - integrity sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ== +nwsapi@^2.2.16: + version "2.2.20" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.20.tgz#22e53253c61e7b0e7e93cef42c891154bcca11ef" + integrity sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA== -nyc@^15.1.0: - version "15.1.0" - resolved "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz" - integrity sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A== +nyc@^17.0.0: + version "17.1.0" + resolved "https://registry.yarnpkg.com/nyc/-/nyc-17.1.0.tgz#b6349a401a62ffeb912bd38ea9a018839fdb6eb1" + integrity sha512-U42vQ4czpKa0QdI1hu950XuNhYqgoM+ZF1HT+VuUHL9hPfDPVvNQyltmMqdE9bUHMVa+8yNbc3QKTj8zQhlVxQ== dependencies: "@istanbuljs/load-nyc-config" "^1.0.0" "@istanbuljs/schema" "^0.1.2" @@ -6343,12 +9921,12 @@ nyc@^15.1.0: decamelize "^1.2.0" find-cache-dir "^3.2.0" find-up "^4.1.0" - foreground-child "^2.0.0" + foreground-child "^3.3.0" get-package-type "^0.1.0" glob "^7.1.6" istanbul-lib-coverage "^3.0.0" istanbul-lib-hook "^3.0.0" - istanbul-lib-instrument "^4.0.0" + istanbul-lib-instrument "^6.0.2" istanbul-lib-processinfo "^2.0.2" istanbul-lib-report "^3.0.0" istanbul-lib-source-maps "^4.0.0" @@ -6366,69 +9944,170 @@ nyc@^15.1.0: oauth-sign@~0.9.0: version "0.9.0" - resolved "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity 
sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= +object-assign@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-inspect@^1.13.3, object-inspect@^1.13.4: + version "1.13.4" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.4.tgz#8375265e21bc20d0fa582c22e1b13485d6e00213" + integrity sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-to-spawn-args@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/object-to-spawn-args/-/object-to-spawn-args-2.0.1.tgz#cf8b8e3c9b3589137a469cac90391f44870144a5" + integrity sha512-6FuKFQ39cOID+BMZ3QaphcC8Y4cw6LXBLyIgPU+OhIYwviJamPAn+4mITapnSBQrejB+NNp+FMskhD8Cq+Ys3w== + +object.assign@^4.1.7: + version "4.1.7" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.7.tgz#8c14ca1a424c6a561b0bb2a22f66f5049a945d3d" + integrity sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.3" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + has-symbols "^1.1.0" + object-keys "^1.1.1" + +object.fromentries@^2.0.8: + version "2.0.8" + resolved 
"https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.8.tgz#f7195d8a9b97bd95cbc1999ea939ecd1a2b00c65" + integrity sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ== dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-object-atoms "^1.0.0" -object-inspect@^1.9.0: - version "1.12.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" - integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== +object.groupby@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.3.tgz#9b125c36238129f6f7b61954a1e7176148d5002e" + integrity sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" -object-treeify@^1.1.4: - version "1.1.32" - resolved "https://registry.npmjs.org/object-treeify/-/object-treeify-1.1.32.tgz" - integrity sha512-0ZcGbbeSlvco6ipx/9M0MsYOV7Cao78sEYye9IAfKPXSUcz+5d7kFg1NCwAZIMq8ltO3LmEeVoyReb/pcAaNXg== +object.values@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.2.1.tgz#deed520a50809ff7f75a7cfd4bc64c7a038c6216" + integrity sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.3" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= +oclif@^4.22.9: + version "4.22.9" + resolved "https://registry.yarnpkg.com/oclif/-/oclif-4.22.9.tgz#e32f5707405cb37f7b68daf7464f16a0e3f72e28" + integrity 
sha512-zrrvAFI/MvfP54QueJrvYl3mjS1eYyeHGV3ITv7NQ16ykcg83E+rXBva2CfymMr1/r9b7m82GTFID6i8f8O66g== dependencies: - isobject "^3.0.0" + "@aws-sdk/client-cloudfront" "^3.864.0" + "@aws-sdk/client-s3" "^3.850.0" + "@inquirer/confirm" "^3.1.22" + "@inquirer/input" "^2.2.4" + "@inquirer/select" "^2.5.0" + "@oclif/core" "^4.5.2" + "@oclif/plugin-help" "^6.2.29" + "@oclif/plugin-not-found" "^3.2.63" + "@oclif/plugin-warn-if-update-available" "^3.1.46" + ansis "^3.16.0" + async-retry "^1.3.3" + change-case "^4" + debug "^4.4.0" + ejs "^3.1.10" + find-yarn-workspace-root "^2.0.0" + fs-extra "^8.1" + github-slugger "^2" + got "^13" + lodash "^4.17.21" + normalize-package-data "^6" + semver "^7.7.1" + sort-package-json "^2.15.1" + tiny-jsonc "^1.0.2" + validate-npm-package-name "^5.0.1" -object.pick@^1.2.0, object.pick@^1.3.0: - version "1.3.0" - resolved "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= +ollama@^0.5.17: + version "0.5.17" + resolved "https://registry.yarnpkg.com/ollama/-/ollama-0.5.17.tgz#3217b93c6b72a0e7ca98776ea8a5695de9e61d44" + integrity sha512-q5LmPtk6GLFouS+3aURIVl+qcAOPC4+Msmx7uBb3pd+fxI55WnGjmLZ0yijI/CYy79x0QPGx3BwC3u5zv9fBvQ== dependencies: - isobject "^3.0.1" + whatwg-fetch "^3.6.20" + +on-exit-leak-free@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz#fed195c9ebddb7d9e4c3842f93f281ac8dadd3b8" + integrity sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA== once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" - resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" -onetime@^5.1.0: +onetime@^2.0.0: 
+ version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ== + dependencies: + mimic-fn "^1.0.0" + +onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" - resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" -open@8.4.2: - version "8.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" - integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== +onetime@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-6.0.0.tgz#7c24c18ed1fd2e9bca4bd26806a33613c77d34b4" + integrity sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ== dependencies: - define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" + mimic-fn "^4.0.0" -openai@^4.52.0: - version "4.52.0" - resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.0.tgz#5f93bdbef05ca3407d92f7a68717234ac0ffd09e" - integrity sha512-xmiNcdA9QJ5wffHpZDpIsge6AsPTETJ6h5iqDNuFQ7qGSNtonHn8Qe0VHy4UwLE8rBWiSqh4j+iSvuYZSeKkPg== +onetime@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-7.0.0.tgz#9f16c92d8c9ef5120e3acd9dd9957cceecc1ab60" + integrity sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ== + dependencies: + mimic-function "^5.0.0" + +oniguruma-to-es@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/oniguruma-to-es/-/oniguruma-to-es-2.3.0.tgz#35ea9104649b7c05f3963c6b3b474d964625028b" + integrity 
sha512-bwALDxriqfKGfUufKGGepCzu9x7nJQuoRoAFp4AnwehhC2crqrDIAP/uN2qdlsAvSMpeRC3+Yzhqc7hLmle5+g== + dependencies: + emoji-regex-xs "^1.0.0" + regex "^5.1.1" + regex-recursion "^5.1.1" + +open@^10.0.2, open@^10.1.2: + version "10.2.0" + resolved "https://registry.yarnpkg.com/open/-/open-10.2.0.tgz#b9d855be007620e80b6fb05fac98141fe62db73c" + integrity sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA== + dependencies: + default-browser "^5.2.1" + define-lazy-prop "^3.0.0" + is-inside-container "^1.0.0" + wsl-utils "^0.1.0" + +openai@^4.104.0: + version "4.104.0" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.104.0.tgz#c489765dc051b95019845dab64b0e5207cae4d30" + integrity sha512-p99EFNsA/yX6UhVO93f5kJsDRLAg+CTA2RBqdHK4RtK8u5IJw32Hyb2dTGKbnnFmnuoBv5r7Z2CURI9sGZpSuA== dependencies: "@types/node" "^18.11.18" "@types/node-fetch" "^2.6.4" @@ -6437,28 +10116,30 @@ openai@^4.52.0: form-data-encoder "1.7.2" formdata-node "^4.3.2" node-fetch "^2.6.7" - web-streams-polyfill "^3.2.1" -opn@^5.3.0: - version "5.5.0" - resolved "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz" - integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== - dependencies: - is-wsl "^1.1.0" +openai@^5.3.0: + version "5.10.1" + resolved "https://registry.yarnpkg.com/openai/-/openai-5.10.1.tgz#4535a9603f4d03b2392bb2ca41a618a80fdcfacd" + integrity sha512-fq6xVfv1/gpLbsj8fArEt3b6B9jBxdhAK+VJ+bDvbUvNd+KTLlA3bnDeYZaBsGH9LUhJ1M1yXfp9sEyBLMx6eA== -optionator@^0.9.1: - version "0.9.1" - resolved "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz" - integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== +openapi-types@^12.1.3: + version "12.1.3" + resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3" + integrity 
sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw== + +optionator@^0.9.3: + version "0.9.4" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" + integrity sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g== dependencies: deep-is "^0.1.3" fast-levenshtein "^2.0.6" levn "^0.4.1" prelude-ls "^1.2.1" type-check "^0.4.0" - word-wrap "^1.2.3" + word-wrap "^1.2.5" -ora@5.4.1: +ora@^5.4.1: version "5.4.1" resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== @@ -6473,36 +10154,55 @@ ora@5.4.1: strip-ansi "^6.0.0" wcwidth "^1.0.1" -os-tmpdir@~1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz" - integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= - -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== +ora@^8.1.0, ora@^8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/ora/-/ora-8.2.0.tgz#8fbbb7151afe33b540dd153f171ffa8bd38e9861" + integrity sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw== + dependencies: + chalk "^5.3.0" + cli-cursor "^5.0.0" + cli-spinners "^2.9.2" + is-interactive "^2.0.0" + is-unicode-supported "^2.0.0" + log-symbols "^6.0.0" + stdin-discarder "^0.2.2" + string-width "^7.2.0" + strip-ansi "^7.1.0" + +own-keys@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/own-keys/-/own-keys-1.0.1.tgz#e4006910a2bf913585289676eebd6f390cf51358" + integrity sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg== + dependencies: + get-intrinsic 
"^1.2.6" + object-keys "^1.1.1" + safe-push-apply "^1.0.0" p-cancelable@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== +p-cancelable@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-3.0.0.tgz#63826694b54d61ca1c20ebcb6d3ecf5e14cd8050" + integrity sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw== + p-finally@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz" - integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow== -p-limit@^2.0.0, p-limit@^2.2.0: +p-limit@^2.2.0: version "2.3.0" - resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" p-limit@^3.0.2: version "3.1.0" - resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== dependencies: yocto-queue "^0.1.0" @@ -6514,23 +10214,16 @@ p-limit@^4.0.0: dependencies: yocto-queue "^1.0.0" -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - 
dependencies: - p-limit "^2.0.0" - p-locate@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== dependencies: p-limit "^2.2.0" p-locate@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== dependencies: p-limit "^3.0.2" @@ -6542,14 +10235,31 @@ p-locate@^6.0.0: dependencies: p-limit "^4.0.0" +p-map@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" + integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== + p-map@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d" integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ== dependencies: aggregate-error "^3.0.0" -p-queue@^6.6.1: +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + +p-map@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-7.0.3.tgz#7ac210a2d36f81ec28b736134810f7ba4418cdb6" + integrity sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA== + +p-queue@^6, p-queue@^6.6.2: 
version "6.6.2" resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-6.6.2.tgz#2068a9dcf8e67dd0ec3e7a2bcb76810faa85e426" integrity sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ== @@ -6557,7 +10267,23 @@ p-queue@^6.6.1: eventemitter3 "^4.0.4" p-timeout "^3.2.0" -p-retry@^4.0.0: +p-queue@^7.3.0: + version "7.4.1" + resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-7.4.1.tgz#7f86f853048beca8272abdbb7cec1ed2afc0f265" + integrity sha512-vRpMXmIkYF2/1hLBKisKeVYJZ8S2tZ0zEAmIJgdVKP2nq0nh4qCdf8bgw+ZgKrkh71AOCaqzwbJJk1WtdcF3VA== + dependencies: + eventemitter3 "^5.0.1" + p-timeout "^5.0.2" + +p-queue@^8.0.1: + version "8.1.0" + resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-8.1.0.tgz#d71929249868b10b16f885d8a82beeaf35d32279" + integrity sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw== + dependencies: + eventemitter3 "^5.0.1" + p-timeout "^6.1.2" + +p-retry@4, p-retry@^4: version "4.6.2" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== @@ -6572,14 +10298,55 @@ p-timeout@^3.2.0: dependencies: p-finally "^1.0.0" -p-try@^2.0.0, p-try@^2.1.0: +p-timeout@^5.0.2: + version "5.1.0" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-5.1.0.tgz#b3c691cf4415138ce2d9cfe071dba11f0fee085b" + integrity sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew== + +p-timeout@^6.1.2: + version "6.1.4" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-6.1.4.tgz#418e1f4dd833fa96a2e3f532547dd2abdb08dbc2" + integrity sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg== + +p-transform@^4.1.6: + version "4.1.6" + resolved 
"https://registry.yarnpkg.com/p-transform/-/p-transform-4.1.6.tgz#7947f5c4c50ea3acb75202554d5392c4036f3de7" + integrity sha512-ub9xBaRuk5FhfyzJv9y4OF8i7/3wH0gRG4C3jUp2pFcoftzkopgr6H0BJNcvbJBGsr7KmHHNWN+DCRPfPBIlAQ== + dependencies: + "@types/node" ">=16.18.31" + p-queue "^7.3.0" + readable-stream "^4.3.0" + +p-try@^2.0.0: version "2.2.0" - resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== +pac-proxy-agent@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/pac-proxy-agent/-/pac-proxy-agent-7.2.0.tgz#9cfaf33ff25da36f6147a20844230ec92c06e5df" + integrity sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA== + dependencies: + "@tootallnate/quickjs-emscripten" "^0.23.0" + agent-base "^7.1.2" + debug "^4.3.4" + get-uri "^6.0.1" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.6" + pac-resolver "^7.0.1" + socks-proxy-agent "^8.0.5" + +pac-resolver@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/pac-resolver/-/pac-resolver-7.0.1.tgz#54675558ea368b64d210fd9c92a640b5f3b8abb6" + integrity sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg== + dependencies: + degenerator "^5.0.0" + netmask "^2.0.2" + package-hash@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-4.0.0.tgz#3537f654665ec3cc38827387fc904c163c54f506" integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ== dependencies: graceful-fs "^4.1.15" @@ -6588,60 +10355,88 @@ package-hash@^4.0.0: release-zalgo "^1.0.0" package-json-from-dist@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz#e501cd3094b278495eb4258d4c9f6d5ac3019f00" - integrity sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw== - -package-json@^6.3.0: - version "6.5.0" - resolved "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz" - integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== - dependencies: - got "^9.6.0" - registry-auth-token "^4.0.0" - registry-url "^5.0.0" - semver "^6.2.0" - -pad-component@0.0.1: - version "0.0.1" - resolved "https://registry.npmjs.org/pad-component/-/pad-component-0.0.1.tgz" - integrity sha1-rR8izhvw/cDW3dkIrxfzUaQEuKw= + version "1.0.1" + resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" + integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== -paged-request@^2.0.1: - version "2.0.2" - resolved "https://registry.npmjs.org/paged-request/-/paged-request-2.0.2.tgz" - integrity sha512-NWrGqneZImDdcMU/7vMcAOo1bIi5h/pmpJqe7/jdsy85BA/s5MSaU/KlpxwW/IVPmIwBcq2uKPrBWWhEWhtxag== - dependencies: - axios "^0.21.1" +package-json@^10.0.0: + version "10.0.1" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-10.0.1.tgz#e49ee07b8de63b638e7f1b5bb353733e428fe7d7" + integrity sha512-ua1L4OgXSBdsu1FPb7F3tYH0F48a6kxvod4pLUlGY9COeJAJQNX/sNH2IiEmsxw7lqYiAwrdHMjz1FctOsyDQg== + dependencies: + ky "^1.2.0" + registry-auth-token "^5.0.2" + registry-url "^6.0.1" + semver "^7.6.0" + +pacote@^18.0.0, pacote@^18.0.6: + version "18.0.6" + resolved "https://registry.yarnpkg.com/pacote/-/pacote-18.0.6.tgz#ac28495e24f4cf802ef911d792335e378e86fac7" + integrity sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A== + dependencies: + "@npmcli/git" "^5.0.0" + "@npmcli/installed-package-contents" 
"^2.0.1" + "@npmcli/package-json" "^5.1.0" + "@npmcli/promise-spawn" "^7.0.0" + "@npmcli/run-script" "^8.0.0" + cacache "^18.0.0" + fs-minipass "^3.0.0" + minipass "^7.0.2" + npm-package-arg "^11.0.0" + npm-packlist "^8.0.0" + npm-pick-manifest "^9.0.0" + npm-registry-fetch "^17.0.0" + proc-log "^4.0.0" + promise-retry "^2.0.1" + sigstore "^2.2.0" + ssri "^10.0.0" + tar "^6.1.11" pako@~1.0.2: version "1.0.11" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== -papaparse@^5.3.1: - version "5.3.1" - resolved "https://registry.npmjs.org/papaparse/-/papaparse-5.3.1.tgz" - integrity sha512-Dbt2yjLJrCwH2sRqKFFJaN5XgIASO9YOFeFP8rIBRG2Ain8mqk5r1M6DkfvqEVozVcz3r3HaUGw253hA1nLIcA== +papaparse@^5.5.3: + version "5.5.3" + resolved "https://registry.yarnpkg.com/papaparse/-/papaparse-5.5.3.tgz#07f8994dec516c6dab266e952bed68e1de59fa9a" + integrity sha512-5QvjGxYVjxO59MGU2lHVYpRWBBtKHnlIAcSe1uNFCkkptUh63NFRj0FJQm7nR67puEruUci/ZkjmEFrjCAyP4A== + +param-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: callsites "^3.0.0" +parse-conflict-json@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz#67dc55312781e62aa2ddb91452c7606d1969960c" + integrity 
sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw== + dependencies: + json-parse-even-better-errors "^3.0.0" + just-diff "^6.0.0" + just-diff-apply "^5.2.0" + parse-json@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz" - integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + integrity sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw== dependencies: error-ex "^1.3.1" json-parse-better-errors "^1.0.1" -parse-json@^5.0.0: +parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" - resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== dependencies: "@babel/code-frame" "^7.0.0" @@ -6649,44 +10444,48 @@ parse-json@^5.0.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" -parse5@^7.1.2: - version "7.1.2" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32" - integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== +parse-json@^8.0.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-8.3.0.tgz#88a195a2157025139a2317a4f2f9252b61304ed5" + integrity sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ== dependencies: - entities "^4.4.0" - -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= + "@babel/code-frame" "^7.26.2" + index-to-position "^1.1.0" + type-fest "^4.39.1" -pascalcase@^1.0.0: - version 
"1.0.0" - resolved "https://registry.npmjs.org/pascalcase/-/pascalcase-1.0.0.tgz" - integrity sha512-BSExi0rRnCHReJys6NocaK+cfTXNinAegfWBvr0JD3hiaEG7Nuc7r0CIdOJunXrs8gU/sbHQ9dxVAtiVQisjmg== +parse5@^7.2.1: + version "7.3.0" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.3.0.tgz#d7e224fa72399c7a175099f45fc2ad024b05ec05" + integrity sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw== + dependencies: + entities "^6.0.0" -password-prompt@^1.0.7, password-prompt@^1.1.2: - version "1.1.2" - resolved "https://registry.npmjs.org/password-prompt/-/password-prompt-1.1.2.tgz" - integrity sha512-bpuBhROdrhuN3E7G/koAju0WjVw9/uQOG5Co5mokNj0MiOSBVZS1JTwM4zl55hu0WFmIEFvO9cU9sJQiBIYeIA== +pascal-case@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== dependencies: - ansi-escapes "^3.1.0" - cross-spawn "^6.0.5" + no-case "^3.0.4" + tslib "^2.0.3" -path-dirname@^1.0.0: - version "1.0.2" - resolved "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz" - integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= +pascalcase@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-2.0.0.tgz#13515fcbfa76eddff9282827f59f7868e3cc9250" + integrity sha512-DHpENy5Qm/FaX+x3iBLoMLG/XHNCTgL+yErm1TwuVaj6u4fiOSkYkf60vGtITk7hrKHOO4uCl9vRrD4hqjNKjg== + dependencies: + camelcase "^6.2.1" -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz" - integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= +path-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f" + integrity sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg== + dependencies: 
+ dot-case "^3.0.4" + tslib "^2.0.3" path-exists@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== path-exists@^5.0.0: @@ -6696,24 +10495,37 @@ path-exists@^5.0.0: path-is-absolute@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= +path-is-inside@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w== path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" - resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-parse@^1.0.6, path-parse@^1.0.7: +path-key@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-4.0.0.tgz#295588dc3aee64154f877adb9d780b81c554bf18" + integrity sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ== + +path-parse@^1.0.7: version "1.0.7" - resolved 
"https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-scurry@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-2.0.0.tgz#9f052289f23ad8bf9397a2a0425e7b8615c58580" @@ -6722,256 +10534,424 @@ path-scurry@^2.0.0: lru-cache "^11.0.0" minipass "^7.1.2" +path-to-regexp@3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-3.3.0.tgz#f7f31d32e8518c2660862b644414b6d5c63a611b" + integrity sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw== + path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + version "1.9.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.9.0.tgz#5dc0753acbf8521ca2e0f137b4578b917b10cf24" + integrity sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g== dependencies: isarray "0.0.1" -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - dependencies: - pify "^3.0.0" +path-to-regexp@^6.2.1: + version "6.3.0" + resolved 
"https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4" + integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ== path-type@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== +path-type@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-6.0.0.tgz#2f1bb6791a91ce99194caede5d6c5920ed81eb51" + integrity sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ== + pathval@^1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ== pend@~1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz" - integrity sha1-elfrVQpng/kRUzH89GY9XI4AelA= - -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" - integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" + integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== -picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1: - version "2.2.2" - resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz" - integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== +picocolors@^1.0.0, picocolors@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== -picomatch@^2.3.1: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -pify@^2.3.0: - version "2.3.0" - resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" - integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= +picomatch@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-3.0.1.tgz#817033161def55ec9638567a2f3bbc876b3e7516" + integrity sha512-I3EurrIQMlRc9IaAZnqRR044Phh2DXY+55o7uJ0V+hYZAcQYSuFWsc9q5PvyDHUSCe1Qxn/iBz+78s86zWnGag== -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= +picomatch@^4.0.2: + version "4.0.3" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.3.tgz#796c76136d1eead715db1e7bad785dedd695a042" + integrity sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q== -pify@^4.0.1: - version "4.0.1" - resolved "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" - integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== +pino-abstract-transport@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/pino-abstract-transport/-/pino-abstract-transport-1.2.0.tgz#97f9f2631931e242da531b5c66d3079c12c9d1b5" + integrity sha512-Guhh8EZfPCfH+PMXAb6rKOjGQEoy0xlAIn+irODG5kgfYV+BQ0rGYYWTIel3P5mmyXqkYkPmdIkywsn6QKUR1Q== + dependencies: + readable-stream "^4.0.0" + split2 "^4.0.0" + +pino-abstract-transport@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz#de241578406ac7b8a33ce0d77ae6e8a0b3b68a60" + integrity sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw== + dependencies: + split2 "^4.0.0" + +pino-pretty@^11.3.0: + version "11.3.0" + resolved "https://registry.yarnpkg.com/pino-pretty/-/pino-pretty-11.3.0.tgz#390b3be044cf3d2e9192c7d19d44f6b690468f2e" + integrity sha512-oXwn7ICywaZPHmu3epHGU2oJX4nPmKvHvB/bwrJHlGcbEWaVcotkpyVHMKLKmiVryWYByNp0jpgAcXpFJDXJzA== + dependencies: + colorette "^2.0.7" + dateformat "^4.6.3" + fast-copy "^3.0.2" + fast-safe-stringify "^2.1.1" + help-me "^5.0.0" + joycon "^3.1.1" + minimist "^1.2.6" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^2.0.0" + pump "^3.0.0" + readable-stream "^4.0.0" + secure-json-parse "^2.4.0" + sonic-boom "^4.0.1" + strip-json-comments "^3.1.1" -pkg-dir@4.2.0, pkg-dir@^4.1.0, pkg-dir@^4.2.0: +pino-std-serializers@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz#7c625038b13718dbbd84ab446bd673dc52259e3b" + integrity sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA== + +pino@^9.7.0: + version "9.7.0" + resolved "https://registry.yarnpkg.com/pino/-/pino-9.7.0.tgz#ff7cd86eb3103ee620204dbd5ca6ffda8b53f645" + integrity sha512-vnMCM6xZTb1WDmLvtG2lE/2p+t9hDEIvTWJsu6FejkE62vB7gDhvzrpFR4Cw2to+9JNQxVnkAKVPA1KPB98vWg== + dependencies: + atomic-sleep "^1.0.0" + fast-redact "^3.1.1" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^2.0.0" + pino-std-serializers "^7.0.0" + process-warning "^5.0.0" + quick-format-unescaped "^4.0.3" + real-require "^0.2.0" + safe-stable-stringify "^2.3.1" + sonic-boom "^4.0.1" + thread-stream "^3.0.0" + +pkg-dir@^4.1.0: version "4.2.0" - resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" + resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: find-up "^4.0.0" -pkg-dir@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-7.0.0.tgz#8f0c08d6df4476756c5ff29b3282d0bab7517d11" - integrity sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA== - dependencies: - find-up "^6.3.0" +pluralize@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" + integrity sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA== -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= +possible-typed-array-names@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz#93e3582bc0e5426586d9d07b79ee40fc841de4ae" + integrity sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg== + +postcss-selector-parser@^6.0.10: + version "6.1.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz#27ecb41fb0e3b6ba7a1ec84fff347f734c7929de" + integrity sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" postman-request@^2.88.1-postman.30: - version "2.88.1-postman.33" - resolved "https://registry.yarnpkg.com/postman-request/-/postman-request-2.88.1-postman.33.tgz#684147d61c9a263a28f148d3207b1593e0f01ec5" - integrity sha512-uL9sCML4gPH6Z4hreDWbeinKU0p0Ke261nU7OvII95NU22HN6Dk7T/SaVPaj6T4TsQqGKIFw6/woLZnH7ugFNA== + version 
"2.88.1-postman.42" + resolved "https://registry.yarnpkg.com/postman-request/-/postman-request-2.88.1-postman.42.tgz#b7c9b07edcd7b0d4930a2a4c53ec06c4682357e7" + integrity sha512-lepCE8QU0izagxxA31O/MHj8IUguwLlpqeVK7A8vHK401FPvN/PTIzWHm29c/L3j3kTUE7dhZbq8vvbyQ7S2Bw== dependencies: "@postman/form-data" "~3.1.1" "@postman/tough-cookie" "~4.1.3-postman.1" - "@postman/tunnel-agent" "^0.6.3" + "@postman/tunnel-agent" "^0.6.4" aws-sign2 "~0.7.0" aws4 "^1.12.0" - brotli "^1.3.3" caseless "~0.12.0" combined-stream "~1.0.6" extend "~3.0.2" forever-agent "~0.6.1" - har-validator "~5.1.3" - http-signature "~1.3.1" + http-signature "~1.4.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" mime-types "^2.1.35" oauth-sign "~0.9.0" - performance-now "^2.1.0" qs "~6.5.3" safe-buffer "^5.1.2" stream-length "^1.0.2" uuid "^8.3.2" +prebuild-install@^7.1.3: + version "7.1.3" + resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.3.tgz#d630abad2b147443f20a212917beae68b8092eec" + integrity sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug== + dependencies: + detect-libc "^2.0.0" + expand-template "^2.0.3" + github-from-package "0.0.0" + minimist "^1.2.3" + mkdirp-classic "^0.5.3" + napi-build-utils "^2.0.0" + node-abi "^3.3.0" + pump "^3.0.0" + rc "^1.2.7" + simple-get "^4.0.0" + tar-fs "^2.0.0" + tunnel-agent "^0.6.0" + prelude-ls@^1.2.1: version "1.2.1" - resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz" - integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= +prettier@^2.8.8: + version "2.8.8" + resolved 
"https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz" - integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= +pretty-bytes@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-6.1.1.tgz#38cd6bb46f47afbf667c202cfc754bffd2016a3b" + integrity sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ== -pretty-bytes@^5.2.0: - version "5.6.0" - resolved "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz" - integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== +pretty-quick@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/pretty-quick/-/pretty-quick-3.3.1.tgz#cfde97fec77a8d201a0e0c9c71d9990e12587ee2" + integrity sha512-3b36UXfYQ+IXXqex6mCca89jC8u0mYLqFAN5eTQKoXO6oCQYcIVYZEB/5AlBHI7JPYygReM2Vv6Vom/Gln7fBg== + dependencies: + execa "^4.1.0" + find-up "^4.1.0" + ignore "^5.3.0" + mri "^1.2.0" + picocolors "^1.0.0" + picomatch "^3.0.1" + tslib "^2.6.2" + +proc-log@^4.0.0, proc-log@^4.1.0, proc-log@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/proc-log/-/proc-log-4.2.0.tgz#b6f461e4026e75fdfe228b265e9f7a00779d7034" + integrity sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA== + +proc-log@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/proc-log/-/proc-log-5.0.0.tgz#e6c93cf37aef33f835c53485f314f50ea906a9d8" + integrity sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ== -process-nextick-args@^2.0.0, process-nextick-args@~2.0.0: +process-nextick-args@~2.0.0: version "2.0.1" - resolved 
"https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -process-on-spawn@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz" - integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg== +process-on-spawn@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/process-on-spawn/-/process-on-spawn-1.1.0.tgz#9d5999ba87b3bf0a8acb05322d69f2f5aa4fb763" + integrity sha512-JOnOPQ/8TZgjs1JIH/m9ni7FfimjNa/PRx7y/Wb5qdItsnhO0jE4AT7fC0HjC28DUQWDr50dwSYZLdRMlqDq3Q== + dependencies: + fromentries "^1.2.0" + +process-warning@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-5.0.0.tgz#566e0bf79d1dff30a72d8bbbe9e8ecefe8d378d7" + integrity sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA== + +process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== + +proggy@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/proggy/-/proggy-2.0.0.tgz#154bb0e41d3125b518ef6c79782455c2c47d94e1" + integrity sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A== + +progress@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +promise-all-reject-late@^1.0.0: + version "1.0.1" + 
resolved "https://registry.yarnpkg.com/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz#f8ebf13483e5ca91ad809ccc2fcf25f26f8643c2" + integrity sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw== + +promise-call-limit@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/promise-call-limit/-/promise-call-limit-3.0.2.tgz#524b7f4b97729ff70417d93d24f46f0265efa4f9" + integrity sha512-mRPQO2T1QQVw11E7+UdCJu7S61eJVWknzml9sC1heAdj1jxl0fWMBypIt9ZOcLFf8FkG995ZD7RnVk7HH72fZw== + +promise-inflight@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" + integrity sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g== + +promise-retry@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-2.0.1.tgz#ff747a13620ab57ba688f5fc67855410c370da22" + integrity sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g== dependencies: - fromentries "^1.2.0" + err-code "^2.0.2" + retry "^0.12.0" -progress@2.0.3, progress@^2.0.0: - version "2.0.3" - resolved "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== +prompts@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" -promise@^7.1.1: - version "7.3.1" - resolved "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz" - integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== +proper-lockfile@^4.1.2: + version "4.1.2" + resolved 
"https://registry.yarnpkg.com/proper-lockfile/-/proper-lockfile-4.1.2.tgz#c8b9de2af6b2f1601067f98e01ac66baa223141f" + integrity sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA== dependencies: - asap "~2.0.3" + graceful-fs "^4.2.4" + retry "^0.12.0" + signal-exit "^3.0.2" -propagate@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz" - integrity sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag== +property-information@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/property-information/-/property-information-7.1.0.tgz#b622e8646e02b580205415586b40804d3e8bfd5d" + integrity sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ== + +proto-list@~1.2.1: + version "1.2.4" + resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" + integrity sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA== + +protobufjs@^7.2.5: + version "7.5.3" + resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.5.3.tgz#13f95a9e3c84669995ec3652db2ac2fb00b89363" + integrity sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/node" ">=13.7.0" + long "^5.0.0" + +proxy-agent@^6.4.0, proxy-agent@^6.5.0: + version "6.5.0" + resolved "https://registry.yarnpkg.com/proxy-agent/-/proxy-agent-6.5.0.tgz#9e49acba8e4ee234aacb539f89ed9c23d02f232d" + integrity 
sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A== + dependencies: + agent-base "^7.1.2" + debug "^4.3.4" + http-proxy-agent "^7.0.1" + https-proxy-agent "^7.0.6" + lru-cache "^7.14.1" + pac-proxy-agent "^7.1.0" + proxy-from-env "^1.1.0" + socks-proxy-agent "^8.0.5" -proxy-from-env@1.1.0, proxy-from-env@^1.1.0: +proxy-from-env@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== - -psl@^1.1.28, psl@^1.1.33, psl@^1.8.0: - version "1.8.0" - resolved "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz" - integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== +psl@^1.1.33, psl@^1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.15.0.tgz#bdace31896f1d97cec6a79e8224898ce93d974c6" + integrity sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w== + dependencies: + punycode "^2.3.1" pump@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + version "3.0.3" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.3.tgz#151d979f1a29668dc0025ec589a455b53282268d" + integrity sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" -punycode@^2.1.0, 
punycode@^2.1.1: - version "2.1.1" - resolved "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +punycode.js@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode.js/-/punycode.js-2.3.1.tgz#6b53e56ad75588234e79f4affa90972c7dd8cdb7" + integrity sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA== -punycode@^2.3.1: +punycode@^2.1.0, punycode@^2.1.1, punycode@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== -pupa@^2.1.1: - version "2.1.1" - resolved "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz" - integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== - dependencies: - escape-goat "^2.0.0" - -puppeteer@^13.5.2: - version "13.5.2" - resolved "https://registry.npmjs.org/puppeteer/-/puppeteer-13.5.2.tgz" - integrity sha512-DJAyXODBikZ3xPs8C35CtExEw78LZR9RyelGDAs0tX1dERv3OfW7qpQ9VPBgsfz+hG2HiMTO/Tyf7BuMVWsrxg== - dependencies: - cross-fetch "3.1.5" - debug "4.3.4" - devtools-protocol "0.0.969999" - extract-zip "2.0.1" - https-proxy-agent "5.0.0" - pkg-dir "4.2.0" - progress "2.0.3" - proxy-from-env "1.1.0" - rimraf "3.0.2" - tar-fs "2.1.1" - unbzip2-stream "1.4.3" - ws "8.5.0" - -qqjs@^0.3.10: - version "0.3.11" - resolved "https://registry.npmjs.org/qqjs/-/qqjs-0.3.11.tgz" - integrity sha512-pB2X5AduTl78J+xRSxQiEmga1jQV0j43jOPs/MTgTLApGFEOn6NgdE2dEjp7nvDtjkIOZbvFIojAiYUx6ep3zg== - dependencies: - chalk "^2.4.1" - debug "^4.1.1" - execa "^0.10.0" - fs-extra "^6.0.1" - get-stream "^5.1.0" - glob "^7.1.2" - globby "^10.0.1" - http-call "^5.1.2" - load-json-file "^6.2.0" - pkg-dir "^4.2.0" - tar-fs "^2.0.0" - tmp "^0.1.0" - write-json-file "^4.1.1" - 
-qs@^6.10.1: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== - dependencies: - side-channel "^1.0.4" - -qs@^6.9.1: - version "6.11.1" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.1.tgz#6c29dff97f0c0060765911ba65cbc9764186109f" - integrity sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ== - dependencies: - side-channel "^1.0.4" - -qs@~6.5.2, qs@~6.5.3: +pupa@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pupa/-/pupa-3.1.0.tgz#f15610274376bbcc70c9a3aa8b505ea23f41c579" + integrity sha512-FLpr4flz5xZTSJxSeaheeMKN/EDzMdK7b8PTOC6a5PYFKTucWbdqjgqaEyH0shFiSJrVB1+Qqi4Tk19ccU6Aug== + dependencies: + escape-goat "^4.0.0" + +puppeteer-core@24.14.0: + version "24.14.0" + resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-24.14.0.tgz#c2e3e80649b6a0c2855c3dae09c794c981c686d6" + integrity sha512-NO9XpCl+i8oB0zJp81iPhzMo2QK8/JTj4ramSvTpGCo9CPCNo4AZ8qVOGpSgXzlcOfOT3VHOkzTfPo08GOE5jA== + dependencies: + "@puppeteer/browsers" "2.10.6" + chromium-bidi "7.1.1" + debug "^4.4.1" + devtools-protocol "0.0.1464554" + typed-query-selector "^2.12.0" + ws "^8.18.3" + +puppeteer-core@^23.11.1: + version "23.11.1" + resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-23.11.1.tgz#3e064de11b3cb3a2df1a8060ff2d05b41be583db" + integrity sha512-3HZ2/7hdDKZvZQ7dhhITOUg4/wOrDRjyK2ZBllRB0ZCOi9u0cwq1ACHDjBB+nX+7+kltHjQvBRdeY7+W0T+7Gg== + dependencies: + "@puppeteer/browsers" "2.6.1" + chromium-bidi "0.11.0" + debug "^4.4.0" + devtools-protocol "0.0.1367902" + typed-query-selector "^2.12.0" + ws "^8.18.0" + +puppeteer@>=8.0.0: + version "24.14.0" + resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-24.14.0.tgz#3f4d3123c2364e8b5c69f6cc48854620462025b5" + integrity 
sha512-GB7suRDkp9pUnxpNGAORICQCtw11KFbg6U2iJXVTflzJLK5D1qzq8xOOmLgN/QnDBpDMdpn96ri52XkuN83Giw== + dependencies: + "@puppeteer/browsers" "2.10.6" + chromium-bidi "7.1.1" + cosmiconfig "^9.0.0" + devtools-protocol "0.0.1464554" + puppeteer-core "24.14.0" + typed-query-selector "^2.12.0" + +qs@^6.10.1, qs@^6.10.3: + version "6.14.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.14.0.tgz#c63fa40680d2c5c941412a0e899c89af60c0a930" + integrity sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w== + dependencies: + side-channel "^1.1.0" + +qs@~6.5.3: version "6.5.3" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" - integrity "sha1-Ou7/yRln7241wOSI70b7KWq3aq0= sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==" + integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== query-string@^7.0.0: version "7.1.3" @@ -6989,34 +10969,40 @@ querystringify@^2.1.1: integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== queue-microtask@^1.2.2: - version "1.2.2" - resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.2.tgz" - integrity sha512-dB15eXv3p2jDlbOiNLyMabYg1/sXvppd8DP2J3EOCQ0AkuSXCW2tP7mnVouVLJKgUMY6yP0kcQDVpLCN13h4Xg== + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-format-unescaped@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7" + integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg== + +quick-lru@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" + integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== quick-lru@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== -randomatic@^3.0.0: - version "3.1.1" - resolved "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz" - integrity sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw== - dependencies: - is-number "^4.0.0" - kind-of "^6.0.0" - math-random "^1.0.1" - randombytes@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" -rc@^1.2.8: +range-parser@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e" + integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== + +rc@1.2.8, rc@^1.2.7: version "1.2.8" - resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" @@ -7024,34 +11010,40 @@ rc@^1.2.8: minimist "^1.2.0" strip-json-comments "~2.0.1" -read-chunk@^3.2.0: - version "3.2.0" - resolved "https://registry.npmjs.org/read-chunk/-/read-chunk-3.2.0.tgz" - integrity 
sha512-CEjy9LCzhmD7nUpJ1oVOE6s/hBkejlcJEgLQHVnQznOSilOPb+kpKktlLfFDK3/WP43+F80xkUTM2VOkYoSYvQ== +read-cmd-shim@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/read-cmd-shim/-/read-cmd-shim-4.0.0.tgz#640a08b473a49043e394ae0c7a34dd822c73b9bb" + integrity sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q== + +read-package-json-fast@^3.0.0, read-package-json-fast@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz#394908a9725dc7a5f14e70c8e7556dff1d2b1049" + integrity sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw== dependencies: - pify "^4.0.1" - with-open-file "^0.1.6" + json-parse-even-better-errors "^3.0.0" + npm-normalize-package-bin "^3.0.0" -read-pkg-up@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-5.0.0.tgz" - integrity sha512-XBQjqOBtTzyol2CpsQOw8LHV0XbDZVG7xMMjmXAJomlVY03WOBRmYgDJETlvcg0H63AJvPRwT7GFi5rvOzUOKg== +read-package-up@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/read-package-up/-/read-package-up-11.0.0.tgz#71fb879fdaac0e16891e6e666df22de24a48d5ba" + integrity sha512-MbgfoNPANMdb4oRBNg5eqLbB2t2r+o5Ua1pNt8BqGp4I0FJZhuVSOj3PaBPni4azWuSzEdNn2evevzVmEk1ohQ== dependencies: - find-up "^3.0.0" - read-pkg "^5.0.0" + find-up-simple "^1.0.0" + read-pkg "^9.0.0" + type-fest "^4.6.0" read-pkg-up@^7.0.1: version "7.0.1" - resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== dependencies: find-up "^4.1.0" read-pkg "^5.2.0" type-fest "^0.8.1" -read-pkg@^5.0.0, read-pkg@^5.2.0: +read-pkg@^5.2.0: version "5.2.0" - resolved 
"https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== dependencies: "@types/normalize-package-data" "^2.4.0" @@ -7059,16 +11051,27 @@ read-pkg@^5.0.0, read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -"readable-stream@2 || 3", readable-stream@^3.1.1, readable-stream@^3.4.0: - version "3.6.0" - resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== +read-pkg@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-9.0.1.tgz#b1b81fb15104f5dbb121b6bbdee9bbc9739f569b" + integrity sha512-9viLL4/n1BJUCT1NXVTdS1jtm80yDEgR5T4yCelII49Mbj0v1rZdKqj7zCiYdbB0CuCgdrvHcNogAKTFPBocFA== + dependencies: + "@types/normalize-package-data" "^2.4.3" + normalize-package-data "^6.0.0" + parse-json "^8.0.0" + type-fest "^4.6.0" + unicorn-magic "^0.1.0" + +readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" util-deprecate "^1.0.1" -readable-stream@^2.0.0: +readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@~2.3.6: version "2.3.8" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== @@ -7081,27 +11084,16 @@ 
readable-stream@^2.0.0: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.0, readable-stream@^2.2.2, readable-stream@^2.3.5, readable-stream@~2.3.6: - version "2.3.7" - resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.6.0: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== +readable-stream@^4.0.0, readable-stream@^4.3.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.7.0.tgz#cedbd8a1146c13dfff8dab14068028d58c15ac91" + integrity sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg== dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" + abort-controller "^3.0.0" + buffer "^6.0.3" + events "^3.3.0" + process "^0.11.10" + string_decoder "^1.3.0" readdir-glob@^1.1.2: version "1.1.3" @@ -7110,150 +11102,135 @@ readdir-glob@^1.1.2: dependencies: minimatch "^5.1.0" -readdirp@~3.5.0: - version "3.5.0" - resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz" - integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== dependencies: 
picomatch "^2.2.1" +real-require@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.2.0.tgz#209632dea1810be2ae063a6ac084fee7e33fba78" + integrity sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg== + rechoir@^0.6.2: version "0.6.2" - resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz" - integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q= + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== dependencies: resolve "^1.1.6" -redeyed@~2.1.0: - version "2.1.1" - resolved "https://registry.npmjs.org/redeyed/-/redeyed-2.1.1.tgz" - integrity sha1-iYS1gV2ZyyIEacme7v/jiRPmzAs= +redent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== dependencies: - esprima "~4.0.0" + indent-string "^4.0.0" + strip-indent "^3.0.0" -redis-commands@1.5.0: - version "1.5.0" - resolved "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz" - integrity sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg== +reflect.getprototypeof@^1.0.6, reflect.getprototypeof@^1.0.9: + version "1.0.10" + resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz#c629219e78a3316d8b604c765ef68996964e7bf9" + integrity sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw== + dependencies: + call-bind "^1.0.8" + define-properties "^1.2.1" + es-abstract "^1.23.9" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" + get-intrinsic "^1.2.7" + get-proto "^1.0.1" + which-builtin-type "^1.2.1" + +regex-recursion@^5.1.1: + version "5.1.1" + 
resolved "https://registry.yarnpkg.com/regex-recursion/-/regex-recursion-5.1.1.tgz#5a73772d18adbf00f57ad097bf54171b39d78f8b" + integrity sha512-ae7SBCbzVNrIjgSbh7wMznPcQel1DNlDtzensnFxpiNpXt1U2ju/bHugH422r+4LAVS1FpW1YCwilmnNsjum9w== + dependencies: + regex "^5.1.1" + regex-utilities "^2.3.0" -redis-errors@^1.0.0, redis-errors@^1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz" - integrity sha1-62LSrbFeTq9GEMBK/hUpOEJQq60= +regex-utilities@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/regex-utilities/-/regex-utilities-2.3.0.tgz#87163512a15dce2908cf079c8960d5158ff43280" + integrity sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng== -redis-parser@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz" - integrity sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ= +regex@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/regex/-/regex-5.1.1.tgz#cf798903f24d6fe6e531050a36686e082b29bd03" + integrity sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw== dependencies: - redis-errors "^1.0.0" + regex-utilities "^2.3.0" -regenerator-runtime@^0.14.0: - version "0.14.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" - integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== +regexp-tree@^0.1.27: + version "0.1.27" + resolved "https://registry.yarnpkg.com/regexp-tree/-/regexp-tree-0.1.27.tgz#2198f0ef54518ffa743fe74d983b56ffd631b6cd" + integrity sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA== -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz" - integrity 
sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== +regexp.prototype.flags@^1.5.4: + version "1.5.4" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz#1ad6c62d44a259007e55b3970e00f746efbcaa19" + integrity sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA== dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" + call-bind "^1.0.8" + define-properties "^1.2.1" + es-errors "^1.3.0" + get-proto "^1.0.1" + gopd "^1.2.0" + set-function-name "^2.0.2" -regexpp@^3.0.0, regexpp@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz" - integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== +registry-auth-token@^5.0.2, registry-auth-token@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-5.1.0.tgz#3c659047ecd4caebd25bc1570a3aa979ae490eca" + integrity sha512-GdekYuwLXLxMuFTwAPg5UKGLW/UXzQrZvH/Zj791BQif5T05T0RsaLfHc9q3ZOKi7n+BoprPD9mJ0O0k4xzUlw== + dependencies: + "@pnpm/npm-conf" "^2.1.0" -registry-auth-token@^4.0.0: - version "4.2.1" - resolved "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz" - integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw== +registry-url@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-6.0.1.tgz#056d9343680f2f64400032b1e199faa692286c58" + integrity sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q== dependencies: - rc "^1.2.8" + rc "1.2.8" -registry-url@^5.0.0: - version "5.1.0" - resolved "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz" - integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== +regjsparser@^0.10.0: + version 
"0.10.0" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.10.0.tgz#b1ed26051736b436f22fdec1c8f72635f9f44892" + integrity sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA== dependencies: - rc "^1.2.8" + jsesc "~0.5.0" release-zalgo@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz" - integrity sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA= + resolved "https://registry.yarnpkg.com/release-zalgo/-/release-zalgo-1.0.0.tgz#09700b7e5074329739330e535c5a90fb67851730" + integrity sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA== dependencies: es6-error "^4.0.1" -remarkable@^1.7.1: - version "1.7.4" - resolved "https://registry.npmjs.org/remarkable/-/remarkable-1.7.4.tgz" - integrity sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg== - dependencies: - argparse "^1.0.10" - autolinker "~0.28.0" - -remove-trailing-separator@^1.0.1: +remove-trailing-separator@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz" - integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= - -repeat-element@^1.1.2: - version "1.1.4" - resolved "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz" - integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== - -repeat-string@^1.5.2, repeat-string@^1.6.1: - version "1.6.1" - resolved "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= - -replace-ext@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.1.tgz" - integrity sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw== + resolved 
"https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw== -request@^2.72.0: - version "2.88.2" - resolved "https://registry.npmjs.org/request/-/request-2.88.2.tgz" - integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.8.0" - caseless "~0.12.0" - combined-stream "~1.0.6" - extend "~3.0.2" - forever-agent "~0.6.1" - form-data "~2.3.2" - har-validator "~5.1.3" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.19" - oauth-sign "~0.9.0" - performance-now "^2.1.0" - qs "~6.5.2" - safe-buffer "^5.1.2" - tough-cookie "~2.5.0" - tunnel-agent "^0.6.0" - uuid "^3.3.2" +replace-ext@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-2.0.0.tgz#9471c213d22e1bcc26717cd6e50881d88f812b06" + integrity sha512-UszKE5KVK6JvyD92nzMn9cDapSk6w/CaFZ96CnmDMUqH9oowfxF/ZjRITD25H4DnOQClLA4/j7jLGXXLVKxAug== require-directory@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== require-from-string@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== 
require-main-filename@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== requires-port@^1.0.0: @@ -7261,50 +11238,44 @@ requires-port@^1.0.0: resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== -resolve-alpn@^1.0.0: +requizzle@^0.2.3: + version "0.2.4" + resolved "https://registry.yarnpkg.com/requizzle/-/requizzle-0.2.4.tgz#319eb658b28c370f0c20f968fa8ceab98c13d27c" + integrity sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw== + dependencies: + lodash "^4.17.21" + +resolve-alpn@^1.0.0, resolve-alpn@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== +resolve-from@5.0.0, resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== -resolve-from@^5.0.0: - version "5.0.0" - resolved 
"https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= +resolve-global@1.0.0, resolve-global@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/resolve-global/-/resolve-global-1.0.0.tgz#a2a79df4af2ca3f49bf77ef9ddacd322dad19255" + integrity sha512-zFa12V4OLtT5XUX/Q4VLvTfBf+Ok0SPc1FNGM/z9ctUdiU618qwKpWnd0CHs3+RqROfyEg/DhuHbMWYqcgljEw== + dependencies: + global-dirs "^0.1.1" -resolve@^1.1.6: - version "1.21.1" - resolved "https://registry.npmjs.org/resolve/-/resolve-1.21.1.tgz" - integrity sha512-lfEImVbnolPuaSZuLQ52cAxPBHeI77sPwCOWRdy12UG/CNa8an7oBHH1R+Fp1/mUqSJi4c8TIP6FOIPSZAUrEQ== +resolve@^1.1.6, resolve@^1.10.0, resolve@^1.22.4: + version "1.22.10" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39" + integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w== dependencies: - is-core-module "^2.8.0" + is-core-module "^2.16.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -resolve@^1.10.0, resolve@^1.17.0: - version "1.20.0" - resolved "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz" - integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== - dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" - -responselike@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz" - integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= - dependencies: - lowercase-keys "^1.0.0" - responselike@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.1.tgz#9a0bc8fdc252f3fb1cca68b016591059ba1422bc" @@ -7312,59 +11283,75 @@ 
responselike@^2.0.0: dependencies: lowercase-keys "^2.0.0" +responselike@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-3.0.0.tgz#20decb6c298aff0dbee1c355ca95461d42823626" + integrity sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg== + dependencies: + lowercase-keys "^3.0.0" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q== + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + restore-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== dependencies: onetime "^5.1.0" signal-exit "^3.0.2" -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== +restore-cursor@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-5.1.0.tgz#0766d95699efacb14150993f55baf0953ea1ebe7" + integrity sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA== + dependencies: + onetime "^7.0.0" + signal-exit "^4.1.0" -retry@^0.13.1: +retry@0.13.1, retry@^0.13.1: version "0.13.1" resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== +retry@^0.12.0: + version "0.12.0" + resolved 
"https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" + integrity sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow== + reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + version "1.1.0" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.1.0.tgz#0fe13b9522e1473f51b558ee796e08f11f9b489f" + integrity sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw== -rimraf@2, rimraf@^2.6.3: +rimraf@2: version "2.7.1" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" -rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: +rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" -rimraf@~2.4.0: - version "2.4.5" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz" - integrity sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto= - dependencies: - glob "^6.0.1" - -rrweb-cssom@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz#ed298055b97cbddcdeb278f904857629dec5e0e1" - integrity sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw== +rrweb-cssom@^0.8.0: + version "0.8.0" + resolved 
"https://registry.yarnpkg.com/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz#3021d1b4352fbf3b614aaeed0bc0d5739abe0bc2" + integrity sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw== -run-async@^2.0.0, run-async@^2.4.0: - version "2.4.1" - resolved "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz" - integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== +run-applescript@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/run-applescript/-/run-applescript-7.0.0.tgz#e5a553c2bffd620e169d276c1cd8f1b64778fbeb" + integrity sha512-9by4Ij99JUr/MCFBUkDKLWK3G9HVXmabKz9U5MlIAIuvuzkiOicRYs8XJLxX+xahD+mLiiCYDqF9dKAgtzKP1A== run-async@^3.0.0: version "3.0.0" @@ -7373,61 +11360,77 @@ run-async@^3.0.0: run-parallel@^1.1.9: version "1.2.0" - resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: queue-microtask "^1.2.2" -rxjs@^6.6.0: +rxjs@^6.3.3: version "6.6.7" - resolved "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== dependencies: tslib "^1.9.0" -rxjs@^7.8.1: - version "7.8.1" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" - integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== +rxjs@^7.2.0, rxjs@^7.8.1: + version "7.8.2" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.2.tgz#955bc473ed8af11a002a2be52071bf475638607b" + integrity 
sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA== dependencies: tslib "^2.1.0" +safe-array-concat@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.3.tgz#c9e54ec4f603b0bbb8e7e5007a5ee7aecd1538c3" + integrity sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.2" + get-intrinsic "^1.2.6" + has-symbols "^1.1.0" + isarray "^2.0.5" + safe-buffer@*, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-json-stringify@~1: - version "1.2.0" - resolved "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz" - integrity sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg== +safe-push-apply@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/safe-push-apply/-/safe-push-apply-1.0.0.tgz#01850e981c1602d398c85081f360e4e6d03d27f5" + integrity sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA== + dependencies: + es-errors "^1.3.0" + isarray "^2.0.5" -safe-regex@^1.1.0: +safe-regex-test@^1.1.0: version "1.1.0" - resolved 
"https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.1.0.tgz#7f87dfb67a3150782eaaf18583ff5d1711ac10c1" + integrity sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw== dependencies: - ret "~0.1.10" + call-bound "^1.0.2" + es-errors "^1.3.0" + is-regex "^1.2.1" + +safe-stable-stringify@^2.3.1, safe-stable-stringify@^2.4.3: + version "2.5.0" + resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz#4ca2f8e385f2831c432a719b108a3bf7af42a1dd" + integrity sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA== "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" - resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -samsam@1.3.0: - version "1.3.0" - resolved "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz" - integrity sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg== - sax@>=0.6.0: - version "1.2.4" - resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + version "1.4.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f" + integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== saxes@^5.0.1: version "5.0.1" @@ -7443,79 +11446,119 @@ saxes@^6.0.0: dependencies: xmlchars "^2.2.0" -scoped-regex@^1.0.0: +section-matter@^1.0.0: 
version "1.0.0" - resolved "https://registry.npmjs.org/scoped-regex/-/scoped-regex-1.0.0.tgz" - integrity sha1-o0a7Gs1CB65wvXwMfKnlZra63bg= - -semver-diff@^3.1.1: - version "3.1.1" - resolved "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz" - integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== + resolved "https://registry.yarnpkg.com/section-matter/-/section-matter-1.0.0.tgz#e9041953506780ec01d59f292a19c7b850b84167" + integrity sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA== dependencies: - semver "^6.3.0" + extend-shallow "^2.0.1" + kind-of "^6.0.0" + +secure-json-parse@^2.4.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.7.0.tgz#5a5f9cd6ae47df23dba3151edd06855d47e09862" + integrity sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw== -"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0: +"semver@2 || 3 || 4 || 5": version "5.7.2" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== -semver@^6.0.0, semver@^6.2.0, semver@^6.3.0: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity "sha1-VW0u+GiRRuRtzqS/3QlfNDTf/LQ= sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" - -semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7: +semver@7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" -semver@^7.6.0: - version "7.6.0" - 
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" - integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== +semver@^6.0.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.1.1, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.3, semver@^7.5.4, semver@^7.6.0, semver@^7.6.3, semver@^7.7.1, semver@^7.7.2: + version "7.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.2.tgz#67d99fdcd35cec21e6f8b87a7fd515a33f982b58" + integrity sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA== + +sentence-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/sentence-case/-/sentence-case-3.0.4.tgz#3645a7b8c117c787fde8702056225bb62a45131f" + integrity sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg== dependencies: - lru-cache "^6.0.0" + no-case "^3.0.4" + tslib "^2.0.3" + upper-case-first "^2.0.2" sequin@*: version "0.1.1" - resolved "https://registry.npmjs.org/sequin/-/sequin-0.1.1.tgz" - integrity sha1-XC04nWajg3NOqvvEXt6ywcsb5wE= + resolved "https://registry.yarnpkg.com/sequin/-/sequin-0.1.1.tgz#5c2d389d66a383734eaafbc45edeb2c1cb1be701" + integrity sha512-hJWMZRwP75ocoBM+1/YaCsvS0j5MTPeBHJkS2/wruehl9xwtX30HlDF1Gt6UZ8HHHY8SJa2/IL+jo+JJCd59rA== -serialize-javascript@5.0.1: - version "5.0.1" - resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz" - integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== +serialize-javascript@^6.0.2: + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" + integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== dependencies: randombytes "^2.1.0" +serve-handler@^6.1.3: + version "6.1.6" + resolved "https://registry.yarnpkg.com/serve-handler/-/serve-handler-6.1.6.tgz#50803c1d3e947cd4a341d617f8209b22bd76cfa1" + integrity sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ== + dependencies: + bytes "3.0.0" + content-disposition "0.5.2" + mime-types "2.1.18" + minimatch "3.1.2" + path-is-inside "1.0.2" + path-to-regexp "3.3.0" + range-parser "1.2.0" + +server-destroy@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/server-destroy/-/server-destroy-1.0.1.tgz#f13bf928e42b9c3e79383e61cc3998b5d14e6cdd" + integrity sha512-rb+9B5YBIEzYcD6x2VKidaa+cqYBJQKnU4oe4E3ANwRRN56yk/ua1YCJT1n21NTS8w6CcOclAKNP3PhdCXKYtQ== + set-blocking@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== -set-getter@^0.1.0: - version "0.1.1" - resolved "https://registry.npmjs.org/set-getter/-/set-getter-0.1.1.tgz" - integrity sha512-9sVWOy+gthr+0G9DzqqLaYNA7+5OKkSmcqjL9cBpDEaZrr3ShQlyX2cZ/O/ozE41oxn/Tt0LGEM/w4Rub3A3gw== +set-function-length@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== dependencies: - to-object-path "^0.3.0" + define-data-property "^1.1.4" + es-errors "^1.3.0" + 
function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" -set-value@^2.0.0, set-value@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz" - integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== +set-function-name@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.2.tgz#16a705c5a0dc2f5e638ca96d8a8cd4e1c2b90985" + integrity sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ== dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" + define-data-property "^1.1.4" + es-errors "^1.3.0" + functions-have-names "^1.2.3" + has-property-descriptors "^1.0.2" + +set-proto@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/set-proto/-/set-proto-1.0.0.tgz#0760dbcff30b2d7e801fd6e19983e56da337565e" + integrity sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw== + dependencies: + dunder-proto "^1.0.1" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" -set-value@^4.0.0: +set-value@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/set-value/-/set-value-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-4.1.0.tgz#aa433662d87081b75ad88a4743bd450f044e7d09" integrity sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw== dependencies: is-plain-object "^2.0.4" @@ -7526,203 +11569,283 @@ setimmediate@^1.0.5, setimmediate@~1.0.4: resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== -shallow-clone@^3.0.0: - version "3.0.1" - resolved 
"https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz" - integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== - dependencies: - kind-of "^6.0.2" - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= - dependencies: - shebang-regex "^1.0.0" - shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: shebang-regex "^3.0.0" -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - shebang-regex@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -shelljs@^0.8.4: +shelljs@^0.8.4, shelljs@^0.8.5: version "0.8.5" - resolved "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== dependencies: glob "^7.0.0" interpret "^1.0.0" rechoir "^0.6.2" -shx@^0.3.3: - version "0.3.3" - resolved "https://registry.npmjs.org/shx/-/shx-0.3.3.tgz" - integrity sha512-nZJ3HFWVoTSyyB+evEKjJ1STiixGztlqwKLTUNV5KqMWtGey9fTd4KU1gdZ1X9BV6215pswQ/Jew9NsuS/fNDA== +shiki@^1.16.2: + version "1.29.2" + resolved 
"https://registry.yarnpkg.com/shiki/-/shiki-1.29.2.tgz#5c93771f2d5305ce9c05975c33689116a27dc657" + integrity sha512-njXuliz/cP+67jU2hukkxCNuH1yUi4QfdZZY+sMr5PPrIyXSu5iTb/qYC4BiWWB0vZ+7TbdvYUCeL23zpwCfbg== + dependencies: + "@shikijs/core" "1.29.2" + "@shikijs/engine-javascript" "1.29.2" + "@shikijs/engine-oniguruma" "1.29.2" + "@shikijs/langs" "1.29.2" + "@shikijs/themes" "1.29.2" + "@shikijs/types" "1.29.2" + "@shikijs/vscode-textmate" "^10.0.1" + "@types/hast" "^3.0.4" + +side-channel-list@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/side-channel-list/-/side-channel-list-1.0.0.tgz#10cb5984263115d3b7a0e336591e290a830af8ad" + integrity sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA== dependencies: - minimist "^1.2.3" - shelljs "^0.8.4" + es-errors "^1.3.0" + object-inspect "^1.13.3" -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== +side-channel-map@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/side-channel-map/-/side-channel-map-1.0.1.tgz#d6bb6b37902c6fef5174e5f533fab4c732a26f42" + integrity sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA== dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" + call-bound "^1.0.2" + es-errors "^1.3.0" + get-intrinsic "^1.2.5" + object-inspect "^1.13.3" -signal-exit@^3.0.0, signal-exit@^3.0.2: - version "3.0.3" - resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz" - integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== +side-channel-weakmap@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz#11dda19d5368e40ce9ec2bdc1fb0ecbc0790ecea" + integrity sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A== + dependencies: + call-bound "^1.0.2" + es-errors "^1.3.0" + get-intrinsic "^1.2.5" + object-inspect "^1.13.3" + side-channel-map "^1.0.1" + +side-channel@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.1.0.tgz#c3fcff9c4da932784873335ec9765fa94ff66bc9" + integrity sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw== + dependencies: + es-errors "^1.3.0" + object-inspect "^1.13.3" + side-channel-list "^1.0.0" + side-channel-map "^1.0.1" + side-channel-weakmap "^1.0.2" + +signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== signal-exit@^4.0.1, signal-exit@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== -simple-git@^3.20.0: - version "3.20.0" - resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-3.20.0.tgz#ff9c3f736d6b2bf0e3510209569d206aac84833d" - integrity sha512-ozK8tl2hvLts8ijTs18iFruE+RoqmC/mqZhjs/+V7gS5W68JpJ3+FCTmLVqmR59MaUQ52MfGQuWsIqfsTbbJ0Q== +sigstore@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/sigstore/-/sigstore-2.3.1.tgz#0755dd2cc4820f2e922506da54d3d628e13bfa39" + integrity sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ== + dependencies: + "@sigstore/bundle" "^2.3.2" + "@sigstore/core" "^1.0.0" + 
"@sigstore/protobuf-specs" "^0.3.2" + "@sigstore/sign" "^2.3.2" + "@sigstore/tuf" "^2.3.4" + "@sigstore/verify" "^1.2.1" + +simple-concat@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" + integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== + +simple-get@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" + integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== + dependencies: + decompress-response "^6.0.0" + once "^1.3.1" + simple-concat "^1.0.0" + +simple-git@^3.20.0, simple-git@^3.28.0: + version "3.28.0" + resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-3.28.0.tgz#c6345b2e387880f8450788a1e388573366ae48ac" + integrity sha512-Rs/vQRwsn1ILH1oBUy8NucJlXmnnLeLCfcvbSehkPzbv3wwoFWIdtfd6Ndo6ZPhlPsCZ60CPI4rxurnwAa+a2w== dependencies: "@kwsites/file-exists" "^1.1.1" "@kwsites/promise-deferred" "^1.1.1" - debug "^4.3.4" + debug "^4.4.0" -sinon@5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/sinon/-/sinon-5.1.1.tgz" - integrity sha512-h/3uHscbt5pQNxkf7Y/Lb9/OM44YNCicHakcq73ncbrIS8lXg+ZGOZbtuU+/km4YnyiCYfQQEwANaReJz7KDfw== - dependencies: - "@sinonjs/formatio" "^2.0.0" - diff "^3.5.0" - lodash.get "^4.4.2" - lolex "^2.4.2" - nise "^1.3.3" - supports-color "^5.4.0" - type-detect "^4.0.8" +simple-wcswidth@^1.0.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/simple-wcswidth/-/simple-wcswidth-1.1.2.tgz#66722f37629d5203f9b47c5477b1225b85d6525b" + integrity sha512-j7piyCjAeTDSjzTSQ7DokZtMNwNlEAyxqSZeCS+CXH7fJ4jx3FuJ/mTW3mE+6JLs4VJBbcll0Kjn+KXI5t21Iw== + +sinon@10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-10.0.0.tgz#52279f97e35646ff73d23207d0307977c9b81430" + integrity 
sha512-XAn5DxtGVJBlBWYrcYKEhWCz7FLwZGdyvANRyK06419hyEpdT0dMc5A8Vcxg5SCGHc40CsqoKsc1bt1CbJPfNw== + dependencies: + "@sinonjs/commons" "^1.8.1" + "@sinonjs/fake-timers" "^6.0.1" + "@sinonjs/samsam" "^5.3.1" + diff "^4.0.2" + nise "^4.1.0" + supports-color "^7.1.0" -slash@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz" - integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU= +sinon@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-17.0.2.tgz#470894bcc2d24b01bad539722ea46da949892405" + integrity sha512-uihLiaB9FhzesElPDFZA7hDcNABzsVHwr3YfmM9sBllVwab3l0ltGlRV1XhpNfIacNDLGD1QRZNLs5nU5+hTuA== + dependencies: + "@sinonjs/commons" "^3.0.1" + "@sinonjs/fake-timers" "^11.2.2" + "@sinonjs/samsam" "^8.0.0" + diff "^5.2.0" + nise "^5.1.9" + supports-color "^7" -slash@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz" - integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== slash@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== -slash@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" - integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== +slash@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-5.1.0.tgz#be3adddcdf09ac38eebe8dcdc7b1a57a75b095ce" + integrity 
sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg== + +slice-ansi@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" + integrity sha512-up04hB2hR92PgjpyU3y/eg91yIBILyjVY26NvvciY3EVVPjybkMszMpXQ9QAkcS3I5rtJBDLoTxxg+qvW8c7rw== + +slice-ansi@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-7.1.0.tgz#cd6b4655e298a8d1bdeb04250a433094b347b9a9" + integrity sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg== + dependencies: + ansi-styles "^6.2.1" + is-fullwidth-code-point "^5.0.0" + +smart-buffer@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" + integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== -slice-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz" - integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== +snake-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c" + integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg== dependencies: - ansi-styles "^4.0.0" - astral-regex "^2.0.0" - is-fullwidth-code-point "^3.0.0" + dot-case "^3.0.4" + tslib "^2.0.3" -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz" - integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== +socks-proxy-agent@^8.0.3, socks-proxy-agent@^8.0.5: + version "8.0.5" + resolved 
"https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz#b9cdb4e7e998509d7659d689ce7697ac21645bee" + integrity sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw== dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" + agent-base "^7.1.2" + debug "^4.3.4" + socks "^2.8.3" -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== +socks@^2.8.3: + version "2.8.6" + resolved "https://registry.yarnpkg.com/socks/-/socks-2.8.6.tgz#e335486a2552f34f932f0c27d8dbb93f2be867aa" + integrity sha512-pe4Y2yzru68lXCb38aAqRf5gvN8YdjP1lok5o0J7BOHljkyCGKVz7H3vpVIXKD27rj2giOJ7DwVyk/GWrPHDWA== dependencies: - kind-of "^3.2.0" + ip-address "^9.0.5" + smart-buffer "^4.2.0" -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== +sonic-boom@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-4.2.0.tgz#e59a525f831210fa4ef1896428338641ac1c124d" + integrity sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww== dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" + atomic-sleep "^1.0.0" -sort-array@^4.1.3: - version "4.1.4" - resolved "https://registry.npmjs.org/sort-array/-/sort-array-4.1.4.tgz" - integrity sha512-GVFN6Y1sHKrWaSYOJTk9093ZnrBMc9sP3nuhANU44S4xg3rE6W5Z5WyamuT8VpMBbssnetx5faKCua0LEmUnSw== +sort-array@^4.1.5: + version "4.1.5" + resolved 
"https://registry.yarnpkg.com/sort-array/-/sort-array-4.1.5.tgz#64b92aaba222aec606786f4df28ae4e3e3e68313" + integrity sha512-Ya4peoS1fgFN42RN1REk2FgdNOeLIEMKFGJvs7VTP3OklF8+kl2SkpVliZ4tk/PurWsrWRsdNdU+tgyOBkB9sA== dependencies: array-back "^5.0.0" typical "^6.0.1" -sort-keys@^4.0.0: - version "4.2.0" - resolved "https://registry.npmjs.org/sort-keys/-/sort-keys-4.2.0.tgz" - integrity sha512-aUYIEU/UviqPgc8mHR6IW1EGxkAXpeRETYcrzg8cLAvUPZcpAlleSXHV2mY7G12GphSH6Gzv+4MMVSSkbdteHg== +sort-array@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/sort-array/-/sort-array-5.1.1.tgz#92f5ee092fb2cca1dc3b46eee102a0d3a3dfc944" + integrity sha512-EltS7AIsNlAFIM9cayrgKrM6XP94ATWwXP4LCL4IQbvbYhELSt2hZTrixg+AaQwnWFs/JGJgqU3rxMcNNWxGAA== + dependencies: + array-back "^6.2.2" + typical "^7.1.1" + +sort-keys@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-5.1.0.tgz#50a3f3d1ad3c5a76d043e0aeeba7299241e9aa5c" + integrity sha512-aSbHV0DaBcr7u0PVHXzM6NbZNAtrr9sF6+Qfs9UUVG7Ll3jQ6hHi8F/xqIIcn2rvIVbr0v/2zyjSdwSV47AgLQ== dependencies: - is-plain-obj "^2.0.0" + is-plain-obj "^4.0.0" sort-object-keys@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.3.tgz#bff833fe85cab147b34742e45863453c1e190b45" integrity sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg== -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - -source-map-url@^0.4.0: - version "0.4.1" - resolved "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz" - integrity 
sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== +sort-package-json@^2.15.1: + version "2.15.1" + resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-2.15.1.tgz#e5a035fad7da277b1947b9eecc93ea09c1c2526e" + integrity sha512-9x9+o8krTT2saA9liI4BljNjwAbvUnWf11Wq+i/iZt8nl2UGYnf3TH5uBydE7VALmP7AGwlfszuEeL8BDyb0YA== + dependencies: + detect-indent "^7.0.1" + detect-newline "^4.0.0" + get-stdin "^9.0.0" + git-hooks-list "^3.0.0" + is-plain-obj "^4.1.0" + semver "^7.6.0" + sort-object-keys "^1.1.3" + tinyglobby "^0.2.9" -source-map@^0.5.0, source-map@^0.5.6: - version "0.5.7" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= +source-map-support@^0.5.21: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" -source-map@^0.6.1: +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: version "0.6.1" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== +space-separated-tokens@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f" + integrity sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q== + spawn-wrap@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-2.0.0.tgz#103685b8b8f9b79771318827aa78650a610d457e" integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg== dependencies: foreground-child "^2.0.0" @@ -7733,56 +11856,79 @@ spawn-wrap@^2.0.0: which "^2.0.1" spdx-correct@^3.0.0: - version "3.1.1" - resolved "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz" - integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + version "3.2.0" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" + integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: - version "2.3.0" - resolved "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz" - integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + version "2.5.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66" + integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w== spdx-expression-parse@^3.0.0: version "3.0.1" - resolved "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" +spdx-expression-parse@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-4.0.0.tgz#a23af9f3132115465dac215c099303e4ceac5794" + integrity 
sha512-Clya5JIij/7C6bRR22+tnGXbc4VKlibKSVj2iHvVeX5iMW7s1SIQlqu699JkODJJIhh/pUu8L0/VLh8xflD+LQ== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + spdx-license-ids@^3.0.0: - version "3.0.7" - resolved "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz" - integrity sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ== + version "3.0.21" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz#6d6e980c9df2b6fc905343a3b2d702a6239536c3" + integrity sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg== split-on-first@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f" integrity sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw== -split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== +split2@^3.0.0, split2@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" + integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== dependencies: - extend-shallow "^3.0.0" + readable-stream "^3.0.0" + +split2@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4" + integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg== split@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/split/-/split-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/split/-/split-1.0.1.tgz#605bd9be303aa59fb35f9229fbea0ddec9ea07d9" integrity 
sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg== dependencies: through "2" +sprintf-js@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" + integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== + sprintf-js@~1.0.2: version "1.0.3" - resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" - integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +srcset@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/srcset/-/srcset-5.0.1.tgz#e660a728f195419e4afa95121099bc9efb7a1e36" + integrity sha512-/P1UYbGfJVlxZag7aABNRrulEXAwCSDo7fklafOQrantuPTDmYgijJMks2zusPCVzgW9+4P69mq7w6pYuZpgxw== -sshpk@^1.14.1: +sshpk@^1.18.0: version "1.18.0" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.18.0.tgz#1663e55cddf4d688b86a46b77f0d5fe363aba028" integrity sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ== @@ -7797,41 +11943,32 @@ sshpk@^1.14.1: safer-buffer "^2.0.2" tweetnacl "~0.14.0" -sshpk@^1.7.0: - version "1.16.1" - resolved "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz" - integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== +ssri@^10.0.0, ssri@^10.0.6: + version "10.0.6" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-10.0.6.tgz#a8aade2de60ba2bce8688e3fa349bad05c7dc1e5" + integrity sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ== dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - bcrypt-pbkdf "^1.0.0" - dashdash "^1.12.0" - ecc-jsbn "~0.1.1" - getpass "^0.1.1" - jsbn "~0.1.0" - safer-buffer 
"^2.0.2" - tweetnacl "~0.14.0" - -standard-as-callback@^2.0.1: - version "2.1.0" - resolved "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz" - integrity sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A== + minipass "^7.0.3" -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= +ssri@^12.0.0: + version "12.0.0" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-12.0.0.tgz#bcb4258417c702472f8191981d3c8a771fee6832" + integrity sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ== dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" + minipass "^7.0.3" -stdout-stderr@^0.1.9: - version "0.1.13" - resolved "https://registry.npmjs.org/stdout-stderr/-/stdout-stderr-0.1.13.tgz" - integrity sha512-Xnt9/HHHYfjZ7NeQLvuQDyL1LnbsbddgMFKCuaQKwGCdJm8LnstZIXop+uOY36UR1UXXoHXfMbC1KlVdVd2JLA== +stdin-discarder@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/stdin-discarder/-/stdin-discarder-0.2.2.tgz#390037f44c4ae1a1ae535c5fe38dc3aba8d997be" + integrity sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ== + +stop-iteration-iterator@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz#f481ff70a548f6124d0312c3aa14cbfa7aa542ad" + integrity sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ== dependencies: - debug "^4.1.1" - strip-ansi "^6.0.0" + es-errors "^1.3.0" + internal-slot "^1.1.0" stream-length@^1.0.2: version "1.0.2" @@ -7840,6 +11977,16 @@ stream-length@^1.0.2: dependencies: bluebird "^2.6.2" +streamx@^2.12.5, streamx@^2.15.0, streamx@^2.21.0: + version "2.22.1" + resolved 
"https://registry.yarnpkg.com/streamx/-/streamx-2.22.1.tgz#c97cbb0ce18da4f4db5a971dc9ab68ff5dc7f5a5" + integrity sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA== + dependencies: + fast-fifo "^1.3.2" + text-decoder "^1.1.0" + optionalDependencies: + bare-events "^2.2.0" + strict-uri-encode@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" @@ -7847,13 +11994,8 @@ strict-uri-encode@^2.0.0: string-template@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/string-template/-/string-template-1.0.0.tgz" - integrity sha1-np8iM9wA8hhxjsN5oopWc+zKi5Y= - -string-template@~0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/string-template/-/string-template-0.2.1.tgz" - integrity sha1-QpMuWYo1LQH8IuwzZ9nYTuxsmt0= + resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96" + integrity sha512-SLqR3GBUXuoPP5MmYtD7ompvXiG87QjT6lzOszyXjTM86Uu7At7vNnt2xgyTLq5o9T4IxTYFyGxcULqpsmsfdg== "string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" @@ -7866,38 +12008,29 @@ string-template@~0.2.1: string-width@^1.0.1: version "1.0.2" - resolved "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw== dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: +string-width@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz" + resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" -string-width@^3.0.0: - version "3.1.0" - resolved "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - -string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0: - version "4.2.2" - resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz" - integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.0" + strip-ansi "^6.0.1" string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" @@ -7908,20 +12041,69 @@ string-width@^5.0.1, string-width@^5.1.2: emoji-regex "^9.2.2" strip-ansi "^7.0.1" -string_decoder@^1.1.1: +string-width@^7.0.0, string-width@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-7.2.0.tgz#b5bb8e2165ce275d4d43476dd2700ad9091db6dc" + integrity sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ== + dependencies: + emoji-regex "^10.3.0" + get-east-asian-width "^1.0.0" + strip-ansi "^7.1.0" + +string.prototype.trim@^1.2.10: + version "1.2.10" + resolved 
"https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz#40b2dd5ee94c959b4dcfb1d65ce72e90da480c81" + integrity sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.2" + define-data-property "^1.1.4" + define-properties "^1.2.1" + es-abstract "^1.23.5" + es-object-atoms "^1.0.0" + has-property-descriptors "^1.0.2" + +string.prototype.trimend@^1.0.9: + version "1.0.9" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz#62e2731272cd285041b36596054e9f66569b6942" + integrity sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ== + dependencies: + call-bind "^1.0.8" + call-bound "^1.0.2" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + +string.prototype.trimstart@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz#7ee834dda8c7c17eff3118472bb35bfedaa34dde" + integrity sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + +string_decoder@^1.1.1, string_decoder@^1.3.0: version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity 
sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" +stringify-entities@^4.0.0: + version "4.0.4" + resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-4.0.4.tgz#b3b79ef5f277cc4ac73caeb0236c5ba939b3a4f3" + integrity sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg== + dependencies: + character-entities-html4 "^2.0.0" + character-entities-legacy "^3.0.0" + "strip-ansi-cjs@npm:strip-ansi@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -7929,198 +12111,187 @@ string_decoder@~1.1.1: dependencies: ansi-regex "^5.0.1" +strip-ansi@6.0.1, strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg== dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow== dependencies: ansi-regex "^3.0.0" -strip-ansi@^5.0.0, strip-ansi@^5.1.0: - version 
"5.2.0" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== - dependencies: - ansi-regex "^4.1.0" - -strip-ansi@^6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz" - integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== - dependencies: - ansi-regex "^5.0.0" - -strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^7.0.1: +strip-ansi@^7.0.1, strip-ansi@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== dependencies: ansi-regex "^6.0.1" -strip-bom-buf@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/strip-bom-buf/-/strip-bom-buf-1.0.0.tgz" - integrity sha1-HLRar1dTD0yvhsf3UXnSyaUd1XI= +strip-bom-buf@^3.0.0, strip-bom-buf@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-bom-buf/-/strip-bom-buf-3.0.1.tgz#3f50f447f4650eed6e3743664a3964d638bb716f" + integrity sha512-iJaWw2WroigLHzQysdc5WWeUc99p7ea7AEgB6JkY8CMyiO1yTVAA1gIlJJgORElUIR+lcZJkNl1OGChMhvc2Cw== dependencies: is-utf8 "^0.2.1" -strip-bom-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/strip-bom-stream/-/strip-bom-stream-2.0.0.tgz" - integrity sha1-+H217yYT9paKpUWr/h7HKLaoKco= +strip-bom-stream@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/strip-bom-stream/-/strip-bom-stream-5.0.0.tgz#7287b857964c8884d01cee17cb9f86b53e4379a0" + integrity 
sha512-Yo472mU+3smhzqeKlIxClre4s4pwtYZEvDNQvY/sJpnChdaxmKuwU28UVx/v1ORKNMxkmj1GBuvxJQyBk6wYMQ== dependencies: - first-chunk-stream "^2.0.0" - strip-bom "^2.0.0" + first-chunk-stream "^5.0.0" + strip-bom-buf "^3.0.0" -strip-bom@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz" - integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= - dependencies: - is-utf8 "^0.2.0" +strip-bom-string@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92" + integrity sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g== + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== strip-bom@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== -strip-color@^0.1.0: - version "0.1.0" - resolved "https://registry.npmjs.org/strip-color/-/strip-color-0.1.0.tgz" - integrity sha1-EG9l09PmotlAHKwOsM6LinArT3s= - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= - strip-final-newline@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== 
-strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: +strip-final-newline@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd" + integrity sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: version "3.1.1" - resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== strip-json-comments@~2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz" - integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== -supports-color@8.1.1: - version "8.1.1" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" +strnum@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.1.2.tgz#57bca4fbaa6f271081715dbc9ed7cee5493e28e4" + integrity 
sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA== + +strnum@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/strnum/-/strnum-2.1.1.tgz#cf2a6e0cf903728b8b2c4b971b7e36b4e82d46ab" + integrity sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw== + +stubborn-fs@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/stubborn-fs/-/stubborn-fs-1.2.5.tgz#e5e244223166921ddf66ed5e062b6b3bf285bfd2" + integrity sha512-H2N9c26eXjzL/S/K+i/RHHcFanE74dptvvjM8iwzwbVcWY/zjBbgRqF3K0DY4+OD+uTTASTBvDoxPDaPN02D7g== supports-color@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g== -supports-color@^5.0.0, supports-color@^5.3.0, supports-color@^5.4.0, supports-color@^5.5.0: +supports-color@^5.3.0: version "5.5.0" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" -supports-color@^7.0.0, supports-color@^7.1.0: +supports-color@^7, supports-color@^7.0.0, supports-color@^7.1.0: version "7.2.0" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" -supports-hyperlinks@^1.0.1: - 
version "1.0.1" - resolved "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-1.0.1.tgz" - integrity sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw== +supports-color@^8, supports-color@^8.1.1: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: - has-flag "^2.0.0" - supports-color "^5.0.0" + has-flag "^4.0.0" -supports-hyperlinks@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.1.0.tgz" - integrity sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA== +supports-hyperlinks@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== dependencies: has-flag "^4.0.0" supports-color "^7.0.0" supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +symbol-observable@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" + integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== + symbol-tree@^3.2.4: version "3.2.4" resolved 
"https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -synp@^1.9.10: - version "1.9.10" - resolved "https://registry.yarnpkg.com/synp/-/synp-1.9.10.tgz#53163321a600418c9b06af0db499939ffce12907" - integrity sha512-G9Z/TXTaBG1xNslUf3dHFidz/8tvvRaR560WWyOwyI7XrGGEGBTEIIg4hdRh1qFtz8mPYynAUYwWXUg/Zh0Pzw== - dependencies: - "@yarnpkg/lockfile" "^1.1.0" - bash-glob "^2.0.0" - colors "1.4.0" - commander "^7.2.0" - eol "^0.9.1" - lodash "4.17.21" - nmtree "^1.0.6" - semver "^7.3.5" - sort-object-keys "^1.1.3" - -table@^6.0.4: - version "6.0.7" - resolved "https://registry.npmjs.org/table/-/table-6.0.7.tgz" - integrity sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g== - dependencies: - ajv "^7.0.2" - lodash "^4.17.20" - slice-ansi "^4.0.0" - string-width "^4.2.0" - -taketalk@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/taketalk/-/taketalk-1.0.0.tgz" - integrity sha1-tNTw3u0gauffd1sSnqLKbeUvJt0= +table-layout@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/table-layout/-/table-layout-4.1.1.tgz#0f72965de1a5c0c1419c9ba21cae4e73a2f73a42" + integrity sha512-iK5/YhZxq5GO5z8wb0bY1317uDF3Zjpha0QFFLA8/trAoiLbQD0HUbMesEaxyzUgDxi2QlcbM8IvqOlEjgoXBA== dependencies: - get-stdin "^4.0.1" - minimist "^1.1.0" + array-back "^6.2.2" + wordwrapjs "^5.1.0" -tar-fs@2.1.1, tar-fs@^2.0.0: - version "2.1.1" - resolved "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== +tar-fs@^2.0.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.3.tgz#fb3b8843a26b6f13a08e606f7922875eb1fbbf92" + integrity sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg== dependencies: chownr "^1.1.1" 
mkdirp-classic "^0.5.2" pump "^3.0.0" tar-stream "^2.1.4" +tar-fs@^3.0.6, tar-fs@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.1.0.tgz#4675e2254d81410e609d91581a762608de999d25" + integrity sha512-5Mty5y/sOF1YWj1J6GiBodjlDc05CUR8PKXrsnFAiSG0xA+GHeWLovaZPYUDXkH/1iKRf2+M5+OrRgzC7O9b7w== + dependencies: + pump "^3.0.0" + tar-stream "^3.1.5" + optionalDependencies: + bare-fs "^4.0.1" + bare-path "^3.0.0" + tar-stream@^2.1.4, tar-stream@^2.2.0: version "2.2.0" - resolved "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== dependencies: bl "^4.0.3" @@ -8129,165 +12300,221 @@ tar-stream@^2.1.4, tar-stream@^2.2.0: inherits "^2.0.3" readable-stream "^3.1.1" +tar-stream@^3.1.5: + version "3.1.7" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b" + integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ== + dependencies: + b4a "^1.6.4" + fast-fifo "^1.2.0" + streamx "^2.15.0" + +tar@^6.1.11, tar@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" + integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== + dependencies: + chownr "^2.0.0" + fs-minipass "^2.0.0" + minipass "^5.0.0" + minizlib "^2.1.1" + mkdirp "^1.0.3" + yallist "^4.0.0" + +tar@^7.4.3: + version "7.4.3" + resolved "https://registry.yarnpkg.com/tar/-/tar-7.4.3.tgz#88bbe9286a3fcd900e94592cda7a22b192e80571" + integrity sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw== + dependencies: + "@isaacs/fs-minipass" "^4.0.0" + chownr "^3.0.0" + minipass "^7.1.2" + minizlib 
"^3.0.1" + mkdirp "^3.0.1" + yallist "^5.0.0" + +teex@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/teex/-/teex-1.0.1.tgz#b8fa7245ef8e8effa8078281946c85ab780a0b12" + integrity sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg== + dependencies: + streamx "^2.12.5" + +terminal-link@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-3.0.0.tgz#91c82a66b52fc1684123297ce384429faf72ac5c" + integrity sha512-flFL3m4wuixmf6IfhFJd1YPiLiMuxEc8uHRM1buzIeZPm22Au2pDqBJQgdo7n1WfPU1ONFGv7YDwpFBmHGF6lg== + dependencies: + ansi-escapes "^5.0.0" + supports-hyperlinks "^2.2.0" + test-exclude@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== dependencies: "@istanbuljs/schema" "^0.1.2" glob "^7.1.4" minimatch "^3.0.4" -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= +text-decoder@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/text-decoder/-/text-decoder-1.2.3.tgz#b19da364d981b2326d5f43099c310cc80d770c65" + integrity sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA== + dependencies: + b4a "^1.6.4" -textextensions@^2.5.0: - version "2.6.0" - resolved "https://registry.npmjs.org/textextensions/-/textextensions-2.6.0.tgz" - integrity sha512-49WtAWS+tcsy93dRt6P0P3AMD2m5PvXRhuEA0kaXos5ZLlujtYmpmFsB+QvWUSxE1ZsstmYXfQ7L40+EcQgpAQ== +text-extensions@^1.0.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/text-extensions/-/text-extensions-1.9.0.tgz#1853e45fee39c945ce6f6c36b2d659b5aabc2a26" + integrity 
sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ== -through2@^2.0.0: - version "2.0.5" - resolved "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" - integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== - dependencies: - readable-stream "~2.3.6" - xtend "~4.0.1" +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== -through2@^3.0.0, through2@^3.0.1, through2@^3.0.2: - version "3.0.2" - resolved "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz" - integrity sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ== +textextensions@^6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/textextensions/-/textextensions-6.11.0.tgz#864535d09f49026150c96f0b0d79f1fa0869db15" + integrity sha512-tXJwSr9355kFJI3lbCkPpUH5cP8/M0GGy2xLO34aZCjMXBaK3SoPnZwr/oWmo1FdCnELcs4npdCIOFtq9W3ruQ== dependencies: - inherits "^2.0.4" - readable-stream "2 || 3" - -through@2, "through@>=2.2.7 <3", through@^2.3.6, through@^2.3.8: - version "2.3.8" - resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + editions "^6.21.0" -timed-out@^4.0.0: - version "4.0.1" - resolved "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz" - integrity sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8= +thingies@^2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/thingies/-/thingies-2.5.0.tgz#5f7b882c933b85989f8466b528a6247a6881e04f" + integrity sha512-s+2Bwztg6PhWUD7XMfeYm5qliDdSiZm7M7n8KjTkIsm3l/2lgVRc2/Gx/v+ZX8lT4FMA+i8aQvhcWylldc+ZNw== -tmp@^0.0.33: - version "0.0.33" - resolved "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz" - integrity 
sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== +thread-stream@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/thread-stream/-/thread-stream-3.1.0.tgz#4b2ef252a7c215064507d4ef70c05a5e2d34c4f1" + integrity sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A== dependencies: - os-tmpdir "~1.0.2" + real-require "^0.2.0" -tmp@^0.1.0: - version "0.1.0" - resolved "https://registry.npmjs.org/tmp/-/tmp-0.1.0.tgz" - integrity sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw== +through2@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764" + integrity sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw== dependencies: - rimraf "^2.6.3" + readable-stream "3" -tmp@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14" - integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ== - dependencies: - rimraf "^3.0.0" +through@2, "through@>=2.2.7 <3", through@^2.3.8: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= +tiny-jsonc@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/tiny-jsonc/-/tiny-jsonc-1.0.2.tgz#208df4c437684199cc724f31c2b91ee39c349678" + integrity sha512-f5QDAfLq6zIVSyCZQZhhyl0QS6MvAyTxgz4X4x3+EoCktNWEYJ6PeoEA97fyb98njpBNNi88ybpD7m+BDFXaCw== -to-object-path@^0.3.0: - version "0.3.0" - resolved 
"https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= +tinyglobby@^0.2.14, tinyglobby@^0.2.9: + version "0.2.14" + resolved "https://registry.yarnpkg.com/tinyglobby/-/tinyglobby-0.2.14.tgz#5280b0cf3f972b050e74ae88406c0a6a58f4079d" + integrity sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ== dependencies: - kind-of "^3.0.2" + fdir "^6.4.4" + picomatch "^4.0.2" -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== +tldts-core@^6.1.86: + version "6.1.86" + resolved "https://registry.yarnpkg.com/tldts-core/-/tldts-core-6.1.86.tgz#a93e6ed9d505cb54c542ce43feb14c73913265d8" + integrity sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA== -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= +tldts@^6.1.32: + version "6.1.86" + resolved "https://registry.yarnpkg.com/tldts/-/tldts-6.1.86.tgz#087e0555b31b9725ee48ca7e77edc56115cd82f7" + integrity sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ== dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" + tldts-core "^6.1.86" + +tmp@0.2.5, tmp@^0.0.33, tmp@^0.2.0: + version "0.2.5" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.5.tgz#b06bcd23f0f3c8357b426891726d16015abfd8f8" + integrity "sha1-sGvNI/DzyDV7QmiRcm0WAVq/2Pg= sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==" to-regex-range@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" + resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - -toml@^2.3.2: - version "2.3.6" - resolved "https://registry.npmjs.org/toml/-/toml-2.3.6.tgz" - integrity sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ== - -tough-cookie@*, tough-cookie@^4.1.3: - version "4.1.3" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" - integrity "sha1-l7mtsHKLQigKo9gUtrmZsv8DGL8= sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==" - dependencies: - psl "^1.1.33" - punycode "^2.1.1" - universalify "^0.2.0" - url-parse "^1.5.3" - -tough-cookie@~2.5.0: - version "2.5.0" - resolved "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz" - integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== +tough-cookie@*, tough-cookie@^5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-5.1.2.tgz#66d774b4a1d9e12dc75089725af3ac75ec31bed7" + integrity sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A== dependencies: - psl "^1.1.28" - punycode "^2.1.1" + tldts "^6.1.32" -tr46@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-5.0.0.tgz#3b46d583613ec7283020d79019f1335723801cec" - integrity 
sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g== +tr46@^5.1.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-5.1.1.tgz#96ae867cddb8fdb64a49cc3059a8d428bcf238ca" + integrity sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw== dependencies: punycode "^2.3.1" tr46@~0.0.3: version "0.0.3" - resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== "traverse@>=0.3.0 <0.4": version "0.3.9" resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9" integrity sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ== -treeify@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/treeify/-/treeify-1.1.0.tgz" - integrity sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A== +tree-dump@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tree-dump/-/tree-dump-1.0.3.tgz#2f0e42e77354714418ed7ab44291e435ccdb0f80" + integrity sha512-il+Cv80yVHFBwokQSfd4bldvr1Md951DpgAGfmhydt04L+YzHgubm2tQ7zueWDcGENKHq0ZvGFR/hjvNXilHEg== + +treeverse@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/treeverse/-/treeverse-3.0.0.tgz#dd82de9eb602115c6ebd77a574aae67003cb48c8" + integrity sha512-gcANaAnd2QDZFmHFEOF4k7uc1J/6a6z3DJMd/QwEyxLoKGiptJRwid582r7QIsFlFMIZ3SnxfS52S4hm2DHkuQ== + +trim-lines@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/trim-lines/-/trim-lines-3.0.1.tgz#d802e332a07df861c48802c04321017b1bd87338" + integrity sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg== + 
+trim-newlines@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144" + integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw== + +ts-api-utils@^1.0.1, ts-api-utils@^1.3.0: + version "1.4.3" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.4.3.tgz#bfc2215fe6528fecab2b0fba570a2e8a4263b064" + integrity sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw== + +ts-error@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/ts-error/-/ts-error-1.0.6.tgz#277496f2a28de6c184cfce8dfd5cdd03a4e6b0fc" + integrity sha512-tLJxacIQUM82IR7JO1UUkKlYuUTmoY9HBJAmNWFzheSlDS5SPMcNIepejHJa4BpPQLAcbRhRf3GDJzyj6rbKvA== + +ts-json-schema-generator@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/ts-json-schema-generator/-/ts-json-schema-generator-1.5.1.tgz#7759c421240be86d393a884ad186f926b22332db" + integrity sha512-apX5qG2+NA66j7b4AJm8q/DpdTeOsjfh7A3LpKsUiil0FepkNwtN28zYgjrsiiya2/OPhsr/PSjX5FUYg79rCg== + dependencies: + "@types/json-schema" "^7.0.15" + commander "^12.0.0" + glob "^8.0.3" + json5 "^2.2.3" + normalize-path "^3.0.0" + safe-stable-stringify "^2.4.3" + typescript "~5.4.2" -ts-node@^10.7.0: - version "10.7.0" - resolved "https://registry.npmjs.org/ts-node/-/ts-node-10.7.0.tgz" - integrity sha512-TbIGS4xgJoX2i3do417KSaep1uRAW/Lu+WAL2doDHC0D6ummjirVOXU5/7aiZotbQ5p1Zp9tP7U6cYhA0O7M8A== +ts-node@^10.8.1, ts-node@^10.9.2: + version "10.9.2" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" + integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== dependencies: - "@cspotcode/source-map-support" "0.7.0" + "@cspotcode/source-map-support" "^0.8.0" "@tsconfig/node10" "^1.0.7" "@tsconfig/node12" "^1.0.7" "@tsconfig/node14" "^1.0.0" @@ -8298,45 
+12525,47 @@ ts-node@^10.7.0: create-require "^1.1.0" diff "^4.0.1" make-error "^1.1.1" - v8-compile-cache-lib "^3.0.0" + v8-compile-cache-lib "^3.0.1" yn "3.1.1" -ts-retry-promise@^0.6.0: - version "0.6.0" - resolved "https://registry.npmjs.org/ts-retry-promise/-/ts-retry-promise-0.6.0.tgz" - integrity sha512-8DF80uA7JPu6m8ouHxGkyBpPTIGQnsgIUgLDqcRaD7EEhVowjG72KqCX334gsa1P+AmzeTVdd/xEzVFCAuPCtg== +ts-retry-promise@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/ts-retry-promise/-/ts-retry-promise-0.8.1.tgz#ba90eb07cb03677fcbf78fe38e94c9183927e154" + integrity sha512-+AHPUmAhr5bSRRK5CurE9kNH8gZlEHnCgusZ0zy2bjfatUBDX0h6vGQjiT0YrGwSDwRZmU+bapeX6mj55FOPvg== + +tsconfig-paths@^3.15.0: + version "3.15.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" + integrity sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.2" + minimist "^1.2.6" + strip-bom "^3.0.0" -tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: +tslib@^1.9.0: version "1.14.1" - resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/tslib/-/tslib-2.1.0.tgz" - integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== - -tslib@^2.2.0, tslib@^2.3.1: - version "2.3.1" - resolved "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz" - integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== - -tslib@^2.4.0, tslib@^2.4.1: - version "2.4.1" - resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e" - integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== +tslib@^2.0.0, tslib@^2.0.1, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.6.2: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== -tsutils@^3.17.1: - version "3.21.0" - resolved "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== +tuf-js@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tuf-js/-/tuf-js-2.2.1.tgz#fdd8794b644af1a75c7aaa2b197ddffeb2911b56" + integrity sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA== dependencies: - tslib "^1.8.1" + "@tufjs/models" "2.0.1" + debug "^4.3.4" + make-fetch-happen "^13.0.1" tunnel-agent@*, tunnel-agent@^0.6.0: version "0.6.0" - resolved "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" - integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== dependencies: safe-buffer "^5.0.1" @@ -8347,125 +12576,314 @@ tunnel@0.0.6, tunnel@^0.0.6: tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" - resolved "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" - integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== type-check@^0.4.0, 
type-check@~0.4.0: version "0.4.0" - resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: prelude-ls "^1.2.1" -type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8: +type-detect@4.0.8: version "4.0.8" - resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== -type-fest@^0.11.0: - version "0.11.0" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz" - integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ== +type-detect@^4.0.0, type-detect@^4.0.8, type-detect@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.1.0.tgz#deb2453e8f08dcae7ae98c626b13dddb0155906c" + integrity sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw== + +type-fest@^0.18.0: + version "0.18.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.18.1.tgz#db4bc151a4a2cf4eebf9add5db75508db6cc841f" + integrity sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw== type-fest@^0.20.2: version "0.20.2" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== type-fest@^0.21.3: version "0.21.3" - resolved 
"https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^0.6.0: version "0.6.0" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== type-fest@^0.8.0, type-fest@^0.8.1: version "0.8.1" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== -typed-rest-client@^1.8.4: - version "1.8.9" - resolved "https://registry.yarnpkg.com/typed-rest-client/-/typed-rest-client-1.8.9.tgz#e560226bcadfe71b0fb5c416b587f8da3b8f92d8" - integrity sha512-uSmjE38B80wjL85UFX3sTYEUlvZ1JgCRhsWj/fJ4rZ0FqDUFoIuodtiVeE+cUqiVTOKPdKrp/sdftD15MDek6g== +type-fest@^1.0.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1" + integrity sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA== + +type-fest@^2.13.0: + version "2.19.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" + integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== + +type-fest@^4.18.2, type-fest@^4.21.0, type-fest@^4.39.1, type-fest@^4.6.0: + version "4.41.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.41.0.tgz#6ae1c8e5731273c2bf1f58ad39cbae2c91a46c58" + integrity 
sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA== + +typed-array-buffer@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz#a72395450a4869ec033fd549371b47af3a2ee536" + integrity sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw== + dependencies: + call-bound "^1.0.3" + es-errors "^1.3.0" + is-typed-array "^1.1.14" + +typed-array-byte-length@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz#8407a04f7d78684f3d252aa1a143d2b77b4160ce" + integrity sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg== + dependencies: + call-bind "^1.0.8" + for-each "^0.3.3" + gopd "^1.2.0" + has-proto "^1.2.0" + is-typed-array "^1.1.14" + +typed-array-byte-offset@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz#ae3698b8ec91a8ab945016108aef00d5bff12355" + integrity sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ== + dependencies: + available-typed-arrays "^1.0.7" + call-bind "^1.0.8" + for-each "^0.3.3" + gopd "^1.2.0" + has-proto "^1.2.0" + is-typed-array "^1.1.15" + reflect.getprototypeof "^1.0.9" + +typed-array-length@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.7.tgz#ee4deff984b64be1e118b0de8c9c877d5ce73d3d" + integrity sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg== + dependencies: + call-bind "^1.0.7" + for-each "^0.3.3" + gopd "^1.0.1" + is-typed-array "^1.1.13" + possible-typed-array-names "^1.0.0" + reflect.getprototypeof "^1.0.6" + +typed-query-selector@^2.12.0: + version "2.12.0" + resolved 
"https://registry.yarnpkg.com/typed-query-selector/-/typed-query-selector-2.12.0.tgz#92b65dbc0a42655fccf4aeb1a08b1dddce8af5f2" + integrity sha512-SbklCd1F0EiZOyPiW192rrHZzZ5sBijB6xM+cpmrwDqObvdtunOHHIk9fCGsoK5JVIYXoyEp4iEdE3upFH3PAg== + +typed-rest-client@2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/typed-rest-client/-/typed-rest-client-2.1.0.tgz#f04c6cfcabc6012c2d036b806eaac455604f1598" + integrity sha512-Nel9aPbgSzRxfs1+4GoSB4wexCF+4Axlk7OSGVQCMa+4fWcyxIsN/YNmkp0xTT2iQzMD98h8yFLav/cNaULmRA== dependencies: - qs "^6.9.1" + des.js "^1.1.0" + js-md4 "^0.3.2" + qs "^6.10.3" tunnel "0.0.6" underscore "^1.12.1" typedarray-to-buffer@^3.1.5: version "3.1.5" - resolved "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== dependencies: is-typedarray "^1.0.0" -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= - -typescript@^5.5.2: - version "5.5.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507" - integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew== +typedoc-plugin-missing-exports@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/typedoc-plugin-missing-exports/-/typedoc-plugin-missing-exports-3.1.0.tgz#cab4952c19cae1ab3f91cbbf2d7d17564682b023" + integrity sha512-Sogbaj+qDa21NjB3SlIw4JXSwmcl/WOjwiPNaVEcPhpNG/MiRTtpwV81cT7h1cbu9StpONFPbddYWR0KV/fTWA== + +typedoc@^0.26.5: + version "0.26.11" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.26.11.tgz#124b43a5637b7f3237b8c721691b44738c5c9dc9" + integrity 
sha512-sFEgRRtrcDl2FxVP58Ze++ZK2UQAEvtvvH8rRlig1Ja3o7dDaMHmaBfvJmdGnNEFaLTpQsN8dpvZaTqJSu/Ugw== + dependencies: + lunr "^2.3.9" + markdown-it "^14.1.0" + minimatch "^9.0.5" + shiki "^1.16.2" + yaml "^2.5.1" + +"typescript@^4.6.4 || ^5.2.2", typescript@^5.5.4, typescript@^5.9.2: + version "5.9.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.2.tgz#d93450cddec5154a2d5cabe3b8102b83316fb2a6" + integrity sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A== + +typescript@~5.4.2: + version "5.4.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== typical@^6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/typical/-/typical-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/typical/-/typical-6.0.1.tgz#89bd1a6aa5e5e96fa907fb6b7579223bff558a06" integrity sha512-+g3NEp7fJLe9DPa1TArHm9QAA7YciZmWnfAqEaFrBihQ7epOv9i99rjtgb6Iz0wh3WuQDjsCTDfgRoGnmHN81A== -unbzip2-stream@1.4.3: +typical@^7.1.1, typical@^7.2.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/typical/-/typical-7.3.0.tgz#930376be344228709f134613911fa22aa09617a4" + integrity sha512-ya4mg/30vm+DOWfBg4YK3j2WD6TWtRkCbasOJr40CseYENzCUby/7rIvXA99JGsQHeNxLbnXdyLLxKSv3tauFw== + +uc.micro@^2.0.0, uc.micro@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-2.1.0.tgz#f8d3f7d0ec4c3dea35a7e3c8efa4cb8b45c9e7ee" + integrity sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A== + +uglify-js@^3.1.4: + version "3.19.3" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.19.3.tgz#82315e9bbc6f2b25888858acd1fff8441035b77f" + integrity sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ== + +unbox-primitive@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.1.0.tgz#8d9d2c9edeea8460c7f35033a88867944934d1e2" + integrity sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw== + dependencies: + call-bound "^1.0.3" + has-bigints "^1.0.2" + has-symbols "^1.1.0" + which-boxed-primitive "^1.1.1" + +unbzip2-stream@^1.4.3: version "1.4.3" - resolved "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz" + resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== dependencies: buffer "^5.2.1" through "^2.3.8" -underscore@^1.12.1: - version "1.13.6" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.6.tgz#04786a1f589dc6c09f761fc5f45b89e935136441" - integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== +underscore@^1.12.1, underscore@~1.13.2: + version "1.13.7" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.7.tgz#970e33963af9a7dda228f17ebe8399e5fbe63a10" + integrity sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g== undici-types@~5.26.4: version "5.26.5" resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== -undici-types@~6.19.2: - version "6.19.8" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" - integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== +undici-types@~6.21.0: + version "6.21.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.21.0.tgz#691d00af3909be93a7faa13be61b3a5b50ef12cb" + 
integrity sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ== -union-value@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz" - integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== +undici-types@~7.8.0: + version "7.8.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-7.8.0.tgz#de00b85b710c54122e44fbfd911f8d70174cd294" + integrity sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw== + +undici@^5.25.4, undici@^5.28.5: + version "5.29.0" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.29.0.tgz#419595449ae3f2cdcba3580a2e8903399bd1f5a3" + integrity sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg== dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" + "@fastify/busboy" "^2.0.0" -unique-string@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== +unicorn-magic@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/unicorn-magic/-/unicorn-magic-0.1.0.tgz#1bb9a51c823aaf9d73a8bfcd3d1a23dde94b0ce4" + integrity sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ== + +unicorn-magic@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/unicorn-magic/-/unicorn-magic-0.3.0.tgz#4efd45c85a69e0dd576d25532fbfa22aa5c8a104" + integrity sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA== + +unique-filename@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-3.0.0.tgz#48ba7a5a16849f5080d26c760c86cf5cf05770ea" + integrity 
sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g== dependencies: - crypto-random-string "^2.0.0" + unique-slug "^4.0.0" -universal-user-agent@^6.0.0: +unique-filename@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-4.0.0.tgz#a06534d370e7c977a939cd1d11f7f0ab8f1fed13" + integrity sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ== + dependencies: + unique-slug "^5.0.0" + +unique-slug@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-4.0.0.tgz#6bae6bb16be91351badd24cdce741f892a6532e3" + integrity sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ== + dependencies: + imurmurhash "^0.1.4" + +unique-slug@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-5.0.0.tgz#ca72af03ad0dbab4dad8aa683f633878b1accda8" + integrity sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg== + dependencies: + imurmurhash "^0.1.4" + +unist-util-is@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.0.tgz#3381f8503b251c0d9cd21bc1de939ec9df5480ee" - integrity sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w== + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-6.0.0.tgz#b775956486aff107a9ded971d996c173374be424" + integrity sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-position@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4" + integrity sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA== + dependencies: + 
"@types/unist" "^3.0.0" + +unist-util-stringify-position@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2" + integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-visit-parents@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz#4d5f85755c3b8f0dc69e21eca5d6d82d22162815" + integrity sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + +unist-util-visit@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-5.0.0.tgz#a7de1f31f72ffd3519ea71814cccf5fd6a9217d6" + integrity sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + unist-util-visit-parents "^6.0.0" + +universal-user-agent@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" + integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== + +universal-user-agent@^7.0.0, universal-user-agent@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-7.0.3.tgz#c05870a58125a2dc00431f2df815a77fe69736be" + integrity sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A== universalify@^0.1.0: version "0.1.2" - resolved "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz" + resolved 
"https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== universalify@^0.2.0: @@ -8474,27 +12892,14 @@ universalify@^0.2.0: integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== universalify@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz" - integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== - -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -untildify@^3.0.3: - version "3.0.3" - resolved "https://registry.npmjs.org/untildify/-/untildify-3.0.3.tgz" - integrity sha512-iSk/J8efr8uPT/Z4eSUywnqyrQU7DSdMfdqK4iWEaUVVmcP5JcnpRqmVMwcwcnmI1ATFNgC5V90u09tBynNFKA== - -unzip-response@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/unzip-response/-/unzip-response-2.0.1.tgz" - integrity sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c= + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" + integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== + +untildify@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/untildify/-/untildify-5.0.0.tgz#3e40c5dd67518cb8582a027b78d3c2747234f921" + integrity sha512-bOgQLUnd2G5rhzaTvh1VCI9Fo6bC5cLTpH17T5aFfamyXFYDbbdzN6IXdeoc3jBS7T9hNTmJtYUzJCJ2Xlc9gA== unzipper@^0.10.11: version "0.10.14" @@ -8512,51 +12917,50 @@ unzipper@^0.10.11: readable-stream "~2.3.6" setimmediate "~1.0.4" -update-notifier@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz" - integrity 
sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw== +update-browserslist-db@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz#348377dd245216f9e7060ff50b15a1b740b75420" + integrity sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw== dependencies: - boxen "^5.0.0" - chalk "^4.1.0" - configstore "^5.0.1" - has-yarn "^2.1.0" - import-lazy "^2.1.0" - is-ci "^2.0.0" - is-installed-globally "^0.4.0" - is-npm "^5.0.0" - is-yarn-global "^0.3.0" - latest-version "^5.1.0" - pupa "^2.1.1" - semver "^7.3.4" - semver-diff "^3.1.1" - xdg-basedir "^4.0.0" + escalade "^3.2.0" + picocolors "^1.1.1" -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== +update-notifier@^7.3.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-7.3.1.tgz#49af1ad6acfa0ea01c0d0f3c04047c154ead7096" + integrity sha512-+dwUY4L35XFYEzE+OAL3sarJdUioVovq+8f7lcIJ7wnmnYQV5UD1Y/lcwaMSyaQ6Bj3JMj1XSTjZbNLHn/19yA== + dependencies: + boxen "^8.0.1" + chalk "^5.3.0" + configstore "^7.0.0" + is-in-ci "^1.0.0" + is-installed-globally "^1.0.0" + is-npm "^6.0.0" + latest-version "^9.0.0" + pupa "^3.1.0" + semver "^7.6.3" + xdg-basedir "^5.1.0" + +upper-case-first@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324" + integrity sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg== dependencies: - punycode "^2.1.0" - -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + tslib "^2.0.3" -url-parse-lax@^1.0.0: - version "1.0.0" - resolved 
"https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz" - integrity sha1-evjzA2Rem9eaJy56FKxovAYJ2nM= +upper-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-2.0.2.tgz#d89810823faab1df1549b7d97a76f8662bae6f7a" + integrity sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg== dependencies: - prepend-http "^1.0.1" + tslib "^2.0.3" -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz" - integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: - prepend-http "^2.0.0" + punycode "^2.1.0" url-parse@^1.5.3: version "1.5.10" @@ -8571,75 +12975,98 @@ url-template@^2.0.8: resolved "https://registry.yarnpkg.com/url-template/-/url-template-2.0.8.tgz#fc565a3cccbff7730c775f5641f9555791439f21" integrity sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw== -use@^3.1.0: - version "3.1.1" - resolved "https://registry.npmjs.org/use/-/use-3.1.1.tgz" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - -util-deprecate@^1.0.1, util-deprecate@~1.0.1: +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: version "1.0.2" - resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== -uuid@^3.3.2, uuid@^3.3.3: - version "3.4.0" - resolved 
"https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== +uuid@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-10.0.0.tgz#5a95aa454e6e002725c79055fd42aaba30ca6294" + integrity sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ== + +uuid@^11.0.0, uuid@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.1.0.tgz#9549028be1753bb934fc96e2bca09bb4105ae912" + integrity sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A== uuid@^8.3.0, uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -v8-compile-cache-lib@^3.0.0: +uuid@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" + integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== + +v8-compile-cache-lib@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== -v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== - -validate-npm-package-license@^3.0.1: +validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.4: version "3.0.4" - resolved 
"https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" +validate-npm-package-name@^5.0.0, validate-npm-package-name@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz#a316573e9b49f3ccd90dbb6eb52b3f06c6d604e8" + integrity sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ== + verror@1.10.0: version "1.10.0" - resolved "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz" - integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" -vinyl-file@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/vinyl-file/-/vinyl-file-3.0.0.tgz" - integrity sha1-sQTZ5ECf+jJfqt1SBkLQo7SIs2U= +version-range@^4.13.0: + version "4.14.0" + resolved "https://registry.yarnpkg.com/version-range/-/version-range-4.14.0.tgz#91c12e4665756a9101d1af43faeda399abe0edec" + integrity sha512-gjb0ARm9qlcBAonU4zPwkl9ecKkas+tC2CGwFfptTCWWIVTWY1YUbT2zZKsOAF1jR/tNxxyLwwG0cb42XlYcTg== + +vfile-message@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-4.0.2.tgz#c883c9f677c72c166362fd635f21fc165a7d1181" + integrity sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw== dependencies: - graceful-fs "^4.1.2" - pify "^2.3.0" - strip-bom-buf "^1.0.0" - 
strip-bom-stream "^2.0.0" - vinyl "^2.0.1" + "@types/unist" "^3.0.0" + unist-util-stringify-position "^4.0.0" -vinyl@^2.0.1, vinyl@^2.2.0, vinyl@^2.2.1: - version "2.2.1" - resolved "https://registry.npmjs.org/vinyl/-/vinyl-2.2.1.tgz" - integrity sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw== +vfile@^6.0.0: + version "6.0.3" + resolved "https://registry.yarnpkg.com/vfile/-/vfile-6.0.3.tgz#3652ab1c496531852bf55a6bac57af981ebc38ab" + integrity sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q== + dependencies: + "@types/unist" "^3.0.0" + vfile-message "^4.0.0" + +vinyl-file@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/vinyl-file/-/vinyl-file-5.0.0.tgz#a051f79a8a0610f21ee69279be42ccfd6065d082" + integrity sha512-MvkPF/yA1EX7c6p+juVIvp9+Lxp70YUfNKzEWeHMKpUNVSnTZh2coaOqLxI0pmOe2V9nB+OkgFaMDkodaJUyGw== + dependencies: + "@types/vinyl" "^2.0.7" + strip-bom-buf "^3.0.1" + strip-bom-stream "^5.0.0" + vinyl "^3.0.0" + +vinyl@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-3.0.1.tgz#5f5ff85255bda2b5da25e4b3bd80b3fc077fb5a9" + integrity sha512-0QwqXteBNXgnLCdWdvPQBX6FXRHtIH3VhJPTd5Lwn28tJXc34YqSCWUmkOvtJHBmB3gGoPtrOKk3Ts8/kEZ9aA== dependencies: - clone "^2.1.1" - clone-buffer "^1.0.0" - clone-stats "^1.0.0" - cloneable-readable "^1.0.0" - remove-trailing-separator "^1.0.1" - replace-ext "^1.0.0" + clone "^2.1.2" + remove-trailing-separator "^1.1.0" + replace-ext "^2.0.0" + teex "^1.0.1" w3c-xmlserializer@^5.0.0: version "5.0.0" @@ -8648,32 +13075,51 @@ w3c-xmlserializer@^5.0.0: dependencies: xml-name-validator "^5.0.0" -wcwidth@^1.0.0, wcwidth@^1.0.1: +walk-back@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/walk-back/-/walk-back-2.0.1.tgz#554e2a9d874fac47a8cb006bf44c2f0c4998a0a4" + integrity sha512-Nb6GvBR8UWX1D+Le+xUq0+Q1kFmRBIWVrfLnQAOmcpEzA9oAxwJ9gIr36t9TWYfzvWRvuMtjHiVsJYEkXWaTAQ== + +walk-back@^5.1.1: + 
version "5.1.1" + resolved "https://registry.yarnpkg.com/walk-back/-/walk-back-5.1.1.tgz#80045191b3b3a05a8e3cc6fca066a2e495230d93" + integrity sha512-e/FRLDVdZQWFrAzU6Hdvpm7D7m2ina833gIKLptQykRK49mmCYHLHq7UqjPDbxbKLZkTkW1rFqbengdE3sLfdw== + +walk-up-path@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/walk-up-path/-/walk-up-path-3.0.1.tgz#c8d78d5375b4966c717eb17ada73dbd41490e886" + integrity sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA== + +wcwidth@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz" - integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== dependencies: defaults "^1.0.3" +weaviate-client@^3.5.2: + version "3.7.0" + resolved "https://registry.yarnpkg.com/weaviate-client/-/weaviate-client-3.7.0.tgz#cf11ccbc90ee1849869d6bebb79ec4a33056b722" + integrity sha512-q888bkHWJBdPgPHFoIc2GiFig42WbxHOgiSBVYYQUqJGA9VszaOpU/7Ip1fVATyyVjhjzRDvMebmRltirvpfMg== + dependencies: + abort-controller-x "^0.4.3" + graphql "^16.11.0" + graphql-request "^6.1.0" + long "^5.3.2" + nice-grpc "^2.1.12" + nice-grpc-client-middleware-retry "^3.1.11" + nice-grpc-common "^2.0.2" + uuid "^9.0.1" + web-streams-polyfill@4.0.0-beta.3: version "4.0.0-beta.3" resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== -web-streams-polyfill@^3.0.3: - version "3.2.0" - resolved "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.0.tgz" - integrity sha512-EqPmREeOzttaLRm5HS7io98goBgZ7IVz79aDvqjD0kYXLtFZTc0T/U6wHTPKyIjb+MdN7DFIIX6hgdBEpWmfPA== - -web-streams-polyfill@^3.2.1: 
- version "3.3.3" - resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b" - integrity sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw== - webidl-conversions@^3.0.0: version "3.0.1" - resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== webidl-conversions@^7.0.0: version "7.0.0" @@ -8682,7 +13128,7 @@ webidl-conversions@^7.0.0: websocket-driver@>=0.5.1: version "0.7.4" - resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" + resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== dependencies: http-parser-js ">=0.5.1" @@ -8691,7 +13137,7 @@ websocket-driver@>=0.5.1: websocket-extensions@>=0.1.1: version "0.1.4" - resolved "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz" + resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== whatwg-encoding@^3.1.1: @@ -8701,85 +13147,176 @@ whatwg-encoding@^3.1.1: dependencies: iconv-lite "0.6.3" +whatwg-fetch@^3.6.20: + version "3.6.20" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz#580ce6d791facec91d37c72890995a0b48d31c70" + integrity sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg== + 
whatwg-mimetype@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz#bc1bf94a985dc50388d54a9258ac405c3ca2fc0a" integrity sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg== -whatwg-url@^14.0.0: - version "14.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-14.0.0.tgz#00baaa7fd198744910c4b1ef68378f2200e4ceb6" - integrity sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw== +whatwg-url@^14.0.0, whatwg-url@^14.1.1: + version "14.2.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-14.2.0.tgz#4ee02d5d725155dae004f6ae95c73e7ef5d95663" + integrity sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw== dependencies: - tr46 "^5.0.0" + tr46 "^5.1.0" webidl-conversions "^7.0.0" whatwg-url@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz" - integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== dependencies: tr46 "~0.0.3" webidl-conversions "^3.0.0" +when-exit@^2.1.1: + version "2.1.4" + resolved "https://registry.yarnpkg.com/when-exit/-/when-exit-2.1.4.tgz#e2a0e998f7ad67eb0d2ce37e9794386663cc96f7" + integrity sha512-4rnvd3A1t16PWzrBUcSDZqcAmsUIy4minDXT/CZ8F2mVDgd65i4Aalimgz1aQkRGU0iH5eT5+6Rx2TK8o443Pg== + +which-boxed-primitive@^1.1.0, which-boxed-primitive@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz#d76ec27df7fa165f18d5808374a5fe23c29b176e" + integrity sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA== + dependencies: + is-bigint "^1.1.0" + is-boolean-object "^1.2.1" + 
is-number-object "^1.1.1" + is-string "^1.1.1" + is-symbol "^1.1.1" + +which-builtin-type@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/which-builtin-type/-/which-builtin-type-1.2.1.tgz#89183da1b4907ab089a6b02029cc5d8d6574270e" + integrity sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q== + dependencies: + call-bound "^1.0.2" + function.prototype.name "^1.1.6" + has-tostringtag "^1.0.2" + is-async-function "^2.0.0" + is-date-object "^1.1.0" + is-finalizationregistry "^1.1.0" + is-generator-function "^1.0.10" + is-regex "^1.2.1" + is-weakref "^1.0.2" + isarray "^2.0.5" + which-boxed-primitive "^1.1.0" + which-collection "^1.0.2" + which-typed-array "^1.1.16" + +which-collection@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.2.tgz#627ef76243920a107e7ce8e96191debe4b16c2a0" + integrity sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw== + dependencies: + is-map "^2.0.3" + is-set "^2.0.3" + is-weakmap "^2.0.2" + is-weakset "^2.0.3" + which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + version "2.0.1" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.1.tgz#776b1fe35d90aebe99e8ac15eb24093389a4a409" + integrity sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ== -which@2.0.2, which@^2.0.1, which@^2.0.2: +which-package-manager@^0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/which-package-manager/-/which-package-manager-0.0.1.tgz#90cd7da6bdc5944a897f4e418ab6659b27299efd" + integrity sha512-a+bCExXd8OdYky5J59nimHxTCRPhxZSQtwKh3Ew6lpC4oY9f3KH77XDxcPrComVhSEPtvMjZigS2vZgZfgJuxA== + dependencies: + execa "^7.1.1" + find-up "^6.3.0" + micromatch "^4.0.5" + +which-typed-array@^1.1.16, which-typed-array@^1.1.19: + 
version "1.1.19" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.19.tgz#df03842e870b6b88e117524a4b364b6fc689f956" + integrity sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw== + dependencies: + available-typed-arrays "^1.0.7" + call-bind "^1.0.8" + call-bound "^1.0.4" + for-each "^0.3.5" + get-proto "^1.0.1" + gopd "^1.2.0" + has-tostringtag "^1.0.2" + +which@^2.0.1: version "2.0.2" - resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" -which@^1.2.9: - version "1.3.1" - resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -wide-align@1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz" - integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== +which@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/which/-/which-4.0.0.tgz#cd60b5e74503a3fbcfbf6cd6b4138a8bae644c1a" + integrity sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg== dependencies: - string-width "^1.0.2 || 2" + isexe "^3.1.1" -widest-line@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/widest-line/-/widest-line-2.0.1.tgz" - integrity sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA== +which@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/which/-/which-5.0.0.tgz#d93f2d93f79834d4363c7d0c23e00d07c466c8d6" + integrity 
sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ== dependencies: - string-width "^2.1.1" + isexe "^3.1.1" widest-line@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== dependencies: string-width "^4.0.0" -with-open-file@^0.1.6: - version "0.1.7" - resolved "https://registry.npmjs.org/with-open-file/-/with-open-file-0.1.7.tgz" - integrity sha512-ecJS2/oHtESJ1t3ZfMI3B7KIDKyfN0O16miWxdn30zdh66Yd3LsRFebXZXq6GU4xfxLf6nVxp9kIqElb5fqczA== +widest-line@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-4.0.1.tgz#a0fc673aaba1ea6f0a0d35b3c2795c9a9cc2ebf2" + integrity sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig== dependencies: - p-finally "^1.0.0" - p-try "^2.1.0" - pify "^4.0.1" + string-width "^5.0.1" + +widest-line@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-5.0.0.tgz#b74826a1e480783345f0cd9061b49753c9da70d0" + integrity sha512-c9bZp7b5YtRj2wOe6dlj32MK+Bx/M/d+9VB2SHM1OtsUHR0aV0tdP6DWh/iMt0kWi1t5g1Iudu6hQRNd1A4PVA== + dependencies: + string-width "^7.0.0" -word-wrap@^1.2.3: +wireit@^0.14.5: + version "0.14.12" + resolved "https://registry.yarnpkg.com/wireit/-/wireit-0.14.12.tgz#c35788b4be4a796a8d05d204ec7d3f5c4b355d71" + integrity sha512-gNSd+nZmMo6cuICezYXRIayu6TSOeCSCDzjSF0q6g8FKDsRbdqrONrSZYzdk/uBISmRcv4vZtsno6GyGvdXwGA== + dependencies: + brace-expansion "^4.0.0" + chokidar "^3.5.3" + fast-glob "^3.2.11" + jsonc-parser "^3.0.0" + proper-lockfile "^4.1.2" + +word-wrap@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" - integrity 
"sha1-0sRcbdT7zmIaZvE2y+Mor9BBCzQ= sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==" + integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== -workerpool@6.1.0: - version "6.1.0" - resolved "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz" - integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg== +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== + +wordwrapjs@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/wordwrapjs/-/wordwrapjs-5.1.0.tgz#4c4d20446dcc670b14fa115ef4f8fd9947af2b3a" + integrity sha512-JNjcULU2e4KJwUNv6CHgI46UvDGitb6dGryHajXTDiLgg1/RiGoPSDw4kZfYnwGtEXf2ZMeIewDQgFGzkCB2Sg== + +workerpool@^6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.5.1.tgz#060f73b39d0caf97c6db64da004cd01b4c099544" + integrity sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA== "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" @@ -8790,26 +13327,17 @@ workerpool@6.1.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^2.0.0: - version "2.1.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz" - integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - -wrap-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-4.0.0.tgz" - integrity sha512-uMTsj9rDb0/7kk1PbcbCcwvHUxp60fGDB/NNXpVa0Q+ic/e7y5+BwTxKfQ33VYgDppSwi/FBzpetYzo8s6tfbg== +wrap-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-3.0.1.tgz#288a04d87eda5c286e060dfe8f135ce8d007f8ba" + integrity 
sha512-iXR3tDXpbnTpzjKSylUJRkLuOrEC7hwEB221cgn6wtF8wpmz28puFXAEfPT5zrjM3wahygB//VuWEr1vTkDcNQ== dependencies: - ansi-styles "^3.2.0" string-width "^2.1.1" strip-ansi "^4.0.0" wrap-ansi@^6.2.0: version "6.2.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== dependencies: ansi-styles "^4.0.0" @@ -8818,7 +13346,7 @@ wrap-ansi@^6.2.0: wrap-ansi@^7.0.0: version "7.0.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" @@ -8834,14 +13362,23 @@ wrap-ansi@^8.1.0: string-width "^5.0.1" strip-ansi "^7.0.1" +wrap-ansi@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-9.0.0.tgz#1a3dc8b70d85eeb8398ddfb1e4a02cd186e58b3e" + integrity sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q== + dependencies: + ansi-styles "^6.2.1" + string-width "^7.0.0" + strip-ansi "^7.1.0" + wrappy@1: version "1.0.2" - resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== write-file-atomic@^3.0.0: version "3.0.3" - resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" integrity 
sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== dependencies: imurmurhash "^0.1.4" @@ -8849,67 +13386,52 @@ write-file-atomic@^3.0.0: signal-exit "^3.0.2" typedarray-to-buffer "^3.1.5" -write-json-file@^4.1.1: - version "4.3.0" - resolved "https://registry.npmjs.org/write-json-file/-/write-json-file-4.3.0.tgz" - integrity sha512-PxiShnxf0IlnQuMYOPPhPkhExoCQuTUNPOa/2JWCYTmBquU9njyyDuwRKN26IZBlp4yn1nt+Agh2HOOBl+55HQ== +write-file-atomic@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-5.0.1.tgz#68df4717c55c6fa4281a7860b4c2ba0a6d2b11e7" + integrity sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw== dependencies: - detect-indent "^6.0.0" - graceful-fs "^4.1.15" - is-plain-obj "^2.0.0" - make-dir "^3.0.0" - sort-keys "^4.0.0" - write-file-atomic "^3.0.0" - -ws@8.5.0: - version "8.5.0" - resolved "https://registry.npmjs.org/ws/-/ws-8.5.0.tgz" - integrity sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg== + imurmurhash "^0.1.4" + signal-exit "^4.0.1" -ws@^7.4.4: - version "7.5.10" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" - integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== +ws@^8.18.0, ws@^8.18.3: + version "8.18.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.3.tgz#b56b88abffde62791c639170400c93dcb0c95472" + integrity sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg== -ws@^8.16.0: - version "8.17.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" - integrity "sha1-kpPaUwu1SP68lTcdkPnIeHJ9kZs= sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==" +wsl-utils@^0.1.0: + version "0.1.0" + resolved 
"https://registry.yarnpkg.com/wsl-utils/-/wsl-utils-0.1.0.tgz#8783d4df671d4d50365be2ee4c71917a0557baab" + integrity sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw== + dependencies: + is-wsl "^3.1.0" xcase@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/xcase/-/xcase-2.0.1.tgz#c7fa72caa0f440db78fd5673432038ac984450b9" integrity sha512-UmFXIPU+9Eg3E9m/728Bii0lAIuoc+6nbrNUKaRPJOFp91ih44qqGlWtxMB6kXFrRD6po+86ksHM5XHCfk6iPw== -xdg-basedir@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz" - integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== +xdg-basedir@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-5.1.0.tgz#1efba19425e73be1bc6f2a6ceb52a3d2c884c0c9" + integrity sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ== xml-name-validator@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-5.0.0.tgz#82be9b957f7afdacf961e5980f1bf227c0bf7673" integrity sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg== -xml2js@^0.4.16: - version "0.4.23" - resolved "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz" - integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug== - dependencies: - sax ">=0.6.0" - xmlbuilder "~11.0.0" - -xml2js@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" - integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== +xml2js@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" + integrity 
sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== dependencies: sax ">=0.6.0" xmlbuilder "~11.0.0" xmlbuilder@~11.0.0: version "11.0.1" - resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== xmlchars@^2.2.0: @@ -8917,62 +13439,67 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -xpath@^0.0.32: - version "0.0.32" - resolved "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz" - integrity sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw== +xmlcreate@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/xmlcreate/-/xmlcreate-2.0.4.tgz#0c5ab0f99cdd02a81065fa9cd8f8ae87624889be" + integrity sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg== -xtend@~4.0.1: - version "4.0.2" - resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== +xpath@^0.0.34: + version "0.0.34" + resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.34.tgz#a769255e8816e0938e1e0005f2baa7279be8be12" + integrity sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA== y18n@^4.0.0: - version "4.0.1" - resolved "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz" - integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== + version "4.0.3" + resolved 
"https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" + integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== y18n@^5.0.5: - version "5.0.5" - resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz" - integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A== +yallist@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yallist@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== -yaml@^1.10.0: - version "1.10.0" - resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz" - integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg== +yallist@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" + integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== -yargs-parser@20.2.4: - version "20.2.4" - resolved 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz" - integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== +yaml@^2.2.1, yaml@^2.5.1, yaml@^2.7.1: + version "2.8.0" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.8.0.tgz#15f8c9866211bdc2d3781a0890e44d4fa1a5fff6" + integrity sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ== yargs-parser@^18.1.2: version "18.1.3" - resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" -yargs-parser@^20.2.2: - version "20.2.6" - resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.6.tgz" - integrity sha512-AP1+fQIWSM/sMiET8fyayjx/J+JmTPt2Mr0FkrgqB4todtfa53sOsrSAcIrJRD5XS20bKUwaDIuMkWKCEiQLKA== +yargs-parser@^20.2.2, yargs-parser@^20.2.3, yargs-parser@^20.2.9: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== -yargs-unparser@2.0.0: +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs-unparser@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-2.0.0.tgz#f131f9226911ae5d9ad38c432fe809366c2325eb" integrity 
sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA== dependencies: camelcase "^6.0.0" @@ -8980,22 +13507,9 @@ yargs-unparser@2.0.0: flat "^5.0.2" is-plain-obj "^2.1.0" -yargs@16.2.0: - version "16.2.0" - resolved "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" - yargs@^15.0.2: version "15.4.1" - resolved "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== dependencies: cliui "^6.0.0" @@ -9010,131 +13524,113 @@ yargs@^15.0.2: y18n "^4.0.0" yargs-parser "^18.1.2" -yarn-audit-fix@^9.3.7: - version "9.3.7" - resolved "https://registry.yarnpkg.com/yarn-audit-fix/-/yarn-audit-fix-9.3.7.tgz#bd8c66ce2edecd930d99d55e9c6470aabdd35811" - integrity sha512-zYSp7fp3oJhGGt7o4/DvwQJTfV8M0ozOUPVpHmCAeQkJhNDj/3Zq8vLYToyffvzy2A86zZKEMPTQsLrNvYvqqA== - dependencies: - "@types/find-cache-dir" "^3.2.1" - "@types/fs-extra" "^9.0.13" - "@types/lodash-es" "^4.17.6" - "@types/semver" "^7.3.12" - "@types/yarnpkg__lockfile" "^1.1.5" - "@yarnpkg/lockfile" "^1.1.0" - chalk "^5.0.1" - commander "^9.4.0" - find-cache-dir "^4.0.0" - find-up "^6.3.0" - fs-extra "^10.1.0" - globby "^13.1.2" - js-yaml "^4.1.0" - lodash-es "^4.17.21" - pkg-dir "^7.0.0" - semver "^7.3.7" - synp "^1.9.10" - tslib "^2.4.0" +yargs@^16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: 
+ cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yargs@^17.0.0, yargs@^17.7.2: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" yauzl@^2.10.0: version "2.10.0" - resolved "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz" - integrity sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk= + resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" + integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== dependencies: buffer-crc32 "~0.2.3" fd-slicer "~1.1.0" -yeoman-environment@^2.10.3, yeoman-environment@^2.9.5: - version "2.10.3" - resolved "https://registry.npmjs.org/yeoman-environment/-/yeoman-environment-2.10.3.tgz" - integrity sha512-pLIhhU9z/G+kjOXmJ2bPFm3nejfbH+f1fjYRSOteEXDBrv1EoJE/e+kuHixSXfCYfTkxjYsvRaDX+1QykLCnpQ== - dependencies: - chalk "^2.4.1" - debug "^3.1.0" - diff "^3.5.0" - escape-string-regexp "^1.0.2" - execa "^4.0.0" - globby "^8.0.1" - grouped-queue "^1.1.0" - inquirer "^7.1.0" - is-scoped "^1.0.0" - lodash "^4.17.10" - log-symbols "^2.2.0" - mem-fs "^1.1.0" - mem-fs-editor "^6.0.0" - npm-api "^1.0.0" - semver "^7.1.3" - strip-ansi "^4.0.0" - text-table "^0.2.0" - untildify "^3.0.3" - yeoman-generator "^4.8.2" - -yeoman-generator@^4.12.0, yeoman-generator@^4.8.2: - version "4.13.0" - resolved "https://registry.npmjs.org/yeoman-generator/-/yeoman-generator-4.13.0.tgz" - integrity sha512-f2/5N5IR3M2Ozm+QocvZQudlQITv2DwI6Mcxfy7R7gTTzaKgvUpgo/pQMJ+WQKm0KN0YMWCFOZpj0xFGxevc1w== - dependencies: - async 
"^2.6.2" - chalk "^2.4.2" - cli-table "^0.3.1" - cross-spawn "^6.0.5" - dargs "^6.1.0" - dateformat "^3.0.3" +yeoman-environment@^4.0.0: + version "4.4.3" + resolved "https://registry.yarnpkg.com/yeoman-environment/-/yeoman-environment-4.4.3.tgz#37121fcad4f77075445e5c1881b74eded3821b15" + integrity sha512-Irk6rymmb9vNZRhQlg8LUDeopnEot2VGlP4VayTNjayfQ3nFbiINTIjZqrtHrQdw29nTxYVOAsYy98jL1p27QQ== + dependencies: + "@yeoman/adapter" "^1.4.0" + "@yeoman/conflicter" "^2.0.0-alpha.2" + "@yeoman/namespace" "^1.0.0" + "@yeoman/transform" "^1.2.0" + "@yeoman/types" "^1.1.1" + arrify "^3.0.0" + chalk "^5.3.0" + commander "^11.1.0" + debug "^4.3.4" + execa "^8.0.1" + fly-import "^0.4.0" + globby "^14.0.0" + grouped-queue "^2.0.0" + locate-path "^7.2.0" + lodash-es "^4.17.21" + mem-fs "^4.0.0" + mem-fs-editor "^11.0.0" + semver "^7.5.4" + slash "^5.1.0" + untildify "^5.0.0" + which-package-manager "^0.0.1" + +yeoman-generator@^7.1.0: + version "7.5.1" + resolved "https://registry.yarnpkg.com/yeoman-generator/-/yeoman-generator-7.5.1.tgz#3354691050ca0fbb337e68bfeb21dc247395fa5c" + integrity sha512-MYncRvzSTd71BMwiUMAVhfX00sDD8DZDrmPzRxQkWuWQ0V1Qt4Rd0gS/Nee2QDTWvRjvCa+KBfiAVrtOySq+JA== + dependencies: + "@types/lodash-es" "^4.17.9" + "@yeoman/namespace" "^1.0.0" + chalk "^5.3.0" debug "^4.1.1" - diff "^4.0.1" - error "^7.0.2" - find-up "^3.0.0" - github-username "^3.0.0" - istextorbinary "^2.5.1" - lodash "^4.17.11" - make-dir "^3.0.0" - mem-fs-editor "^7.0.1" - minimist "^1.2.5" - pretty-bytes "^5.2.0" - read-chunk "^3.2.0" - read-pkg-up "^5.0.0" - rimraf "^2.6.3" - run-async "^2.0.0" - semver "^7.2.1" - shelljs "^0.8.4" + execa "^8.0.1" + github-username "^9.0.0" + json-schema "^0.4.0" + latest-version "^9.0.0" + lodash-es "^4.17.21" + mem-fs-editor "^11.0.1" + minimist "^1.2.8" + read-package-up "^11.0.0" + semver "^7.5.4" + simple-git "^3.20.0" + sort-keys "^5.0.0" text-table "^0.2.0" - through2 "^3.0.1" - optionalDependencies: - grouped-queue "^1.1.0" - yeoman-environment 
"^2.9.5" yn@3.1.1: version "3.1.1" - resolved "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== yocto-queue@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" - integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== + version "1.2.1" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.2.1.tgz#36d7c4739f775b3cbc28e6136e21aa057adec418" + integrity sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg== yoctocolors-cjs@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz#f4b905a840a37506813a7acaa28febe97767a242" integrity sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA== -yosay@^2.0.2: - version "2.0.2" - resolved "https://registry.npmjs.org/yosay/-/yosay-2.0.2.tgz" - integrity sha512-avX6nz2esp7IMXGag4gu6OyQBsMh/SEn+ZybGu3yKPlOTE6z9qJrzG/0X5vCq/e0rPFy0CUYCze0G5hL310ibA== - dependencies: - ansi-regex "^2.0.0" - ansi-styles "^3.0.0" - chalk "^1.0.0" - cli-boxes "^1.0.0" - pad-component "0.0.1" - string-width "^2.0.0" - strip-ansi "^3.0.0" - taketalk "^1.0.0" - wrap-ansi "^2.0.0" +yoctocolors@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/yoctocolors/-/yoctocolors-2.1.1.tgz#e0167474e9fbb9e8b3ecca738deaa61dd12e56fc" + 
integrity sha512-GQHQqAopRhwU8Kt1DDM8NjibDXHC8eoh1erhGAJPEyveY9qqVeXvVikNKrDz69sHowPMorbPUrH/mx8c50eiBQ== zip-stream@^4.1.0: version "4.1.1" @@ -9144,3 +13640,23 @@ zip-stream@^4.1.0: archiver-utils "^3.0.4" compress-commons "^4.1.2" readable-stream "^3.6.0" + +zod-to-json-schema@^3.22.3: + version "3.24.6" + resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz#5920f020c4d2647edfbb954fa036082b92c9e12d" + integrity sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg== + +zod@3.23.8: + version "3.23.8" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.23.8.tgz#e37b957b5d52079769fb8097099b592f0ef4067d" + integrity sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g== + +zod@^3.22.4, zod@^3.24.1, zod@^3.25.32: + version "3.25.76" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.25.76.tgz#26841c3f6fd22a6a2760e7ccb719179768471e34" + integrity sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ== + +zwitch@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7" + integrity sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==