diff --git a/.config/1espt/PipelineAutobaseliningConfig.yml b/.config/1espt/PipelineAutobaseliningConfig.yml
new file mode 100644
index 0000000000..a46c8ef6dc
--- /dev/null
+++ b/.config/1espt/PipelineAutobaseliningConfig.yml
@@ -0,0 +1,21 @@
+## DO NOT MODIFY THIS FILE MANUALLY. This is part of auto-baselining from 1ES Pipeline Templates. Go to [https://aka.ms/1espt-autobaselining] for more details.
+
+pipelines:
+ 1110:
+ retail:
+ binary:
+ credscan:
+ lastModifiedDate: 2024-03-06
+ binskim:
+ lastModifiedDate: 2024-03-06
+ spotbugs:
+ lastModifiedDate: 2024-03-06
+ source:
+ credscan:
+ lastModifiedDate: 2024-03-06
+ eslint:
+ lastModifiedDate: 2024-03-06
+ psscriptanalyzer:
+ lastModifiedDate: 2024-03-06
+ armory:
+ lastModifiedDate: 2024-03-06
diff --git a/.config/guardian/.gdnbaselines b/.config/guardian/.gdnbaselines
new file mode 100644
index 0000000000..afb198073d
--- /dev/null
+++ b/.config/guardian/.gdnbaselines
@@ -0,0 +1,93 @@
+{
+ "properties": {
+ "helpUri": "https://eng.ms/docs/microsoft-security/security/azure-security/cloudai-security-fundamentals-engineering/security-integration/guardian-wiki/microsoft-guardian/general/baselines"
+ },
+ "version": "1.0.0",
+ "baselines": {
+ "default": {
+ "name": "default",
+ "createdDate": "2024-03-06 21:08:31Z",
+ "lastUpdatedDate": "2024-03-06 21:08:31Z"
+ }
+ },
+ "results": {
+ "31128318971be3d77cbd3aaf7b6a06d65b1874334a143ee500c7fccb5aa89427": {
+ "signature": "31128318971be3d77cbd3aaf7b6a06d65b1874334a143ee500c7fccb5aa89427",
+ "alternativeSignatures": [
+ "9106dc3b9a335702dc4feeeed54285f07d8a06494f38fc23167f6158793928dc"
+ ],
+ "target": "eng/common/SetupNugetSources.ps1",
+ "line": 38,
+ "memberOf": [
+ "default"
+ ],
+ "tool": "psscriptanalyzer",
+ "ruleId": "PSAvoidUsingUsernameAndPasswordParams",
+ "createdDate": "2024-03-06 21:08:31Z",
+ "expirationDate": "2024-08-23 23:30:43Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-03-06 23:30:43Z"
+ },
+ "992b26983b997813a410dfc25048f3b218c6fc02fc14a5c2ad431ec8e022ac79": {
+ "signature": "992b26983b997813a410dfc25048f3b218c6fc02fc14a5c2ad431ec8e022ac79",
+ "alternativeSignatures": [
+ "23e97da32b7142c282727c96d07fd5ce6aefd6ef26f02e91cb471eb7863542f8"
+ ],
+ "target": "eng/common/SetupNugetSources.ps1",
+ "line": 56,
+ "memberOf": [
+ "default"
+ ],
+ "tool": "psscriptanalyzer",
+ "ruleId": "PSAvoidUsingUsernameAndPasswordParams",
+ "createdDate": "2024-03-06 21:08:31Z",
+ "expirationDate": "2024-08-23 23:30:43Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-03-06 23:30:43Z"
+ },
+ "53b10a5fb6059b0b229ad32c6278123a5603386f65d9e1c5684a2333f2e1dc62": {
+ "signature": "53b10a5fb6059b0b229ad32c6278123a5603386f65d9e1c5684a2333f2e1dc62",
+ "alternativeSignatures": [
+ "cd7b0b0937cfa32a98962a528bd99ede0181ae41a609df430f35fd30763166c4"
+ ],
+ "target": "eng/common/SetupNugetSources.ps1",
+ "line": 88,
+ "memberOf": [
+ "default"
+ ],
+ "tool": "psscriptanalyzer",
+ "ruleId": "PSAvoidUsingUsernameAndPasswordParams",
+ "createdDate": "2024-03-06 21:08:31Z",
+ "expirationDate": "2024-08-23 23:30:43Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-03-06 23:30:43Z"
+ },
+ "2c5f3fa8b37f6dfb1ec7cb1bc64d39a43a9a0184f317d7bd5811d734da9c8626": {
+ "signature": "2c5f3fa8b37f6dfb1ec7cb1bc64d39a43a9a0184f317d7bd5811d734da9c8626",
+ "alternativeSignatures": [
+ "795ef944edceb1b07d6dd64cd3cc30a0d4d874a6dc6f5bc6f6834d2cdcef5e75"
+ ],
+ "target": "artifacts/pkgassets/Microsoft.ML.Mkl.Redist/runtimes/win-x86/native/MklImports.dll",
+ "memberOf": [
+ "default"
+ ],
+ "tool": "binskim",
+ "ruleId": "BA2008",
+ "createdDate": "2024-03-06 21:13:53Z",
+ "expirationDate": "2024-08-23 23:30:43Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-03-06 23:30:43Z"
+ },
+ "17d4115eadce781703d1e090f3c05e73f84fbbab513a1d4c8cd60b54dc8efe8c": {
+ "signature": "17d4115eadce781703d1e090f3c05e73f84fbbab513a1d4c8cd60b54dc8efe8c",
+ "alternativeSignatures": [
+ "be452f644ec14427721109f8264e8074b2a0276ec71a0cd72e41ccbe33094c7f"
+ ],
+ "target": "artifacts/pkgassets/Microsoft.ML.Mkl.Redist/runtimes/win-x64/native/MklImports.dll",
+ "memberOf": [
+ "default"
+ ],
+ "tool": "binskim",
+ "ruleId": "BA2008",
+ "createdDate": "2024-03-06 21:36:33Z",
+ "expirationDate": "2024-08-23 23:30:43Z",
+ "justification": "This error is baselined with an expiration date of 180 days from 2024-03-06 23:30:43Z"
+ }
+ }
+}
\ No newline at end of file
diff --git a/.github/fabricbot.json b/.github/fabricbot.json
deleted file mode 100644
index 2f752f7ae1..0000000000
--- a/.github/fabricbot.json
+++ /dev/null
@@ -1,1577 +0,0 @@
-[
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssuesOnlyResponder",
- "version": "1.0",
- "config": {
- "taskName": "Add untriaged label to new/reopened issues without a milestone",
- "actions": [
- {
- "name": "addLabel",
- "parameters": {
- "label": "untriaged"
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issues"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "operator": "or",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "opened"
- }
- },
- {
- "name": "isAction",
- "parameters": {
- "action": "reopened"
- }
- },
- {
- "name": "removedFromMilestone",
- "parameters": {}
- }
- ]
- },
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isInMilestone",
- "parameters": {}
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "hasLabel",
- "parameters": {
- "label": "untriaged"
- }
- }
- ]
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssuesOnlyResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove untriaged label from issues when closed or added to a milestone",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "untriaged"
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issues"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "operator": "or",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "closed"
- }
- },
- {
- "name": "addedToMilestone",
- "parameters": {}
- }
- ]
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "untriaged"
- }
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "InPrLabel",
- "subCapability": "InPrLabel",
- "version": "1.0",
- "config": {
- "taskName": "Add `in-pr` label on issue when an open pull request is targeting it",
- "inPrLabelText": "There is an active PR which will close this issue when it is merged",
- "fixedLabelEnabled": false,
- "label_inPr": "in-pr"
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestResponder",
- "version": "1.0",
- "config": {
- "taskName": "Assign Team PRs to author",
- "actions": [
- {
- "name": "assignToUser",
- "parameters": {
- "user": {
- "type": "prAuthor"
- }
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "opened"
- }
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "activitySenderHasPermissions",
- "parameters": {
- "permissions": "read"
- }
- }
- ]
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestResponder",
- "version": "1.0",
- "config": {
- "taskName": "Label community PRs",
- "actions": [
- {
- "name": "addLabel",
- "parameters": {
- "label": "community-contribution"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "opened"
- }
- },
- {
- "operator": "and",
- "operands": [
- {
- "operator": "not",
- "operands": [
- {
- "name": "activitySenderHasPermissions",
- "parameters": {
- "permissions": "admin"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "activitySenderHasPermissions",
- "parameters": {
- "permissions": "maintain"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "activitySenderHasPermissions",
- "parameters": {
- "permissions": "write"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": "github-actions[bot]"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": "dotnet-maestro[bot]"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": "dotnet-maestro-bot[bot]"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": "dotnet-maestro-bot"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": "dotnet-maestro"
- }
- }
- ]
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": "github-actions"
- }
- }
- ]
- }
- ]
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssuesOnlyResponder",
- "version": "1.0",
- "config": {
- "taskName": "Needs-author-action notification",
- "actions": [
- {
- "name": "addReply",
- "parameters": {
- "comment": "This issue has been marked `needs-author-action` and may be missing some important information."
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issues"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "labelAdded",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestReviewResponder",
- "version": "1.0",
- "config": {
- "taskName": "PR reviews with \"changes requested\" applies the needs-author-action label",
- "actions": [
- {
- "name": "addLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request_review"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "operator": "not",
- "operands": [
- {
- "name": "activitySenderHasPermissions",
- "parameters": {
- "state": "changes_requested",
- "permissions": "read"
- }
- }
- ]
- },
- {
- "name": "isAction",
- "parameters": {
- "action": "submitted"
- }
- },
- {
- "name": "isReviewState",
- "parameters": {
- "state": "changes_requested"
- }
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssueCommentResponder",
- "version": "1.0",
- "config": {
- "taskName": "Replace `needs-author-action` label with `needs-further-triage` label when the author comments on an issue that is not still untriaged",
- "actions": [
- {
- "name": "addLabel",
- "parameters": {
- "label": "needs-further-triage"
- }
- },
- {
- "name": "removeLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issue_comment"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "created"
- }
- },
- {
- "name": "isActivitySender",
- "parameters": {
- "user": {
- "type": "author"
- }
- }
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "hasLabel",
- "parameters": {
- "label": "untriaged"
- }
- }
- ]
- },
- {
- "name": "isOpen",
- "parameters": {}
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssueCommentResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove `needs-author-action` label when the author comments on an `untriaged` issue",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issue_comment"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "created"
- }
- },
- {
- "name": "isActivitySender",
- "parameters": {
- "user": {
- "type": "author"
- }
- }
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "untriaged"
- }
- },
- {
- "name": "isOpen",
- "parameters": {}
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestResponder",
- "version": "1.0",
- "config": {
- "taskName": "Pushing changes to PR branch removes the needs-author-action label",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "synchronize"
- }
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestCommentResponder",
- "version": "1.0",
- "config": {
- "taskName": "Author commenting in PR removes the needs-author-action label",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "issue_comment"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": {
- "type": "author"
- }
- }
- },
- {
- "name": "isAction",
- "parameters": {
- "action": "created"
- }
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- },
- {
- "name": "isOpen",
- "parameters": {}
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestReviewResponder",
- "version": "1.0",
- "config": {
- "taskName": "Author responding to a pull request review comment removes the needs-author-action label",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request_review"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isActivitySender",
- "parameters": {
- "user": {
- "type": "author"
- }
- }
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- },
- {
- "name": "isAction",
- "parameters": {
- "action": "submitted"
- }
- },
- {
- "name": "isOpen",
- "parameters": {}
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "scheduled",
- "capabilityId": "ScheduledSearch",
- "subCapability": "ScheduledSearch",
- "version": "1.1",
- "config": {
- "taskName": "Add no-recent-activity label to issues",
- "actions": [
- {
- "name": "addLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "name": "addReply",
- "parameters": {
- "comment": "This issue has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`."
- }
- }
- ],
- "frequency": [
- {
- "weekDay": 0,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 1,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 2,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 3,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 4,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 5,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 6,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- }
- ],
- "searchTerms": [
- {
- "name": "isIssue",
- "parameters": {}
- },
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- },
- {
- "name": "noActivitySince",
- "parameters": {
- "days": 14
- }
- },
- {
- "name": "noLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ]
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "scheduled",
- "capabilityId": "ScheduledSearch",
- "subCapability": "ScheduledSearch",
- "version": "1.1",
- "config": {
- "taskName": "Add no-recent-activity label to PRs",
- "actions": [
- {
- "name": "addLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "name": "addReply",
- "parameters": {
- "comment": "This pull request has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`."
- }
- }
- ],
- "frequency": [
- {
- "weekDay": 0,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 1,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 2,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 3,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 4,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 5,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- },
- {
- "weekDay": 6,
- "hours": [
- 4,
- 10,
- 16,
- 22
- ],
- "timezoneOffset": 1
- }
- ],
- "searchTerms": [
- {
- "name": "isPr",
- "parameters": {}
- },
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "needs-author-action"
- }
- },
- {
- "name": "noActivitySince",
- "parameters": {
- "days": 14
- }
- },
- {
- "name": "noLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ]
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssuesOnlyResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove `no-recent-activity` label from issues when issue is modified",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issues"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "operator": "not",
- "operands": [
- {
- "name": "isAction",
- "parameters": {
- "action": "closed"
- }
- }
- ]
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "labelAdded",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ]
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "IssueCommentResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove `no-recent-activity` label when an issue is commented on",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ],
- "eventType": "issue",
- "eventNames": [
- "issue_comment"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove `no-recent-activity` label from PRs when modified",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "operator": "not",
- "operands": [
- {
- "name": "labelAdded",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ]
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestCommentResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove `no-recent-activity` label from PRs when commented on",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "issue_comment"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "name": "isOpen",
- "parameters": {}
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "trigger",
- "capabilityId": "IssueResponder",
- "subCapability": "PullRequestReviewResponder",
- "version": "1.0",
- "config": {
- "taskName": "Remove `no-recent-activity` label from PRs when new review is added",
- "actions": [
- {
- "name": "removeLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- }
- ],
- "eventType": "pull_request",
- "eventNames": [
- "pull_request_review"
- ],
- "conditions": {
- "operator": "and",
- "operands": [
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "name": "isOpen",
- "parameters": {}
- }
- ]
- }
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "scheduled",
- "capabilityId": "ScheduledSearch",
- "subCapability": "ScheduledSearch",
- "version": "1.1",
- "config": {
- "taskName": "Close issues with no recent activity",
- "actions": [
- {
- "name": "addReply",
- "parameters": {
- "comment": "This issue will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the issue, but please note that the issue will be locked if it remains inactive for another 30 days."
- }
- },
- {
- "name": "closeIssue",
- "parameters": {}
- }
- ],
- "frequency": [
- {
- "weekDay": 0,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 1,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 2,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 3,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 4,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 5,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 6,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- }
- ],
- "searchTerms": [
- {
- "name": "isIssue",
- "parameters": {}
- },
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "name": "noActivitySince",
- "parameters": {
- "days": 14
- }
- }
- ]
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "scheduled",
- "capabilityId": "ScheduledSearch",
- "subCapability": "ScheduledSearch",
- "version": "1.1",
- "config": {
- "taskName": "Close PRs with no-recent-activity",
- "actions": [
- {
- "name": "addReply",
- "parameters": {
- "comment": "This pull request will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the pull request, but please note that it will be locked if it remains inactive for another 30 days."
- }
- },
- {
- "name": "closeIssue",
- "parameters": {}
- }
- ],
- "frequency": [
- {
- "weekDay": 0,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 1,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 2,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 3,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 4,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 5,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 6,
- "hours": [
- 0,
- 6,
- 12,
- 18
- ],
- "timezoneOffset": 0
- }
- ],
- "searchTerms": [
- {
- "name": "isPr",
- "parameters": {}
- },
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "name": "hasLabel",
- "parameters": {
- "label": "no-recent-activity"
- }
- },
- {
- "name": "noActivitySince",
- "parameters": {
- "days": 14
- }
- }
- ]
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "scheduled",
- "capabilityId": "ScheduledSearch",
- "subCapability": "ScheduledSearch",
- "version": "1.1",
- "config": {
- "taskName": "Close inactive Draft PRs",
- "actions": [
- {
- "name": "closeIssue",
- "parameters": {}
- },
- {
- "name": "addReply",
- "parameters": {
- "comment": "Draft Pull Request was automatically closed for 30 days of inactivity. Please [let us know](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you'd like to reopen it."
- }
- }
- ],
- "frequency": [
- {
- "weekDay": 0,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 1,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 2,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 3,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 4,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 5,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 6,
- "hours": [
- 5,
- 11,
- 17,
- 23
- ],
- "timezoneOffset": 0
- }
- ],
- "searchTerms": [
- {
- "name": "isDraftPr",
- "parameters": {
- "value": "true"
- }
- },
- {
- "name": "isOpen",
- "parameters": {}
- },
- {
- "name": "noActivitySince",
- "parameters": {
- "days": 30
- }
- }
- ]
- }
- },
- {
- "taskSource": "fabricbot-config",
- "taskType": "scheduled",
- "capabilityId": "ScheduledSearch",
- "subCapability": "ScheduledSearch",
- "version": "1.1",
- "config": {
- "taskName": "Lock stale issues and PRs",
- "actions": [
- {
- "name": "lockIssue",
- "parameters": {
- "reason": "resolved",
- "label": "will_lock_this"
- }
- }
- ],
- "frequency": [
- {
- "weekDay": 0,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 1,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 2,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 3,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 4,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 5,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- },
- {
- "weekDay": 6,
- "hours": [
- 1,
- 7,
- 13,
- 19
- ],
- "timezoneOffset": 0
- }
- ],
- "searchTerms": [
- {
- "name": "isClosed",
- "parameters": {}
- },
- {
- "name": "noActivitySince",
- "parameters": {
- "days": 30
- }
- },
- {
- "name": "isUnlocked",
- "parameters": {}
- }
- ]
- }
- }
-]
\ No newline at end of file
diff --git a/.github/policies/resourceManagement.yml b/.github/policies/resourceManagement.yml
new file mode 100644
index 0000000000..a6ab6f9dc8
--- /dev/null
+++ b/.github/policies/resourceManagement.yml
@@ -0,0 +1,290 @@
+id:
+name: GitOps.PullRequestIssueManagement
+description: GitOps.PullRequestIssueManagement primitive
+owner:
+resource: repository
+disabled: false
+where:
+configuration:
+ resourceManagementConfiguration:
+ scheduledSearches:
+ - description: Add no-recent-activity label to issues
+ frequencies:
+ - hourly:
+ hour: 6
+ filters:
+ - isIssue
+ - isOpen
+ - hasLabel:
+ label: needs-author-action
+ - noActivitySince:
+ days: 14
+ - isNotLabeledWith:
+ label: no-recent-activity
+ actions:
+ - addLabel:
+ label: no-recent-activity
+ - addReply:
+ reply: This issue has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`.
+ - description: Add no-recent-activity label to PRs
+ frequencies:
+ - hourly:
+ hour: 6
+ filters:
+ - isPullRequest
+ - isOpen
+ - hasLabel:
+ label: needs-author-action
+ - noActivitySince:
+ days: 14
+ - isNotLabeledWith:
+ label: no-recent-activity
+ actions:
+ - addLabel:
+ label: no-recent-activity
+ - addReply:
+ reply: This pull request has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`.
+ - description: Close issues with no recent activity
+ frequencies:
+ - hourly:
+ hour: 6
+ filters:
+ - isIssue
+ - isOpen
+ - hasLabel:
+ label: no-recent-activity
+ - noActivitySince:
+ days: 14
+ actions:
+ - addReply:
+ reply: This issue will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the issue, but please note that the issue will be locked if it remains inactive for another 30 days.
+ - closeIssue
+ - description: Close PRs with no-recent-activity
+ frequencies:
+ - hourly:
+ hour: 6
+ filters:
+ - isPullRequest
+ - isOpen
+ - hasLabel:
+ label: no-recent-activity
+ - noActivitySince:
+ days: 14
+ actions:
+ - addReply:
+ reply: This pull request will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the pull request, but please note that it will be locked if it remains inactive for another 30 days.
+ - closeIssue
+ - description: Close inactive Draft PRs
+ frequencies:
+ - hourly:
+ hour: 6
+ filters:
+ - isDraftPullRequest
+ - isOpen
+ - noActivitySince:
+ days: 30
+ actions:
+ - closeIssue
+ - addReply:
+ reply: Draft Pull Request was automatically closed for 30 days of inactivity. Please [let us know](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you'd like to reopen it.
+ eventResponderTasks:
+ - if:
+ - payloadType: Pull_Request
+ then:
+ - inPrLabel:
+ label: in-pr
+ description: Add `in-pr` label on issue when an open pull request is targeting it
+ - if:
+ - payloadType: Pull_Request
+ - isAction:
+ action: Opened
+ - not:
+ activitySenderHasPermission:
+ permission: Read
+ then:
+ - assignTo:
+ author: True
+ description: Assign Team PRs to author
+ - if:
+ - payloadType: Pull_Request
+ - isAction:
+ action: Opened
+ - and:
+ - not:
+ activitySenderHasPermission:
+ permission: Admin
+ - not:
+ activitySenderHasPermission:
+ permission: Write
+ - not:
+ isActivitySender:
+ user: github-actions[bot]
+ issueAuthor: False
+ - not:
+ isActivitySender:
+ user: dotnet-maestro[bot]
+ issueAuthor: False
+ - not:
+ isActivitySender:
+ user: dotnet-maestro-bot[bot]
+ issueAuthor: False
+ - not:
+ isActivitySender:
+ user: dotnet-maestro-bot
+ issueAuthor: False
+ - not:
+ isActivitySender:
+ user: dotnet-maestro
+ issueAuthor: False
+ - not:
+ isActivitySender:
+ user: github-actions
+ issueAuthor: False
+ then:
+ - addLabel:
+ label: community-contribution
+ description: Label community PRs
+ - if:
+ - payloadType: Issues
+ - labelAdded:
+ label: needs-author-action
+ then:
+ - addReply:
+ reply: This issue has been marked `needs-author-action` and may be missing some important information.
+ description: Needs-author-action notification
+ - if:
+ - payloadType: Pull_Request_Review
+ - not:
+ activitySenderHasPermission:
+ permission: Read
+ - isAction:
+ action: Submitted
+ - isReviewState:
+ reviewState: Changes_requested
+ then:
+ - addLabel:
+ label: needs-author-action
+ description: PR reviews with "changes requested" applies the needs-author-action label
+ - if:
+ - payloadType: Issue_Comment
+ - isAction:
+ action: Created
+ - isActivitySender:
+ issueAuthor: True
+ - hasLabel:
+ label: needs-author-action
+ - not:
+ hasLabel:
+ label: untriaged
+ - isOpen
+ then:
+ - addLabel:
+ label: needs-further-triage
+ - removeLabel:
+ label: needs-author-action
+ description: Replace `needs-author-action` label with `needs-further-triage` label when the author comments on an issue that is not still untriaged
+ - if:
+ - payloadType: Issue_Comment
+ - isAction:
+ action: Created
+ - isActivitySender:
+ issueAuthor: True
+ - hasLabel:
+ label: needs-author-action
+ - hasLabel:
+ label: untriaged
+ - isOpen
+ then:
+ - removeLabel:
+ label: needs-author-action
+ description: Remove `needs-author-action` label when the author comments on an `untriaged` issue
+ - if:
+ - payloadType: Pull_Request
+ - isAction:
+ action: Synchronize
+ - hasLabel:
+ label: needs-author-action
+ then:
+ - removeLabel:
+ label: needs-author-action
+ description: Pushing changes to PR branch removes the needs-author-action label
+ - if:
+ - payloadType: Issue_Comment
+ - isActivitySender:
+ issueAuthor: True
+ - isAction:
+ action: Created
+ - hasLabel:
+ label: needs-author-action
+ - isOpen
+ then:
+ - removeLabel:
+ label: needs-author-action
+ description: Author commenting in PR removes the needs-author-action label
+ - if:
+ - payloadType: Pull_Request_Review
+ - isActivitySender:
+ issueAuthor: True
+ - hasLabel:
+ label: needs-author-action
+ - isAction:
+ action: Submitted
+ - isOpen
+ then:
+ - removeLabel:
+ label: needs-author-action
+ description: Author responding to a pull request review comment removes the needs-author-action label
+ - if:
+ - payloadType: Issues
+ - not:
+ isAction:
+ action: Closed
+ - hasLabel:
+ label: no-recent-activity
+ - not:
+ labelAdded:
+ label: no-recent-activity
+ then:
+ - removeLabel:
+ label: no-recent-activity
+ description: Remove `no-recent-activity` label from issues when issue is modified
+ - if:
+ - payloadType: Issue_Comment
+ - hasLabel:
+ label: no-recent-activity
+ then:
+ - removeLabel:
+ label: no-recent-activity
+ description: Remove `no-recent-activity` label when an issue is commented on
+ - if:
+ - payloadType: Pull_Request
+ - isOpen
+ - hasLabel:
+ label: no-recent-activity
+ - not:
+ labelAdded:
+ label: no-recent-activity
+ then:
+ - removeLabel:
+ label: no-recent-activity
+ description: Remove `no-recent-activity` label from PRs when modified
+ - if:
+ - payloadType: Issue_Comment
+ - hasLabel:
+ label: no-recent-activity
+ - isOpen
+ then:
+ - removeLabel:
+ label: no-recent-activity
+ description: Remove `no-recent-activity` label from PRs when commented on
+ - if:
+ - payloadType: Pull_Request_Review
+ - hasLabel:
+ label: no-recent-activity
+ - isOpen
+ then:
+ - removeLabel:
+ label: no-recent-activity
+ description: Remove `no-recent-activity` label from PRs when new review is added
+onFailure:
+onSuccess:
diff --git a/.github/policies/untriaged.yml b/.github/policies/untriaged.yml
new file mode 100644
index 0000000000..778539e1c5
--- /dev/null
+++ b/.github/policies/untriaged.yml
@@ -0,0 +1,39 @@
+id: untriaged
+name: GitOps.PullRequestIssueManagement
+description: Manage the 'untriaged' label on issues
+owner:
+resource: repository
+disabled: false
+where:
+configuration:
+ resourceManagementConfiguration:
+ eventResponderTasks:
+ - if:
+ - payloadType: Issues
+ - isOpen
+ - not:
+ isPartOfAnyMilestone
+ - or:
+ - isAction:
+ action: Opened
+ - isAction:
+ action: Reopened
+ - not:
+ hasLabel:
+ label: untriaged
+ then:
+ - addLabel:
+ label: untriaged
+ description: Add untriaged label to new/reopened issues without a milestone
+ - if:
+ - payloadType: Issues
+ - or:
+ - isAction:
+ action: Closed
+ - isPartOfAnyMilestone
+ - hasLabel:
+ label: untriaged
+ then:
+ - removeLabel:
+ label: untriaged
+ description: Remove untriaged label from issues when closed or added to a milestone
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
new file mode 100644
index 0000000000..0a866f1c9d
--- /dev/null
+++ b/.github/workflows/backport.yml
@@ -0,0 +1,29 @@
+name: Backport PR to branch
+on:
+ issue_comment:
+ types: [created]
+ schedule:
+ # once a day at 13:00 UTC to cleanup old runs
+ - cron: '0 13 * * *'
+
+permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+ actions: write
+
+jobs:
+ backport:
+ if: ${{ contains(github.event.comment.body, '/backport to') || github.event_name == 'schedule' }}
+ uses: dotnet/arcade/.github/workflows/backport-base.yml@main
+ with:
+ pr_description_template: |
+ Backport of #%source_pr_number% to %target_branch%
+
+ /cc %cc_users%
+
+ ## Customer Impact
+
+ ## Testing
+
+ ## Risk
diff --git a/.github/workflows/locker.yml b/.github/workflows/locker.yml
new file mode 100644
index 0000000000..f3e3b35fde
--- /dev/null
+++ b/.github/workflows/locker.yml
@@ -0,0 +1,36 @@
+name: Locker - Lock stale issues and PRs
+on:
+ schedule:
+ - cron: '0 9 * * *' # Once per day, early morning PT
+
+ workflow_dispatch:
+ # Manual triggering through the GitHub UI, API, or CLI
+ inputs:
+ daysSinceClose:
+ required: true
+ default: "30"
+ daysSinceUpdate:
+ required: true
+ default: "30"
+
+permissions:
+ issues: write
+ pull-requests: write
+
+jobs:
+ main:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Actions
+ uses: actions/checkout@v4
+ with:
+ repository: "microsoft/vscode-github-triage-actions"
+ path: ./actions
+ ref: cd16cd2aad6ba2da74bb6c6f7293adddd579a90e
+ - name: Install Actions
+ run: npm install --production --prefix ./actions
+ - name: Run Locker
+ uses: ./actions/locker
+ with:
+ daysSinceClose: ${{ fromJson(inputs.daysSinceClose || 30) }}
+ daysSinceUpdate: ${{ fromJson(inputs.daysSinceUpdate || 30) }}
diff --git a/.vsts-dotnet-ci.yml b/.vsts-dotnet-ci.yml
index cd05b1802e..a5478913e9 100644
--- a/.vsts-dotnet-ci.yml
+++ b/.vsts-dotnet-ci.yml
@@ -19,7 +19,7 @@ trigger:
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
@@ -86,7 +86,7 @@ jobs:
pool:
name: NetCore-Public
demands: ImageOverride -equals build.ubuntu.1804.amd64.open
- helixQueue: Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet-helix
+ helixQueue: Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet-helix
- template: /build/ci/job-template.yml
parameters:
@@ -127,7 +127,7 @@ jobs:
innerLoop: true
pool:
vmImage: macOS-12
- helixQueue: OSX.1100.Arm64.Open
+ helixQueue: OSX.1200.Arm64.Open
- template: /build/ci/job-template.yml
parameters:
diff --git a/Directory.Build.props b/Directory.Build.props
index 443f4258f2..8e67d0905c 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -23,6 +23,9 @@
$(TargetArchitecture.ToLower())
$(Platform).$(Configuration)
Open
+
+
+ $(NoWarn);NETSDK1206
@@ -32,8 +35,17 @@
+ $(ArtifactsDir)models/
$(ArtifactsDir)pkgassets/
$(RepoRoot)pkg/
+
+ Microsoft
+ MIT
+ https://dot.net/ml
+ mlnetlogo.png
+ https://aka.ms/mlnetreleasenotes
+
+ ML.NET ML Machine Learning
@@ -54,8 +66,8 @@
true
-
+
true
- snupkg
+
diff --git a/Directory.Build.targets b/Directory.Build.targets
index 0b93f9177a..f310d751cd 100644
--- a/Directory.Build.targets
+++ b/Directory.Build.targets
@@ -1,6 +1,8 @@
+
+
diff --git a/Microsoft.ML.sln b/Microsoft.ML.sln
index 5e81a5f3b0..5763a903b4 100644
--- a/Microsoft.ML.sln
+++ b/Microsoft.ML.sln
@@ -96,6 +96,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Samples.OneDal
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Recommender", "src\Microsoft.ML.Recommender\Microsoft.ML.Recommender.csproj", "{C8E1772B-DFD9-4A4D-830D-6AAB1C668BB3}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DnnImageFeaturizer.ModelRedist", "src\Microsoft.ML.DnnImageFeaturizer.ModelRedist\Microsoft.ML.DnnImageFeaturizer.ModelRedist.csproj", "{39E89702-1A46-4D5B-BA50-530D11309B5E}"
+EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DnnImageFeaturizer.ResNet18", "src\Microsoft.ML.DnnImageFeaturizer.ResNet18\Microsoft.ML.DnnImageFeaturizer.ResNet18.csproj", "{9222FC9D-599A-49A5-B685-08CC9A5C81D7}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DnnImageFeaturizer.AlexNet", "src\Microsoft.ML.DnnImageFeaturizer.AlexNet\Microsoft.ML.DnnImageFeaturizer.AlexNet.csproj", "{6C29AA9B-054B-4762-BEA5-D305B932AA80}"
@@ -170,6 +172,10 @@ Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "Microsoft.ML.FSharp.Tests",
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Data.Analysis.PerformanceTests", "test\Microsoft.Data.Analysis.PerformanceTests\Microsoft.Data.Analysis.PerformanceTests.csproj", "{FB8A8823-CC6C-4C2F-8539-05FBFB7C91CD}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.TorchSharp.Tests", "test\Microsoft.ML.TorchSharp.Tests\Microsoft.ML.TorchSharp.Tests.csproj", "{AB8D68F1-6C3E-41FD-B0EC-A093E009341D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.TensorFlow.Tests", "test\Microsoft.ML.TensorFlow.Tests\Microsoft.ML.TensorFlow.Tests.csproj", "{763FF013-8309-4680-A769-B54E7BB99612}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -798,6 +804,30 @@ Global
{FB8A8823-CC6C-4C2F-8539-05FBFB7C91CD}.Release|Any CPU.Build.0 = Release|Any CPU
{FB8A8823-CC6C-4C2F-8539-05FBFB7C91CD}.Release|x64.ActiveCfg = Release|Any CPU
{FB8A8823-CC6C-4C2F-8539-05FBFB7C91CD}.Release|x64.Build.0 = Release|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Debug|x64.Build.0 = Debug|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Release|x64.ActiveCfg = Release|Any CPU
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D}.Release|x64.Build.0 = Release|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Debug|x64.Build.0 = Debug|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Release|Any CPU.Build.0 = Release|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Release|x64.ActiveCfg = Release|Any CPU
+ {763FF013-8309-4680-A769-B54E7BB99612}.Release|x64.Build.0 = Release|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Debug|x64.Build.0 = Debug|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Release|Any CPU.Build.0 = Release|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Release|x64.ActiveCfg = Release|Any CPU
+ {39E89702-1A46-4D5B-BA50-530D11309B5E}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -881,6 +911,9 @@ Global
{C3D82402-F207-4F19-8C57-5AF0FBAF9682} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{041CB5CD-5832-413E-A894-D9DBED210B16} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{FB8A8823-CC6C-4C2F-8539-05FBFB7C91CD} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
+ {AB8D68F1-6C3E-41FD-B0EC-A093E009341D} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
+ {763FF013-8309-4680-A769-B54E7BB99612} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
+ {39E89702-1A46-4D5B-BA50-530D11309B5E} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {41165AF1-35BB-4832-A189-73060F82B01D}
diff --git a/NuGet.config b/NuGet.config
index eed95518c1..cf1fc2312f 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -1,4 +1,4 @@
-
+
diff --git a/build/.night-build.yml b/build/.night-build.yml
index 83c7645cee..d17393bc46 100644
--- a/build/.night-build.yml
+++ b/build/.night-build.yml
@@ -22,7 +22,7 @@ schedules:
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
diff --git a/build/.outer-loop-build.yml b/build/.outer-loop-build.yml
index 27f1b935ab..2c58e6aacd 100644
--- a/build/.outer-loop-build.yml
+++ b/build/.outer-loop-build.yml
@@ -23,7 +23,7 @@ schedules:
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
diff --git a/build/ci/job-template.yml b/build/ci/job-template.yml
index 66e79a37c8..b8625090ff 100644
--- a/build/ci/job-template.yml
+++ b/build/ci/job-template.yml
@@ -71,7 +71,7 @@ jobs:
displayName: Install MacOS build dependencies
# Extra Apple MacOS step required to install OS-specific dependencies
- ${{ if and(contains(parameters.pool.vmImage, 'macOS'), contains(parameters.name, 'cross')) }}:
- - script: brew update && brew install libomp && brew link libomp --force
+ - script: brew update && brew install -f --overwrite python@3.12 && brew install libomp && brew link libomp --force
displayName: Install MacOS ARM build dependencies
- ${{ if and( eq(parameters.nightlyBuild, 'true'), eq(parameters.pool.vmImage, 'ubuntu-18.04')) }}:
- bash: echo "##vso[task.setvariable variable=LD_LIBRARY_PATH]$(nightlyBuildRunPath):$LD_LIBRARY_PATH"
@@ -121,7 +121,7 @@ jobs:
- ${{ if eq(parameters.nightlyBuild, 'false') }}:
- ${{ if eq(parameters.innerLoop, 'false') }}:
- ${{ if and(eq(parameters.runSpecific, 'false'), eq(parameters.useVSTestTask, 'false')) }}:
- - script: set PATH=%PATH%;%USERPROFILE%\.nuget\packages\libtorch-cpu-win-x64\1.13.0.1\runtimes\win-x64\native;%USERPROFILE%\.nuget\packages\torchsharp\0.99.5\runtimes\win-x64\native & ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
+ - script: set PATH=%PATH%;%USERPROFILE%\.nuget\packages\libtorch-cpu-win-x64\2.1.0.1\runtimes\win-x64\native;%USERPROFILE%\.nuget\packages\torchsharp\0.101.5\runtimes\win-x64\native & ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
displayName: Run All Tests.
- ${{ if and(eq(parameters.runSpecific, 'true'), eq(parameters.useVSTestTask, 'false')) }}:
- script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:TestRunnerAdditionalArguments='-trait$(spaceValue)Category=RunSpecificTest' /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
diff --git a/build/ci/send-to-helix.yml b/build/ci/send-to-helix.yml
index 93f83e1053..d12ddc0d4e 100644
--- a/build/ci/send-to-helix.yml
+++ b/build/ci/send-to-helix.yml
@@ -11,7 +11,6 @@ parameters:
WarnAsError: ''
TestTargetFramework: ''
HelixConfiguration: '' # optional -- additional property attached to a job
- IncludeDotNetCli: true # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
EnableXUnitReporter: true # optional -- true enables XUnit result reporting to Mission Control
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
@@ -34,7 +33,6 @@ steps:
/p:HelixBuild=${{ parameters.HelixBuild }}
/p:HelixConfiguration="${{ parameters.HelixConfiguration }}"
/p:HelixAccessToken="${{ parameters.HelixAccessToken }}"
- /p:IncludeDotNetCli=${{ parameters.IncludeDotNetCli }}
/p:EnableXUnitReporter=${{ parameters.EnableXUnitReporter }}
/p:WaitForWorkItemCompletion=${{ parameters.WaitForWorkItemCompletion }}
/p:HelixBaseUri=${{ parameters.HelixBaseUri }}
diff --git a/build/publish.proj b/build/publish.proj
deleted file mode 100644
index 9391afb0ad..0000000000
--- a/build/publish.proj
+++ /dev/null
@@ -1,40 +0,0 @@
-
-
-
-
-
- Microsoft.SymbolUploader.Build.Task
- true
- 600
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 3650
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/build/vsts-ci.yml b/build/vsts-ci.yml
index 9487484d14..7a803b5a91 100644
--- a/build/vsts-ci.yml
+++ b/build/vsts-ci.yml
@@ -2,309 +2,321 @@
# ML.NET's official, signed build
################################################################################
-resources:
- containers:
- - container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
-
- - container: UbuntuCrossArmContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-cross-arm
-
- - container: UbuntuCrossArm64Container
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-cross-arm64
-
variables:
- BuildConfig: Release
- _BuildConfig: $(BuildConfig)
- OfficialBuildId: $(BUILD.BUILDNUMBER)
- DOTNET_CLI_TELEMETRY_OPTOUT: 1
- DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
- DOTNET_MULTILEVEL_LOOKUP: 0
- Codeql.Enabled: true
- Codeql.SkipTaskAutoInjection: True #default to not inject CodeQL tasks, we'll enable it in a single job.
-
-stages:
-- stage: build
- displayName: Build
- jobs:
- ################################################################################
- - job: Linux_x64
- ################################################################################
- pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
- container: CentosContainer
- steps:
- # Only build native assets to avoid conflicts.
- - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Linux package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- ################################################################################
- - job: Linux_arm
- ################################################################################
- variables:
- ROOTFS_DIR: '/crossrootfs/arm'
- pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
- container: UbuntuCrossArmContainer
- steps:
- # Only build native assets to avoid conflicts.
- - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=arm /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Linux_arm package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- ################################################################################
- - job: Linux_arm64
- ################################################################################
- variables:
- ROOTFS_DIR: '/crossrootfs/arm64'
- pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
- container: UbuntuCrossArm64Container
- steps:
- # Only build native assets to avoid conflicts.
- - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=arm64 /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Linux_arm64 package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- ################################################################################
- - job: MacOS
- ################################################################################
- pool:
- vmImage: macOS-12
- steps:
- - script: export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 && rm '/usr/local/bin/2to3-3.11' && brew unlink libomp && brew install $(Build.SourcesDirectory)/build/libomp.rb --build-from-source --formula
- displayName: Install build dependencies
- # Only build native assets to avoid conflicts.
- - script: ./build.sh -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish macOS package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- ################################################################################
- - job: MacOS_Apple_Silicon
- ################################################################################
- pool:
- vmImage: macOS-12
- steps:
- # Work around MacOS Homebrew image/environment bug: https://github.com/actions/virtual-environments/issues/2322#issuecomment-749211076
- - script: |
- rm -rf /usr/local/bin/2to3
- displayName: MacOS Homebrew bug Workaround
- continueOnError: true
- - script: brew update && brew install libomp && brew link libomp --force
- displayName: Install build dependencies
- # Only build native assets to avoid conflicts.
- - script: ./build.sh -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=arm64 /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish macOS_M1 package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
+- name: BuildConfig
+ value: Release
+- name: _BuildConfig
+ value: $(BuildConfig)
+- name: OfficialBuildId
+ value: $(BUILD.BUILDNUMBER)
+- name: DOTNET_CLI_TELEMETRY_OPTOUT
+ value: 1
+- name: DOTNET_SKIP_FIRST_TIME_EXPERIENCE
+ value: 1
+- name: DOTNET_MULTILEVEL_LOOKUP
+ value: 0
+- name: Codeql.Enabled
+ value: true
+- name: Codeql.SkipTaskAutoInjection
+ value: True #default to not inject CodeQL tasks, we'll enable it in a single job.
+- name: LinuxImage
+ value: 1es-ubuntu-2204-pt
+- name: WindowsImage
+ value: 1es-windows-2019-pt
+- name: MacImage
+ value: macOS-12
+- template: /eng/common/templates-official/variables/pool-providers.yml@self
- ################################################################################
- - job: Windows_arm64
- ################################################################################
- pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- # Only build native assets to avoid conflicts.
- - script: ./build.cmd -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=arm64 /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Windows_arm64 package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- # Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
- displayName: Dotnet Server Shutdown
-
- ################################################################################
- - job: Windows_x86
- ################################################################################
- variables:
- _TeamName: DotNetCore
- pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- # Only build native assets to avoid conflicts.
- - script: ./build.cmd -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x86 /p:CopyPackageAssets=true
- displayName: Build Native Assets
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Windows_x86 package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- # Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
- displayName: Dotnet Server Shutdown
-
- ################################################################################
- - job: Windows_x64
- ################################################################################
- variables:
- Codeql.SkipTaskAutoInjection: False # run CodeQL in this job
- _TeamName: DotNetCore
+resources:
+ repositories:
+ - repository: 1ESPipelineTemplates
+ type: git
+ name: 1ESPipelineTemplates/1ESPipelineTemplates
+ ref: refs/tags/release
+
+extends:
+ template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
+ parameters:
+ featureFlags:
+ autoBaseline: true
+ sdl:
+ binskim:
+ enabled: true
+ scanOutputDirectoryOnly: true
pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
-
- # Build both native and managed assets.
- - script: ./build.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
+ name: $(DncEngInternalBuildPool)
+ image: $(WindowsImage)
+ os: windows
+ containers:
+ CentosContainer:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
+ UbuntuCrossArmContainer:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-cross-arm
+ UbuntuCrossArm64Container:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-cross-arm64
+ stages:
+ - stage: build
displayName: Build
-
- - task: ComponentGovernanceComponentDetection@0
- inputs:
- scanType: 'Register'
- verbosity: 'Verbose'
- alertWarningLevel: 'High'
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Windows_x64 package assets
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
- artifactName: pkgassets
- artifactType: container
-
- - template: /eng/common/templates/steps/generate-sbom.yml
-
- # Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
- displayName: Dotnet Server Shutdown
-
- ################################################################################
- - template: /eng/common/templates/job/job.yml
- parameters:
- name: Package
- ################################################################################
- artifacts:
- publish:
- artifacts: false
- logs: true
- manifests: true
- enableMicrobuild: true
- enablePublishUsingPipelines: true
- enableSourceIndex: true
- dependsOn:
- - Linux_x64
- - Linux_arm
- - Linux_arm64
- - MacOS
- - Windows_x86
- - Windows_x64
- - MacOS_Apple_Silicon
- variables:
- - DotnetVersionKind: $[variables.VERSIONKIND] # If no "VERSIONKIND" variable is set when queuing the publishing task, this defaults to empty string.
- - NUGET_PACKAGES: $(Build.SourcesDirectory)/.packages
- - _SignType: real
- - _TeamName: DotNetCore
- - _InternalBuildArgs: /p:DotNetSignType=$(_SignType)
- /p:TeamName=$(_TeamName)
- /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
- /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
- /p:OfficialBuildId=$(OfficialBuildId)
- /p:DotNetPublishUsingPipelines=$true
- pool:
- name: NetCore1ESPool-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
-
- # Download all agent packages from all previous phases
- - task: DownloadBuildArtifacts@0
- displayName: Download package assets
- inputs:
- artifactName: pkgassets
- downloadPath: $(Build.SourcesDirectory)/artifacts
-
- # Depending on the value of DotNetFinalVersionKind, the name of the package will change.
- # For our nightly builds we want it to be empty, and when creating the official nugets, we want it to be "release"
- # the value of the version kind is set when queuing the publishing job on AzureDevOps by adding a VERSIONKIND variable
- # See more info in: https://github.com/dotnet/arcade/blob/master/Documentation/CorePackages/Versioning.md#package-version
- - script: ./build.cmd -configuration $(BuildConfig) -pack -sign -publish -ci /p:DotNetFinalVersionKind=$(DotnetVersionKind) $(_InternalBuildArgs)
- displayName: Build Packages
-
- # Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
- displayName: Dotnet Server Shutdown
-
- - template: /eng/common/templates/job/publish-build-assets.yml
- parameters:
- dependsOn: Package
- enablePublishBuildArtifacts: true
- publishAssetsImmediately: true
- publishUsingPipelines: true
- pool:
- name: NetCore1ESPool-Svc-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
-
-- template: /eng/common/templates/post-build/post-build.yml
- parameters:
- publishingInfraVersion: 3
- # This is to enable SDL runs part of Post-Build Validation Stage
- SDLValidationParameters:
- enable: true
- publishGdn: true
- continueOnError: false
- params: ' -SourceToolsList @("policheck","credscan")
- -TsaInstanceURL https://devdiv.visualstudio.com/
- -TsaProjectName DEVDIV
- -TsaNotificationEmail mlnetcore@microsoft.com
- -TsaCodebaseAdmin REDMOND\ericstj
- -TsaBugAreaPath "DevDiv\Net Libraries"
- -TsaIterationPath DevDiv
- -TsaRepositoryName machinelearning
- -TsaCodebaseName machinelearning
- -TsaPublish $True'
\ No newline at end of file
+ jobs:
+ ################################################################################
+ - job: Linux_x64
+ ################################################################################
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(LinuxImage)
+ os: linux
+ container: CentosContainer
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ # Only build native assets to avoid conflicts.
+ - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ ################################################################################
+ - job: Linux_arm
+ ################################################################################
+ variables:
+ ROOTFS_DIR: '/crossrootfs/arm'
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(LinuxImage)
+ os: linux
+ container: UbuntuCrossArmContainer
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ # Only build native assets to avoid conflicts.
+ - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=arm /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ ################################################################################
+ - job: Linux_arm64
+ ################################################################################
+ variables:
+ ROOTFS_DIR: '/crossrootfs/arm64'
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(LinuxImage)
+ os: linux
+ container: UbuntuCrossArm64Container
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ # Only build native assets to avoid conflicts.
+ - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=arm64 /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ ################################################################################
+ - job: MacOS
+ ################################################################################
+ pool:
+ name: Azure Pipelines
+ vmImage: $(MacImage)
+ os: macOS
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ - script: export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 && rm '/usr/local/bin/2to3-3.11' && brew unlink libomp && brew install $(Build.SourcesDirectory)/build/libomp.rb --build-from-source --formula
+ displayName: Install build dependencies
+ # Only build native assets to avoid conflicts.
+ - script: ./build.sh -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ ################################################################################
+ - job: MacOS_Apple_Silicon
+ ################################################################################
+ pool:
+ name: Azure Pipelines
+ vmImage: $(MacImage)
+ os: macOS
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ # Work around MacOS Homebrew image/environment bug: https://github.com/actions/virtual-environments/issues/2322#issuecomment-749211076
+ - script: |
+ rm -rf /usr/local/bin/2to3
+ displayName: MacOS Homebrew bug Workaround
+ continueOnError: true
+ - script: brew update && brew install -f --overwrite python@3.12 && brew install libomp && brew link libomp --force
+ displayName: Install build dependencies
+ # Only build native assets to avoid conflicts.
+ - script: ./build.sh -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=arm64 /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ ################################################################################
+ - job: Windows_arm64
+ ################################################################################
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(WindowsImage)
+ os: windows
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ # Only build native assets to avoid conflicts.
+ - script: ./build.cmd -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=arm64 /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ # Terminate all dotnet build processes.
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
+ displayName: Dotnet Server Shutdown
+
+ ################################################################################
+ - job: Windows_x86
+ ################################################################################
+ variables:
+ _TeamName: DotNetCore
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(WindowsImage)
+ os: windows
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+ # Only build native assets to avoid conflicts.
+ - script: ./build.cmd -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x86 /p:CopyPackageAssets=true
+ displayName: Build Native Assets
+
+ # Terminate all dotnet build processes.
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
+ displayName: Dotnet Server Shutdown
+
+ ################################################################################
+ - job: Windows_x64
+ ################################################################################
+ variables:
+ Codeql.SkipTaskAutoInjection: False # run CodeQL in this job
+ _TeamName: DotNetCore
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(WindowsImage)
+ os: windows
+ templateContext:
+ outputs:
+ - output: buildArtifacts
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
+ ArtifactName: pkgassets
+ steps:
+
+ # Build both native and managed assets.
+ - script: ./build.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
+ displayName: Build
+
+ - task: ComponentGovernanceComponentDetection@0
+ inputs:
+ scanType: 'Register'
+ verbosity: 'Verbose'
+ alertWarningLevel: 'High'
+
+ # Terminate all dotnet build processes.
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
+ displayName: Dotnet Server Shutdown
+
+ ################################################################################
+ - template: /eng/common/templates-official/job/job.yml@self
+ parameters:
+ name: Package
+ ################################################################################
+ #disable log upload for now, https://github.com/dotnet/dnceng/issues/2133
+ #artifacts:
+ # publish:
+ # artifacts: false
+ # logs: true
+ # manifests: true
+ enableMicrobuild: true
+ enablePublishUsingPipelines: true
+ enableSourceIndex: true
+ dependsOn:
+ - Linux_x64
+ - Linux_arm
+ - Linux_arm64
+ - MacOS
+ - Windows_x86
+ - Windows_x64
+ - MacOS_Apple_Silicon
+ variables:
+ - DotnetVersionKind: $[variables.VERSIONKIND] # If no "VERSIONKIND" variable is set when queuing the publishing task, this defaults to empty string.
+ - NUGET_PACKAGES: $(Build.SourcesDirectory)/.packages
+ - _SignType: real
+ - _TeamName: DotNetCore
+ - _InternalBuildArgs: /p:DotNetSignType=$(_SignType)
+ /p:TeamName=$(_TeamName)
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:OfficialBuildId=$(OfficialBuildId)
+ /p:DotNetPublishUsingPipelines=$true
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(WindowsImage)
+ os: windows
+ steps:
+
+ # Download all agent packages from all previous phases
+ - task: DownloadBuildArtifacts@0
+ displayName: Download package assets
+ inputs:
+ artifactName: pkgassets
+ downloadPath: $(Build.SourcesDirectory)/artifacts
+
+ # Depending on the value of DotNetFinalVersionKind, the name of the package will change.
+ # For our nightly builds we want it to be empty, and when creating the official nugets, we want it to be "release"
+ # the value of the version kind is set when queuing the publishing job on AzureDevOps by adding a VERSIONKIND variable
+ # See more info in: https://github.com/dotnet/arcade/blob/master/Documentation/CorePackages/Versioning.md#package-version
+ - script: ./build.cmd -configuration $(BuildConfig) -pack -sign -publish -ci /p:DotNetFinalVersionKind=$(DotnetVersionKind) $(_InternalBuildArgs)
+ displayName: Build Packages
+
+ # Terminate all dotnet build processes.
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
+ displayName: Dotnet Server Shutdown
+
+ - template: /eng/common/templates-official/job/publish-build-assets.yml@self
+ parameters:
+ dependsOn: Package
+ enablePublishBuildArtifacts: true
+ publishAssetsImmediately: true
+ publishUsingPipelines: true
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: $(WindowsImage)
+ os: windows
+
+ - template: /eng/common/templates-official/post-build/post-build.yml@self
+ parameters:
+ publishingInfraVersion: 3
+ # This is to enable SDL runs part of Post-Build Validation Stage
+ SDLValidationParameters:
+ enable: true
+ publishGdn: true
+ continueOnError: false
+ params: ' -SourceToolsList @("policheck","credscan")
+ -TsaInstanceURL https://devdiv.visualstudio.com/
+ -TsaProjectName DEVDIV
+ -TsaNotificationEmail mlnetcore@microsoft.com
+ -TsaCodebaseAdmin REDMOND\ericstj
+ -TsaBugAreaPath "DevDiv\Net Libraries"
+ -TsaIterationPath DevDiv
+ -TsaRepositoryName machinelearning
+ -TsaCodebaseName machinelearning
+ -TsaPublish $True'
diff --git a/docs/project-docs/developer-guide.md b/docs/project-docs/developer-guide.md
index e3fca18138..0ddd9f9b46 100644
--- a/docs/project-docs/developer-guide.md
+++ b/docs/project-docs/developer-guide.md
@@ -102,8 +102,8 @@ These steps are demonstrated in this demonstrative [commit](https://github.com/d
During development, there may also arise a need to debug hanging tests. In this scenario, it can be beneficial to collect the memory dump while a given test is hanging.
-In this case, the given needs needs to be implemented according to the Microsoft test framework. Please check out the [Microsoft test framework walkthrough](https://docs.microsoft.com/en-us/visualstudio/test/walkthrough-creating-and-running-unit-tests-for-managed-code?view=vs-2019) and the VSTest [sample](https://github.com/dotnet/samples/tree/master/core/getting-started/unit-testing-using-mstest) demonstrating the "TestClass", "TestMethod", "DataTestMethod", and "DataRow" attributes.
+In this case, the given test needs to be implemented according to the Microsoft test framework. Please check out the [Microsoft test framework walkthrough](https://docs.microsoft.com/en-us/visualstudio/test/walkthrough-creating-and-running-unit-tests-for-managed-code?view=vs-2019) and the VSTest [sample](https://github.com/dotnet/samples/tree/master/core/getting-started/unit-testing-using-mstest) demonstrating the "TestClass", "TestMethod", "DataTestMethod", and "DataRow" attributes.
-Once the unit test(s) are implemented according to VSTest and ready to be debugged, the `useVSTestTask` parameter in `build\ci\job-template.yml` needs to be set to `True`. Once these steps are completed and pushed in your pull request, the unit test(s) will run and produce a full memory dump. At the end of a run, the memory dump `.dmp` file will be availible for downloading and inspection in the published artifacts of the build, in the folder `TestResults`.
+Once the unit test(s) are implemented according to VSTest and ready to be debugged, the `useVSTestTask` parameter in `build\ci\job-template.yml` needs to be set to `True`. Once these steps are completed and pushed in your pull request, the unit test(s) will run and produce a full memory dump. At the end of a run, the memory dump `.dmp` file will be available for downloading and inspection in the published artifacts of the build, in the folder `TestResults`.
-Note: this is only supported on Windows builds, as [ProcDump](https://docs.microsoft.com/en-us/sysinternals/downloads/procdump) is officially only available on Windows.
\ No newline at end of file
+Note: this is only supported on Windows builds, as [ProcDump](https://docs.microsoft.com/en-us/sysinternals/downloads/procdump) is officially only available on Windows.
diff --git a/docs/release-notes/3.0.1/release-3.0.1.md b/docs/release-notes/3.0.1/release-3.0.1.md
new file mode 100644
index 0000000000..2511c03176
--- /dev/null
+++ b/docs/release-notes/3.0.1/release-3.0.1.md
@@ -0,0 +1,38 @@
+# [ML.NET](http://dot.net/ml) 3.0.1
+
+## **New Features**
+- **Add support for Apache.Arrow.Types.TimestampType to DataFrame** ([#6871](https://github.com/dotnet/machinelearning/pull/6871)) - Thanks @asmirnov82!
+
+
+## **Enhancements**
+- **Update TorchSharp to latest version** ([#6954](https://github.com/dotnet/machinelearning/pull/6954))
+- **Reorganize dataframe files** ([#6872](https://github.com/dotnet/machinelearning/pull/6872)) - Thanks @asmirnov82!
+- **Add sample variance and standard deviation to NormalizeMeanVariance** ([#6885](https://github.com/dotnet/machinelearning/pull/6885)) - Thanks @tearlant!
+- **Fixes NER to correctly expand/shrink the labels** ([#6928](https://github.com/dotnet/machinelearning/pull/6928))
+
+
+## **Bug Fixes**
+- **Fix SearchSpace reference not being included** ([#6951](https://github.com/dotnet/machinelearning/pull/6951))
+- **Rename NameEntity to NamedEntity** ([#6917](https://github.com/dotnet/machinelearning/pull/6917))
+- **Fix assert by only accessing idx** ([#6924](https://github.com/dotnet/machinelearning/pull/6924))
+
+
+## **Build / Test updates**
+- **Add Backport github workflow** ([#6944](https://github.com/dotnet/machinelearning/pull/6944))
+- **Branding for 3.0.1** ([#6943](https://github.com/dotnet/machinelearning/pull/6943))
+- **Only use semi-colons for NoWarn - fixes build break** ([#6935](https://github.com/dotnet/machinelearning/pull/6935))
+- **Update dependencies from dotnet/arcade** ([#6703](https://github.com/dotnet/machinelearning/pull/6703))
+- **Update dependencies from dotnet/arcade** ([#6957](https://github.com/dotnet/machinelearning/pull/6957))
+- **Migrate to the 'locker' GitHub action for locking closed/stale issues/PRs** ([#6896](https://github.com/dotnet/machinelearning/pull/6896))
+- **Make double assertions compare with tolerance instead of precision** ([#6923](https://github.com/dotnet/machinelearning/pull/6923))
+- **Don't include the SDK in our helix payload** ([#6918](https://github.com/dotnet/machinelearning/pull/6918))
+
+
+## **Documentation Updates**
+- **Updated ml.net versioning** ([#6907](https://github.com/dotnet/machinelearning/pull/6907))
+- **Update developer-guide.md** ([#6870](https://github.com/dotnet/machinelearning/pull/6870)) - Thanks @computerscienceiscool!
+- **Update release-3.0.0.md** ([#6895](https://github.com/dotnet/machinelearning/pull/6895)) - Thanks @taeerhebend!
+
+
+## **Breaking changes**
+- **Rename NameEntity to NamedEntity** ([#6917](https://github.com/dotnet/machinelearning/pull/6917))
diff --git a/docs/release-notes/3.0/release-3.0.0.md b/docs/release-notes/3.0/release-3.0.0.md
index 594f5d409d..6ee9e04da2 100644
--- a/docs/release-notes/3.0/release-3.0.0.md
+++ b/docs/release-notes/3.0/release-3.0.0.md
@@ -1,7 +1,7 @@
# [ML.NET](http://dot.net/ml) 3.0.0
## **New Features**
-- **Add the ability to use Object Detection using TorchSharp** ([#6605](https://github.com/dotnet/machinelearning/pull/6605)) - We have added a new deep learning model back by TorchSharp that lets you fine tune your own Object Detection model!
+- **Add the ability to use Object Detection using TorchSharp** ([#6605](https://github.com/dotnet/machinelearning/pull/6605)) - We have added a new deep learning model backed by TorchSharp that lets you fine tune your own Object Detection model!
- **Add SamplingKeyColumnName to AutoMLExperiment API** ([#6649](https://github.com/dotnet/machinelearning/pull/6649)) - You can now set the SamplingKeyColumnName when you are using AutoML. Thanks @torronen!
- **Add Object Detection to AutoML Sweeper** ([#6633](https://github.com/dotnet/machinelearning/pull/6633)) - Added Object Detection to the AutoML Sweeper so now they can be used together.
- **Add String Vector support to DataFrame** ([#6628](https://github.com/dotnet/machinelearning/pull/6628)) - Adds support for String Vectors in DataFrame. This also allows for Better IDataView <-> DataFrame conversions.
@@ -132,4 +132,4 @@
- **Fix docs for DataViewRowCursor** ([#6855](https://github.com/dotnet/machinelearning/pull/6855)) - Thanks @Akash190104
## **Breaking changes**
-- None
\ No newline at end of file
+- None
diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj b/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj
index e753ee59f6..628cbe5293 100644
--- a/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj
+++ b/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj
@@ -23,4 +23,5 @@
+
diff --git a/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj b/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj
index 8b40f06dae..4c1cb229e4 100644
--- a/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj
+++ b/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj
@@ -23,6 +23,7 @@
+
@@ -54,14 +55,14 @@
-
+
DnnImageModels\ResNet18Onnx\ResNet18.onnx
PreserveNewest
-
+
DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx
PreserveNewest
diff --git a/docs/samples/Microsoft.ML.Samples.OneDal/Microsoft.ML.Samples.OneDal.csproj b/docs/samples/Microsoft.ML.Samples.OneDal/Microsoft.ML.Samples.OneDal.csproj
index 527354dab4..feeb4e6f23 100755
--- a/docs/samples/Microsoft.ML.Samples.OneDal/Microsoft.ML.Samples.OneDal.csproj
+++ b/docs/samples/Microsoft.ML.Samples.OneDal/Microsoft.ML.Samples.OneDal.csproj
@@ -48,7 +48,7 @@
-
+
diff --git a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj
index 3fba560fd5..8e168ad456 100644
--- a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj
+++ b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj
@@ -981,14 +981,14 @@
-
+
DnnImageModels\ResNet18Onnx\ResNet18.onnx
PreserveNewest
-
+
DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx
PreserveNewest
diff --git a/eng/BranchInfo.props b/eng/BranchInfo.props
index 19dab3e959..1d2155b657 100644
--- a/eng/BranchInfo.props
+++ b/eng/BranchInfo.props
@@ -28,15 +28,15 @@
true
- 3
+ 4
0
0
- 2.0.0-preview.22551.1
+ 3.0.0
0
- 21
+ 22
0
diff --git a/eng/Build.props b/eng/Build.props
index de6a651461..ca19b06276 100644
--- a/eng/Build.props
+++ b/eng/Build.props
@@ -9,7 +9,6 @@
-
diff --git a/eng/Packaging.targets b/eng/Packaging.targets
new file mode 100644
index 0000000000..52ac5a667d
--- /dev/null
+++ b/eng/Packaging.targets
@@ -0,0 +1,47 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ $(TargetsForTfmSpecificContentInPackage);IncludeAdditionalFilesInPackage;
+
+
+
+
+
+
+ <_projectReferenceCopyLocalPaths Include="@(ReferenceCopyLocalPaths->WithMetadataValue('ReferenceSourceTarget', 'ProjectReference')->WithMetadataValue('Pack', 'true'))" />
+
+
+
+
+
+
+
+ <_runtimeFiles Include="$(PackageAssetsPath)$(PackageId)\runtimes\**\*%(NativeAssemblyReference.Identity)*" />
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/eng/Signing.props b/eng/Signing.props
index 91b6ee12dd..976ae1ba79 100644
--- a/eng/Signing.props
+++ b/eng/Signing.props
@@ -1,9 +1,4 @@
-
-
-
-
-
true
diff --git a/eng/Tools.props b/eng/Tools.props
deleted file mode 100644
index a0890081f2..0000000000
--- a/eng/Tools.props
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 0ae3fc4712..9cd9862862 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -7,65 +7,33 @@
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 87d89025bdd8827c016e4083660d31f497670e5c
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 87d89025bdd8827c016e4083660d31f497670e5c
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 87d89025bdd8827c016e4083660d31f497670e5c
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 87d89025bdd8827c016e4083660d31f497670e5c
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 87d89025bdd8827c016e4083660d31f497670e5c
-
- https://github.com/dotnet/arcade-services
- cd705029f2675970b42f9273ae359d0926c5e815
-
-
- https://github.com/dotnet/arcade-services
- cd705029f2675970b42f9273ae359d0926c5e815
-
-
- https://github.com/dotnet/xharness
- 89cb4b1d368e0f15b4df8e02a176dd1f1c33958b
-
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
-
-
- https://github.com/dotnet/roslyn
- d57cda76c2b76cff75487a085d289cfadd99150b
-
-
- https://github.com/dotnet/sourcelink
- 8a3edd1902dbfe3adba65f22e3bb7aa2cc73e97f
+ 87d89025bdd8827c016e4083660d31f497670e5c
-
- https://github.com/dotnet/sourcelink
- 8a3edd1902dbfe3adba65f22e3bb7aa2cc73e97f
-
-
- https://github.com/dotnet/symreader-converter
- c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
-
-
- https://github.com/dotnet/symreader-converter
- c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
-
-
- https://github.com/dotnet/xliff-tasks
- 397ff033b467003d51619f9ac3928e02a4d4178f
+
+ https://github.com/dotnet/arcade
+ 87d89025bdd8827c016e4083660d31f497670e5c
diff --git a/eng/Versions.props b/eng/Versions.props
index 74b5adf15b..510e8f46f0 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -9,14 +9,14 @@
https://github.com/dotnet/arcade/blob/c788ffa83b088cafe9dbffc1cbc8155ba88b2553/Documentation/CorePackages/Versioning.md#output
-->
true
- 3.0.0
+ 4.0.0
preview
1.0.0.0
6.0.0
2.1.0
6.0.0
- 6.7.0
+ 6.9.1
2.88.6
4.5.1
4.5.0
@@ -39,7 +39,7 @@
3.3.0
3.9.0
1.0.0-beta.23509.3
- 1.14.0
+ 1.16.3
0.0.0.12
1.12.4
3.1.2
3.1.2
3.3.1
4.5.0
- 1.1.0-beta-20206-02
- 1.1.225302
4.3.6
1.2.0
5.4.7
0.13.1
- 6.0.9
- 8.0.0
+ 6.0.26
+ 8.0.1
5.10.2
1.1.2-beta1.23431.1
- 8.0.0-beta.23265.1
+ 9.0.0-beta.24151.5
2.1.0
3.0.1
0.0.6-test
0.0.13-test
0.0.6-test
0.0.7-test
- 4.8.5
+ 4.8.6
1.0.118
1.2.7
- 2.4.2
false
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index 6e99723945..6c65e81925 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -153,7 +153,7 @@ if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
-$dotnetVersions = @('5','6','7')
+$dotnetVersions = @('5','6','7','8')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index 8af7d899db..d387c7eac9 100755
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -105,7 +105,7 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet3.1-internal-transport')
fi
-DotNetVersions=('5' '6' '7')
+DotNetVersions=('5' '6' '7' '8')
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
diff --git a/eng/common/build.cmd b/eng/common/build.cmd
new file mode 100644
index 0000000000..99daf368ab
--- /dev/null
+++ b/eng/common/build.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 33a6f2d0e2..510458eb35 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -19,6 +19,8 @@ Param(
[switch] $pack,
[switch] $publish,
[switch] $clean,
+ [switch] $verticalBuild,
+ [switch][Alias('pb')]$productBuild,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -58,6 +60,8 @@ function Print-Usage() {
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
+ Write-Host " -verticalBuild Run in 'vertical build' infra mode."
+ Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
Write-Host ""
Write-Host "Advanced settings:"
@@ -120,6 +124,8 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
+ /p:DotNetBuildRepo=$($productBuild -or $verticalBuild) `
+ /p:ArcadeBuildVertical=$verticalBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
diff --git a/eng/common/build.sh b/eng/common/build.sh
index 50af40cdd2..bec7d02594 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -22,6 +22,9 @@ usage()
echo " --sourceBuild Source-build the solution (short: -sb)"
echo " Will additionally trigger the following actions: --restore, --build, --pack"
echo " If --configuration is not set explicitly, will also set it to 'Release'"
+ echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
+ echo " Will additionally trigger the following actions: --restore, --build, --pack"
+ echo " If --configuration is not set explicitly, will also set it to 'Release'"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution (short: -t)"
echo " --integrationTest Run all integration tests in the solution"
@@ -59,6 +62,8 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
+vertical_build=false
+product_build=false
rebuild=false
test=false
integration_test=false
@@ -105,7 +110,7 @@ while [[ $# > 0 ]]; do
-binarylog|-bl)
binary_log=true
;;
- -excludeCIBinarylog|-nobl)
+ -excludecibinarylog|-nobl)
exclude_ci_binary_log=true
;;
-pipelineslog|-pl)
@@ -126,6 +131,20 @@ while [[ $# > 0 ]]; do
-sourcebuild|-sb)
build=true
source_build=true
+ product_build=true
+ restore=true
+ pack=true
+ ;;
+ -productBuild|-pb)
+ build=true
+ product_build=true
+ restore=true
+ pack=true
+ ;;
+ -verticalbuild|-vb)
+ build=true
+ vertical_build=true
+ product_build=true
restore=true
pack=true
;;
@@ -219,7 +238,10 @@ function Build {
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
+ /p:DotNetBuildRepo=$product_build \
/p:ArcadeBuildFromSource=$source_build \
+ /p:DotNetBuildSourceOnly=$source_build \
+ /p:ArcadeBuildVertical=$vertical_build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 9caf9b021d..99a9dd6bb6 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -8,7 +8,7 @@ usage()
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
- echo " for FreeBSD can be: freebsd12, freebsd13"
+ echo " for FreeBSD can be: freebsd13, freebsd14"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
@@ -71,9 +71,9 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
-__FreeBSDBase="12.4-RELEASE"
+__FreeBSDBase="13.2-RELEASE"
__FreeBSDPkg="1.17.0"
-__FreeBSDABI="12"
+__FreeBSDABI="13"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
@@ -182,12 +182,12 @@ while :; do
__AlpinePackages="${__AlpinePackages// lldb-dev/}"
__QEMUArch=riscv64
__UbuntuArch=riscv64
- __UbuntuRepo="http://deb.debian.org/debian-ports"
+ __UbuntuRepo="http://deb.debian.org/debian"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
- if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
- __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
+ if [[ -e "/usr/share/keyrings/debian-archive-keyring.gpg" ]]; then
+ __Keyring="--keyring /usr/share/keyrings/debian-archive-keyring.gpg --include=debian-archive-keyring"
fi
;;
ppc64le)
@@ -334,14 +334,14 @@ while :; do
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
fi
;;
- freebsd12)
+ freebsd13)
__CodeName=freebsd
__SkipUnmount=1
;;
- freebsd13)
+ freebsd14)
__CodeName=freebsd
- __FreeBSDBase="13.2-RELEASE"
- __FreeBSDABI="13"
+ __FreeBSDBase="14.0-RELEASE"
+ __FreeBSDABI="14"
__SkipUnmount=1
;;
illumos)
@@ -487,7 +487,7 @@ if [[ "$__CodeName" == "alpine" ]]; then
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
- search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
+ search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid
index 65f730d224..b5f7a7e6e1 100644
--- a/eng/common/cross/riscv64/sources.list.sid
+++ b/eng/common/cross/riscv64/sources.list.sid
@@ -1 +1 @@
-deb http://deb.debian.org/debian-ports sid main
+deb http://deb.debian.org/debian sid main
diff --git a/eng/common/cross/riscv64/tizen/tizen.patch b/eng/common/cross/riscv64/tizen/tizen.patch
new file mode 100644
index 0000000000..eb6d1c0747
--- /dev/null
+++ b/eng/common/cross/riscv64/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf64-littleriscv)
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) )
diff --git a/eng/common/cross/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh
index ac84173d44..ba31c93285 100644
--- a/eng/common/cross/tizen-build-rootfs.sh
+++ b/eng/common/cross/tizen-build-rootfs.sh
@@ -22,6 +22,10 @@ case "$ARCH" in
TIZEN_ARCH="x86_64"
LINK_ARCH="x86"
;;
+ riscv64)
+ TIZEN_ARCH="riscv64"
+ LINK_ARCH="riscv"
+ ;;
*)
echo "Unsupported architecture for tizen: $ARCH"
exit 1
@@ -58,4 +62,21 @@ rm -rf $TIZEN_TMP_DIR
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-${LINK_ARCH} ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
+ echo "Fixing broken symlinks in $PWD"
+ rm ./usr/lib64/libresolv.so
+ ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
+ rm ./usr/lib64/libpthread.so
+ ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
+ rm ./usr/lib64/libdl.so
+ ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
+ rm ./usr/lib64/libutil.so
+ ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
+ rm ./usr/lib64/libm.so
+ ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
+ rm ./usr/lib64/librt.so
+ ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
+ rm ./lib/ld-linux-riscv64-lp64d.so.1
+ ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
+fi
echo "< 0 ]]; do
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
index abd045a324..7e69e3a9e2 100755
--- a/eng/common/dotnet-install.sh
+++ b/eng/common/dotnet-install.sh
@@ -54,6 +54,10 @@ cpuname=$(uname -m)
case $cpuname in
arm64|aarch64)
buildarch=arm64
+ if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+ # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+ buildarch=arm
+ fi
;;
loongarch64)
buildarch=loongarch64
diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj
index d7f185856e..c1323bf412 100644
--- a/eng/common/helixpublish.proj
+++ b/eng/common/helixpublish.proj
@@ -1,3 +1,4 @@
+
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
index dbf99d82a5..a735fe9a13 100644
--- a/eng/common/internal/Directory.Build.props
+++ b/eng/common/internal/Directory.Build.props
@@ -1,4 +1,6 @@
+
+
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
index 7f5ce6d608..8fa77e5b18 100644
--- a/eng/common/internal/Tools.csproj
+++ b/eng/common/internal/Tools.csproj
@@ -1,5 +1,6 @@
+
net472
false
@@ -27,4 +28,5 @@
+
diff --git a/eng/common/loc/P22DotNetHtmlLocalization.lss b/eng/common/loc/P22DotNetHtmlLocalization.lss
index 6661fed566..5d892d6193 100644
Binary files a/eng/common/loc/P22DotNetHtmlLocalization.lss and b/eng/common/loc/P22DotNetHtmlLocalization.lss differ
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
index 517401b688..f5c1ec7eaf 100644
--- a/eng/common/native/init-compiler.sh
+++ b/eng/common/native/init-compiler.sh
@@ -63,7 +63,7 @@ if [ -z "$CLR_CC" ]; then
# Set default versions
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
- if [ "$compiler" = "clang" ]; then versions="16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
+ if [ "$compiler" = "clang" ]; then versions="17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi
for version in $versions; do
diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh
new file mode 100644
index 0000000000..5dcbfd700f
--- /dev/null
+++ b/eng/common/native/init-distro-rid.sh
@@ -0,0 +1,126 @@
+#!/bin/sh
+
+# getNonPortableDistroRid
+#
+# Input:
+# targetOs: (str)
+# targetArch: (str)
+# rootfsDir: (str)
+#
+# Return:
+# non-portable rid
+getNonPortableDistroRid()
+{
+ targetOs="$1"
+ targetArch="$2"
+ rootfsDir="$3"
+ nonPortableRid=""
+
+ if [ "$targetOs" = "linux" ]; then
+ # shellcheck disable=SC1091
+ if [ -e "${rootfsDir}/etc/os-release" ]; then
+ . "${rootfsDir}/etc/os-release"
+ if [ "${ID}" = "rhel" ] || [ "${ID}" = "rocky" ] || [ "${ID}" = "alpine" ]; then
+ VERSION_ID="${VERSION_ID%.*}" # Remove the last version digit for these distros
+ fi
+
+ if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
+ nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
+ else
+ # Rolling release distros either do not set VERSION_ID, set it as blank or
+ # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux);
+ # so omit it here to be consistent with everything else.
+ nonPortableRid="${ID}-${targetArch}"
+ fi
+ elif [ -e "${rootfsDir}/android_platform" ]; then
+ # shellcheck disable=SC1091
+ . "${rootfsDir}/android_platform"
+ nonPortableRid="$RID"
+ fi
+ fi
+
+ if [ "$targetOs" = "freebsd" ]; then
+ # $rootfsDir can be empty. freebsd-version is a shell script and should always work.
+ __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
+ nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
+ elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
+ __android_sdk_version=$(getprop ro.build.version.sdk)
+ nonPortableRid="android.$__android_sdk_version-${targetArch}"
+ elif [ "$targetOs" = "illumos" ]; then
+ __uname_version=$(uname -v)
+ case "$__uname_version" in
+ omnios-*)
+ __omnios_major_version=$(echo "$__uname_version" | cut -c9-10)
+ nonPortableRid="omnios.$__omnios_major_version-${targetArch}"
+ ;;
+ joyent_*)
+ __smartos_major_version=$(echo "$__uname_version" | cut -c9-10)
+ nonPortableRid="smartos.$__smartos_major_version-${targetArch}"
+ ;;
+ *)
+ nonPortableRid="illumos-${targetArch}"
+ ;;
+ esac
+ elif [ "$targetOs" = "solaris" ]; then
+ __uname_version=$(uname -v)
+ __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
+ nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
+ elif [ "$targetOs" = "haiku" ]; then
+ __uname_release="$(uname -r)"
+ nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
+ fi
+
+ echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
+}
+
+# initDistroRidGlobal
+#
+# Input:
+# os: (str)
+# arch: (str)
+# rootfsDir?: (nullable:string)
+#
+# Return:
+# None
+#
+# Notes:
+# It is important to note that the function does not return anything, but it
+# exports the following variables on success:
+# __DistroRid : Non-portable rid of the target platform.
+# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
+initDistroRidGlobal()
+{
+ targetOs="$1"
+ targetArch="$2"
+ rootfsDir=""
+ if [ $# -ge 3 ]; then
+ rootfsDir="$3"
+ fi
+
+ if [ -n "${rootfsDir}" ]; then
+ # We may have a cross build. Check for the existence of the rootfsDir
+ if [ ! -e "${rootfsDir}" ]; then
+ echo "Error: rootfsDir has been passed, but the location is not valid."
+ exit 1
+ fi
+ fi
+
+ __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}")
+
+ if [ -z "${__PortableTargetOS:-}" ]; then
+ __PortableTargetOS="$targetOs"
+
+ STRINGS="$(command -v strings || true)"
+ if [ -z "$STRINGS" ]; then
+ STRINGS="$(command -v llvm-strings || true)"
+ fi
+
+ # Check for musl-based distros (e.g. Alpine Linux, Void Linux).
+ if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
+ ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
+ __PortableTargetOS="linux-musl"
+ fi
+ fi
+
+ export __DistroRid __PortableTargetOS
+}
diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh
new file mode 100644
index 0000000000..38921d4338
--- /dev/null
+++ b/eng/common/native/init-os-and-arch.sh
@@ -0,0 +1,85 @@
+#!/bin/sh
+
+# Use uname to determine what the OS is.
+OSName=$(uname -s | tr '[:upper:]' '[:lower:]')
+
+if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
+ OSName="android"
+fi
+
+case "$OSName" in
+freebsd|linux|netbsd|openbsd|sunos|android|haiku)
+ os="$OSName" ;;
+darwin)
+ os=osx ;;
+*)
+ echo "Unsupported OS $OSName detected!"
+ exit 1 ;;
+esac
+
+# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html
+# and `uname -p` returns processor type (e.g. i386 on amd64).
+# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html.
+if [ "$os" = "sunos" ]; then
+ if uname -o 2>&1 | grep -q illumos; then
+ os="illumos"
+ else
+ os="solaris"
+ fi
+ CPUName=$(isainfo -n)
+else
+ # For the rest of the operating systems, use uname(1) to determine what the CPU is.
+ CPUName=$(uname -m)
+fi
+
+case "$CPUName" in
+ arm64|aarch64)
+ arch=arm64
+ if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+ # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+ arch=arm
+ fi
+ ;;
+
+ loongarch64)
+ arch=loongarch64
+ ;;
+
+ riscv64)
+ arch=riscv64
+ ;;
+
+ amd64|x86_64)
+ arch=x64
+ ;;
+
+ armv7l|armv8l)
+ # shellcheck disable=SC1091
+ if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
+ arch=armel
+ else
+ arch=arm
+ fi
+ ;;
+
+ armv6l)
+ arch=armv6
+ ;;
+
+ i[3-6]86)
+ echo "Unsupported CPU $CPUName detected, build might not succeed!"
+ arch=x86
+ ;;
+
+ s390x)
+ arch=s390x
+ ;;
+
+ ppc64le)
+ arch=ppc64le
+ ;;
+ *)
+ echo "Unknown CPU $CPUName detected!"
+ exit 1
+ ;;
+esac
diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1
index de2d957922..49938f0c89 100644
--- a/eng/common/post-build/add-build-to-channel.ps1
+++ b/eng/common/post-build/add-build-to-channel.ps1
@@ -2,7 +2,7 @@ param(
[Parameter(Mandatory=$true)][int] $BuildId,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com/',
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net/',
[Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1
index 63f3464c98..1728f035a9 100644
--- a/eng/common/post-build/check-channel-consistency.ps1
+++ b/eng/common/post-build/check-channel-consistency.ps1
@@ -7,7 +7,7 @@ try {
. $PSScriptRoot\post-build-utils.ps1
if ($PromoteToChannels -eq "") {
- Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
+ Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
ExitWithExitCode 0
}
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
index 8508397d77..5a3a32ea8d 100644
--- a/eng/common/post-build/publish-using-darc.ps1
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -3,7 +3,7 @@ param(
[Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
[Parameter(Mandatory=$true)][string] $AzdoToken,
[Parameter(Mandatory=$true)][string] $MaestroToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com/',
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net/',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
@@ -12,7 +12,7 @@ param(
try {
. $PSScriptRoot\post-build-utils.ps1
- $darc = Get-Darc
+ $darc = Get-Darc
$optionalParams = [System.Collections.ArrayList]::new()
@@ -46,7 +46,7 @@ try {
}
Write-Host 'done.'
-}
+}
catch {
Write-Host $_
Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1
new file mode 100644
index 0000000000..82d91f6fd0
--- /dev/null
+++ b/eng/common/post-build/redact-logs.ps1
@@ -0,0 +1,81 @@
+[CmdletBinding(PositionalBinding=$False)]
+param(
+ [Parameter(Mandatory=$true, Position=0)][string] $InputPath,
+ [Parameter(Mandatory=$true)][string] $BinlogToolVersion,
+ [Parameter(Mandatory=$false)][string] $DotnetPath,
+ [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
+ # File with strings to redact - separated by newlines.
+ # For comments start the line with '# ' - such lines are ignored
+ [Parameter(Mandatory=$false)][string] $TokensFilePath,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $packageName = 'binlogtool'
+
+ $dotnet = $DotnetPath
+
+ if (!$dotnet) {
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ }
+
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$packageName*") {
+ & "$dotnet" tool uninstall $packageName -g
+ }
+
+ $toolPath = "$PSScriptRoot\..\..\..\.tools"
+ $verbosity = 'minimal'
+
+ New-Item -ItemType Directory -Force -Path $toolPath
+
+ Push-Location -Path $toolPath
+
+ try {
+ Write-Host "Installing Binlog redactor CLI..."
+ Write-Host "'$dotnet' new tool-manifest"
+ & "$dotnet" new tool-manifest
+ Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
+ & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
+
+ if (Test-Path $TokensFilePath) {
+ Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
+ $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
+ }
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ Foreach ($p in $TokensToRedact)
+ {
+ if($p -match '^\$\(.*\)$')
+ {
+ Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p)
+ }
+ elseif($p)
+ {
+ $optionalParams.Add("-p:" + $p) | Out-Null
+ }
+ }
+
+ & $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+ Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now."
+ }
+ }
+ finally {
+ Pop-Location
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
index 55dea518ac..ac9a95778f 100644
--- a/eng/common/post-build/trigger-subscriptions.ps1
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -2,7 +2,7 @@ param(
[Parameter(Mandatory=$true)][string] $SourceRepo,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com/',
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net/',
[Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index e10a596879..091023970f 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.4.1" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.5" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1
index bdbf49e6c7..27f5a4115f 100644
--- a/eng/common/sdl/configure-sdl-tool.ps1
+++ b/eng/common/sdl/configure-sdl-tool.ps1
@@ -17,7 +17,9 @@ Param(
# Optional: Additional params to add to any tool using PoliCheck.
[string[]] $PoliCheckAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using CodeQL/Semmle.
- [string[]] $CodeQLAdditionalRunConfigParams
+ [string[]] $CodeQLAdditionalRunConfigParams,
+ # Optional: Additional params to add to any tool using Binskim.
+ [string[]] $BinskimAdditionalRunConfigParams
)
$ErrorActionPreference = 'Stop'
@@ -69,22 +71,34 @@ try {
$gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
# For some tools, add default and automatic args.
- if ($tool.Name -eq 'credscan') {
- if ($targetDirectory) {
- $tool.Args += "`"TargetDirectory < $TargetDirectory`""
+ switch -Exact ($tool.Name) {
+ 'credscan' {
+ if ($targetDirectory) {
+ $tool.Args += "`"TargetDirectory < $TargetDirectory`""
+ }
+ $tool.Args += "`"OutputType < pre`""
+ $tool.Args += $CrScanAdditionalRunConfigParams
}
- $tool.Args += "`"OutputType < pre`""
- $tool.Args += $CrScanAdditionalRunConfigParams
- } elseif ($tool.Name -eq 'policheck') {
- if ($targetDirectory) {
- $tool.Args += "`"Target < $TargetDirectory`""
+ 'policheck' {
+ if ($targetDirectory) {
+ $tool.Args += "`"Target < $TargetDirectory`""
+ }
+ $tool.Args += $PoliCheckAdditionalRunConfigParams
}
- $tool.Args += $PoliCheckAdditionalRunConfigParams
- } elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') {
- if ($targetDirectory) {
- $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
+ {$_ -in 'semmle', 'codeql'} {
+ if ($targetDirectory) {
+ $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
+ }
+ $tool.Args += $CodeQLAdditionalRunConfigParams
+ }
+ 'binskim' {
+ if ($targetDirectory) {
+ # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
+ # We are excluding all `_.pdb` files from the scan.
+ $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
+ }
+ $tool.Args += $BinskimAdditionalRunConfigParams
}
- $tool.Args += $CodeQLAdditionalRunConfigParams
}
# Create variable pointing to the args array directly so we can use splat syntax later.
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
index 4797e012c7..4715d75e97 100644
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -35,6 +35,7 @@ Param(
[string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
[string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
[string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
+ [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1")
[bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
)
@@ -107,7 +108,8 @@ try {
-GuardianLoggerLevel $GuardianLoggerLevel `
-CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
-PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
- -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams
+ -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams `
+ -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams
if ($BreakOnFailure) {
Exit-IfNZEC "Sdl"
}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
index 7f28d9c59e..f031ed5b25 100644
--- a/eng/common/sdl/extract-artifact-packages.ps1
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -35,31 +35,33 @@ try {
param(
[string] $PackagePath # Full path to a NuGet package
)
-
+
if (!(Test-Path $PackagePath)) {
Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
ExitWithExitCode 1
}
-
+
$RelevantExtensions = @('.dll', '.exe', '.pdb')
Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
-
+
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
-
+
Add-Type -AssemblyName System.IO.Compression.FileSystem
-
+
[System.IO.Directory]::CreateDirectory($ExtractPath);
-
+
try {
$zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
$zip.Entries |
Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
ForEach-Object {
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
-
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+ $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
+ [System.IO.Directory]::CreateDirectory($TargetPath);
+
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
}
}
catch {
diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1
new file mode 100644
index 0000000000..0daa2a9e94
--- /dev/null
+++ b/eng/common/sdl/trim-assets-version.ps1
@@ -0,0 +1,75 @@
+<#
+.SYNOPSIS
+Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name.
+
+.PARAMETER InputPath
+Full path to directory where artifact packages are stored
+
+.PARAMETER Recursive
+Search for NuGet packages recursively
+
+#>
+
+Param(
+ [string] $InputPath,
+ [bool] $Recursive = $true
+)
+
+$CliToolName = "Microsoft.DotNet.VersionTools.Cli"
+
+function Install-VersionTools-Cli {
+ param(
+ [Parameter(Mandatory=$true)][string]$Version
+ )
+
+ Write-Host "Installing the package '$CliToolName' with a version of '$version' ..."
+ $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
+
+ $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
+ Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
+}
+
+# -------------------------------------------------------------------
+
+if (!(Test-Path $InputPath)) {
+ Write-Host "Input Path '$InputPath' does not exist"
+ ExitWithExitCode 1
+}
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+# `tools.ps1` checks $ci to perform some actions. Since the SDL
+# scripts don't necessarily execute in the same agent that run the
+# build.ps1/sh script this variable isn't automatically set.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
+try {
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+
+ $toolsetVersion = Read-ArcadeSdkVersion
+ Install-VersionTools-Cli -Version $toolsetVersion
+
+ $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
+ if ($null -eq $cliToolFound) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
+ ExitWithExitCode 1
+ }
+
+ Exec-BlockVerbosely {
+ & "$dotnet" $CliToolName trim-assets-version `
+ --assets-path $InputPath `
+ --recursive $Recursive
+ Exit-IfNZEC "Sdl"
+ }
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
new file mode 100644
index 0000000000..352607308f
--- /dev/null
+++ b/eng/common/templates-official/job/job.yml
@@ -0,0 +1,255 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ enableBuildRetry: false
+ disableComponentGovernance: ''
+ componentGovernanceIgnoreDirectories: ''
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ runAsPublic: false
+# Sbom related params
+ enableSbom: true
+ PackageVersion: 7.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle template variable syntax
+ # example:
+ # - template: path/to/template.yml
+ # parameters:
+ # [key]: [value]
+ - ${{ if ne(variable.template, '') }}:
+ - template: ${{ variable.template }}
+ ${{ if ne(variable.parameters, '') }}:
+ parameters: ${{ variable.parameters }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@3
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@1
+
+ - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
+ continueOnError: true
+
+ - template: /eng/common/templates-official/steps/component-governance.yml
+ parameters:
+ ${{ if eq(parameters.disableComponentGovernance, '') }}:
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
+ disableComponentGovernance: false
+ ${{ else }}:
+ disableComponentGovernance: true
+ ${{ else }}:
+ disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
+ componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - publish: artifacts/log
+ artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: Publish logs
+ continueOnError: true
+ condition: always()
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/templates-official/steps/generate-sbom.yml
+ parameters:
+      PackageVersion: ${{ parameters.PackageVersion }}
+      BuildDropPath: ${{ parameters.BuildDropPath }}
+ IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration
+ artifact: BuildConfiguration
+ displayName: Publish build retry configuration
+ continueOnError: true
diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml
new file mode 100644
index 0000000000..ba9ba49303
--- /dev/null
+++ b/eng/common/templates-official/job/onelocbuild.yml
@@ -0,0 +1,112 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(Build.SourcesDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ SkipLocProjectJsonGeneration: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+ JobNameSuffix: ''
+
+jobs:
+- job: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022-pt
+ os: windows
+
+ steps:
+ - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
+ - task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish Localization Files
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
+ PublishLocation: Container
+ ArtifactName: Loc
+ condition: ${{ parameters.condition }}
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish LocProject.json
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
+ PublishLocation: Container
+ ArtifactName: Loc
+    condition: ${{ parameters.condition }}
diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml
new file mode 100644
index 0000000000..5f54135569
--- /dev/null
+++ b/eng/common/templates-official/job/publish-build-assets.yml
@@ -0,0 +1,157 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishAssetsImmediately: false
+
+  artifactsPublishingAdditionalParameters: ''
+
+  signingValidationAdditionalParameters: ''
+
+  symbolPublishingAdditionalParameters: ''
+
+  # Optional: populated when promoting an already-published build
+  BARBuildId: ''
+  PromoteToChannelIds: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+ timeoutInMinutes: 150
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ variables:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/templates-official/post-build/common-variables.yml
+
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022-pt
+ os: windows
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - checkout: self
+ fetchDepth: 3
+ clean: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro.dot.net
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish ReleaseConfigs Artifact
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - task: powershell@2
+ displayName: Check if SymbolPublishingExclusionsFile.txt exists
+ inputs:
+ targetType: inline
+ script: |
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if(Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+ }
+ else{
+          Write-Host "Symbols Exclusion file does not exist"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+ }
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish SymbolPublishingExclusionsFile Artifact
+ condition: eq(variables['SymbolExclusionFile'], 'true')
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/templates-official/steps/publish-logs.yml
+ parameters:
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml
new file mode 100644
index 0000000000..50f04e642a
--- /dev/null
+++ b/eng/common/templates-official/job/source-build.yml
@@ -0,0 +1,67 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+ # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals build.ubuntu.1804.amd64
+
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ image: 1es-mariner-2-pt
+ os: linux
+
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - template: /eng/common/templates-official/steps/source-build.yml
+ parameters:
+ platform: ${{ parameters.platform }}
diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml
new file mode 100644
index 0000000000..757af7c7c4
--- /dev/null
+++ b/eng/common/templates-official/job/source-index-stage1.yml
@@ -0,0 +1,67 @@
+parameters:
+ runAsPublic: false
+ sourceIndexPackageVersion: 1.0.1-20231213.4
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: ''
+ dependsOn: ''
+ pool: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: SourceIndexPackageVersion
+ value: ${{ parameters.sourceIndexPackageVersion }}
+ - name: SourceIndexPackageSource
+ value: ${{ parameters.sourceIndexPackageSource }}
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: source-dot-net stage1 variables
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $(DncEngPublicBuildPool)
+ image: windows.vs2022.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2022.amd64
+
+ steps:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - task: UseDotNet@2
+ displayName: Use .NET 8 SDK
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: Download Tools
+ # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: Process Binlog into indexable sln
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
+ displayName: Upload stage1 artifacts to source index
+ env:
+ BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
new file mode 100644
index 0000000000..b68d3c2f31
--- /dev/null
+++ b/eng/common/templates-official/jobs/codeql-build.yml
@@ -0,0 +1,31 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+
+jobs:
+- template: /eng/common/templates-official/jobs/jobs.yml
+ parameters:
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishTestResults: false
+ enablePublishBuildAssets: false
+ enablePublishUsingPipelines: false
+ enableTelemetry: true
+
+ variables:
+ - group: Publish-Build-Assets
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.109.0
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+ jobs: ${{ parameters.jobs }}
+
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
new file mode 100644
index 0000000000..857a0f8ba4
--- /dev/null
+++ b/eng/common/templates-official/jobs/jobs.yml
@@ -0,0 +1,97 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/templates-official/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+ # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
+ publishAssetsImmediately: false
+
+ # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - template: ../job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/templates-official/jobs/source-build.yml
+ parameters:
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml
new file mode 100644
index 0000000000..08e5db9bb1
--- /dev/null
+++ b/eng/common/templates-official/jobs/source-build.yml
@@ -0,0 +1,46 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+ allCompletedJobId: ''
+
+ # See /eng/common/templates-official/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/templates-official/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/templates-official/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml
new file mode 100644
index 0000000000..b9ede10bf0
--- /dev/null
+++ b/eng/common/templates-official/post-build/common-variables.yml
@@ -0,0 +1,24 @@
+variables:
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro.dot.net/"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.11
+
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml
new file mode 100644
index 0000000000..5c98fe1c0f
--- /dev/null
+++ b/eng/common/templates-official/post-build/post-build.yml
@@ -0,0 +1,285 @@
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V1 is no longer supported
+ # Publishing V2 is no longer supported
+ # Publishing V3 is the default
+ - name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+
+ - name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+ - name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+ - name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+ - name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+ - name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+ - name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+ - name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+ - name: SDLValidationParameters
+ type: object
+ default:
+ enable: false
+ publishGdn: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ - name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+ - name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+ - name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+ # Which stages should finish execution before post-build stages start
+ - name: validateDependsOn
+ type: object
+ default:
+ - build
+
+ - name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+ # Optional: Call asset publishing rather than running in a separate stage
+ - name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: common-variables.yml
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022-pt
+ os: windows
+
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022-pt
+ os: windows
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ itemPattern: |
+ **
+ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@1
+ displayName: 'Authenticate to AzDO Feeds'
+
+ # Signing validation will optionally work with the buildmanifest file which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: ../steps/publish-logs.yml
+ parameters:
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
+
+ - job:
+ displayName: SourceLink Validation
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022-pt
+ os: windows
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: common-variables.yml
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022-pt
+ os: windows
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml
new file mode 100644
index 0000000000..0c87f149a4
--- /dev/null
+++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml
@@ -0,0 +1,70 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
+steps:
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ pwsh: true
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml
new file mode 100644
index 0000000000..da669030da
--- /dev/null
+++ b/eng/common/templates-official/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml
new file mode 100644
index 0000000000..f67a210d62
--- /dev/null
+++ b/eng/common/templates-official/steps/add-build-to-channel.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
new file mode 100644
index 0000000000..0ecec47b0c
--- /dev/null
+++ b/eng/common/templates-official/steps/component-governance.yml
@@ -0,0 +1,13 @@
+parameters:
+ disableComponentGovernance: false
+ componentGovernanceIgnoreDirectories: ''
+
+steps:
+- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
+ - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
+- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
+ - task: ComponentGovernanceComponentDetection@0
+ continueOnError: true
+ inputs:
+ ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml
new file mode 100644
index 0000000000..488b560e8b
--- /dev/null
+++ b/eng/common/templates-official/steps/generate-sbom.yml
@@ -0,0 +1,48 @@
+# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
+# PackageName - The name of the package this SBOM represents.
+# PackageVersion - The version of the package this SBOM represents.
+# ManifestDirPath - The path of the directory where the generated manifest files will be placed
+# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
+
+parameters:
+ PackageVersion: 7.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ PackageName: '.NET'
+ ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+ IgnoreDirectories: ''
+ sbomContinueOnError: true
+
+steps:
+- task: PowerShell@2
+ displayName: Prep for SBOM generation in (Non-linux)
+ condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+ inputs:
+ filePath: ./eng/common/generate-sbom-prep.ps1
+ arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+ chmod +x ./eng/common/generate-sbom-prep.sh
+ ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+ displayName: Prep for SBOM generation in (Linux)
+ condition: eq(variables['Agent.Os'], 'Linux')
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'Generate SBOM manifest'
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+ inputs:
+ PackageName: ${{ parameters.packageName }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ PackageVersion: ${{ parameters.packageVersion }}
+ ManifestDirPath: ${{ parameters.manifestDirPath }}
+ ${{ if ne(parameters.IgnoreDirectories, '') }}:
+ AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
+
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: Publish SBOM manifest
+ continueOnError: ${{parameters.sbomContinueOnError}}
+ inputs:
+ targetPath: '${{parameters.manifestDirPath}}'
+ artifactName: $(ARTIFACT_NAME)
+
diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml
new file mode 100644
index 0000000000..84b2f559c5
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-logs.yml
@@ -0,0 +1,49 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+ CustomSensitiveDataList: ''
+ # A default - in case value from eng/common/templates-official/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.11'
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+ # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
+ # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ # If the file exists - sensitive data for redaction will be sourced from it
+ # (single entry per line, lines starting with '# ' are considered comments and skipped)
+ arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(MaestroAccessToken)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(akams-client-secret)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
+- task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
+ PublishLocation: Container
+ ArtifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml
new file mode 100644
index 0000000000..83d97a26a0
--- /dev/null
+++ b/eng/common/templates-official/steps/retain-build.yml
@@ -0,0 +1,28 @@
+parameters:
+ # Optional azure devops PAT with build execute permissions for the build's organization,
+ # only needed if the build that should be retained ran on a different organization than
+ # the pipeline where this template is executing from
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+ # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
+ # Defaults to the organization the current pipeline is running on
+ AzdoOrgUri: '$(System.CollectionUri)'
+ # Azure devops project for the build. Defaults to the project the current pipeline is running on
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+ - task: powershell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+ -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
new file mode 100644
index 0000000000..3eb7e2d5f8
--- /dev/null
+++ b/eng/common/templates-official/steps/send-to-helix.yml
@@ -0,0 +1,91 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
new file mode 100644
index 0000000000..b1db70842f
--- /dev/null
+++ b/eng/common/templates-official/steps/source-build.yml
@@ -0,0 +1,129 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
+ # In that case, call the feed setup script to add internal feeds corresponding to public ones.
+ # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
+  # This is because SetupNugetSources.sh will alter the current NuGet.config file, and we need to preserve those
+ # changes.
+ internalRestoreArgs=
+ if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
+ # Temporarily work around https://github.com/dotnet/arcade/issues/7709
+ chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+ $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ runtimeOsArgs=
+ if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
+ runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
+ fi
+
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
+ assetManifestFileName=SourceBuild_RidSpecific.xml
+ if [ '${{ parameters.platform.name }}' != '' ]; then
+ assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
+ $targetRidArgs \
+ $runtimeOsArgs \
+ $baseOsArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true \
+ /p:AssetManifestFileName=$assetManifestFileName
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/sb/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: Publish BuildLogs
+ inputs:
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+ displayName: Component Detection (Exclude upstream cache)
+ inputs:
+ ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml
new file mode 100644
index 0000000000..beab7d1bfb
--- /dev/null
+++ b/eng/common/templates-official/variables/pool-providers.yml
@@ -0,0 +1,45 @@
+# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
+# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
+
+# Motivation:
+# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
+# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
+# (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
+# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
+# team needs to move resources around and create new and potentially differently-named pools. Using this template
+# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
+
+# How to use:
+# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
+# If we find alternate naming conventions in broad usage it can be added to the condition below.
+#
+# First, import the template in an arcade-ified repo to pick up the variables, e.g.:
+#
+# variables:
+# - template: /eng/common/templates-official/variables/pool-providers.yml
+#
+# ... then anywhere specifying the pool provider use the runtime variables,
+# $(DncEngInternalBuildPool)
+#
+# pool:
+# name: $(DncEngInternalBuildPool)
+# image: 1es-windows-2022-pt
+
+variables:
+ # Coalesce the target and source branches so we know when a PR targets a release branch
+ # If these variables are somehow missing, fall back to main (tends to have more capacity)
+
+ # Any new -Svc alternative pools should have variables added here to allow for splitting work
+
+ - name: DncEngInternalBuildPool
+ value: $[
+ replace(
+ replace(
+ eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+ True,
+ 'NetCore1ESPool-Svc-Internal'
+ ),
+ False,
+ 'NetCore1ESPool-Internal'
+ )
+ ]
\ No newline at end of file
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
new file mode 100644
index 0000000000..dbdd66d4a4
--- /dev/null
+++ b/eng/common/templates-official/variables/sdl-variables.yml
@@ -0,0 +1,7 @@
+variables:
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+# sync with the packages.config file.
+- name: DefaultGuardianVersion
+ value: 0.109.0
+- name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
index 7aabaa1801..7870f93bc1 100644
--- a/eng/common/templates/job/execute-sdl.yml
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -105,6 +105,11 @@ jobs:
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
+ - powershell: eng/common/sdl/trim-assets-version.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts
+ displayName: Trim the version from the NuGet packages
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 44ad26abf5..01c0dd995e 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -75,6 +75,10 @@ jobs:
- ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- name: EnableRichCodeNavigation
value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
@@ -83,7 +87,7 @@ jobs:
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
-
+
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
@@ -132,7 +136,7 @@ jobs:
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
- ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- task: DownloadPipelineArtifact@2
@@ -150,7 +154,7 @@ jobs:
displayName: RichCodeNav Upload
inputs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
@@ -169,7 +173,7 @@ jobs:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
+ displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
env:
@@ -219,7 +223,7 @@ jobs:
displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
+ testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
@@ -230,7 +234,7 @@ jobs:
displayName: Publish TRX Test Results
inputs:
testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
+ testResultsFiles: '*.trx'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index 42017109f3..bb42240f86 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -48,8 +48,8 @@ jobs:
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/common-variables.yml
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/templates/post-build/common-variables.yml
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
@@ -58,11 +58,15 @@ jobs:
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
+ name: NetCore1ESPool-Publishing-Internal
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - checkout: self
+ fetchDepth: 3
+ clean: true
+
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
@@ -71,8 +75,8 @@ jobs:
checkDownloadedFiles: true
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@0
+
+ - task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Build Assets
@@ -81,12 +85,12 @@ jobs:
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:MaestroApiEndpoint=https://maestro.dot.net
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
-
+
- task: powershell@2
displayName: Create ReleaseConfigs Artifact
inputs:
@@ -95,7 +99,7 @@ jobs:
Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
+
- task: PublishBuildArtifacts@1
displayName: Publish ReleaseConfigs Artifact
inputs:
@@ -121,7 +125,7 @@ jobs:
- task: PublishBuildArtifacts@1
displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
+ condition: eq(variables['SymbolExclusionFile'], 'true')
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
PublishLocation: Container
@@ -137,7 +141,7 @@ jobs:
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
+ arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion 3
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
@@ -148,4 +152,4 @@ jobs:
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/templates/steps/publish-logs.yml
parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index b98202aa02..b5a3e5c4a6 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,6 +1,6 @@
parameters:
runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20230228.2
+ sourceIndexPackageVersion: 1.0.1-20240129.2
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
@@ -30,20 +30,20 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64.open
+ demands: ImageOverride -equals windows.vs2022.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
+ demands: ImageOverride -equals windows.vs2022.amd64
steps:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- task: UseDotNet@2
- displayName: Use .NET Core SDK 6
+ displayName: Use .NET 8 SDK
inputs:
packageType: sdk
- version: 6.0.x
+ version: 8.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index c24193acfc..b9ede10bf0 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -7,7 +7,7 @@ variables:
# Default Maestro++ API Endpoint and API Version
- name: MaestroApiEndPoint
- value: "https://maestro-prod.westus2.cloudapp.azure.com/"
+ value: "https://maestro.dot.net/"
- name: MaestroApiAccessToken
value: $(MaestroAccessToken)
- name: MaestroApiVersion
@@ -17,6 +17,8 @@ variables:
value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.11
- name: runCodesignValidationInjection
value: false
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index ef720f9d78..ee70e2b399 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -39,7 +39,7 @@ parameters:
displayName: Enable NuGet validation
type: boolean
default: true
-
+
- name: publishInstallersAndChecksums
displayName: Publish installers and checksums
type: boolean
@@ -131,8 +131,8 @@ stages:
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
- job:
displayName: Signing Validation
@@ -169,7 +169,7 @@ stages:
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
# otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
displayName: 'Authenticate to AzDO Feeds'
# Signing validation will optionally work with the buildmanifest file which is downloaded from
@@ -187,6 +187,7 @@ stages:
parameters:
StageLabel: 'Validation'
JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
- job:
displayName: SourceLink Validation
@@ -221,9 +222,9 @@ stages:
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
-GHCommit $(Build.SourceVersion)
-SourcelinkCliVersion $(SourceLinkCLIVersion)
continueOnError: true
@@ -258,7 +259,7 @@ stages:
demands: Cmd
# If it's not devdiv, it's dnceng
${{ else }}:
- name: $(DncEngInternalBuildPool)
+ name: NetCore1ESPool-Publishing-Internal
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
@@ -266,13 +267,13 @@ stages:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
+ arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
index 9dd5709f66..07426fde05 100644
--- a/eng/common/templates/steps/execute-sdl.yml
+++ b/eng/common/templates/steps/execute-sdl.yml
@@ -33,7 +33,7 @@ steps:
- ${{ if ne(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL
+ displayName: Execute SDL (Overridden)
continueOnError: ${{ parameters.sdlContinueOnError }}
condition: ${{ parameters.condition }}
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
index 88f238f36b..80861297dd 100644
--- a/eng/common/templates/steps/publish-logs.yml
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -1,6 +1,9 @@
parameters:
StageLabel: ''
JobLabel: ''
+ CustomSensitiveDataList: ''
+ # A default - in case value from eng/common/templates/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.11'
steps:
- task: Powershell@2
@@ -12,7 +15,30 @@ steps:
Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true
condition: always()
-
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+ # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
+ # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ # If the file exists - sensitive data for redaction will be sourced from it
+ # (single entry per line, lines starting with '# ' are considered comments and skipped)
+ arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(MaestroAccessToken)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(akams-client-secret)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 3eb7e2d5f8..68fa739c4a 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -5,6 +5,8 @@ parameters:
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
+ HelixProjectArguments: '' # optional -- arguments passed to the build command
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
@@ -29,7 +31,7 @@ parameters:
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
env:
BuildConfig: $(_BuildConfig)
@@ -59,7 +61,7 @@ steps:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
env:
BuildConfig: $(_BuildConfig)
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
index a97a185a36..32738aa938 100644
--- a/eng/common/templates/steps/source-build.yml
+++ b/eng/common/templates/steps/source-build.yml
@@ -68,6 +68,11 @@ steps:
runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
fi
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
publishArgs=
if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
publishArgs='--publish'
@@ -86,8 +91,11 @@ steps:
$internalRestoreArgs \
$targetRidArgs \
$runtimeOsArgs \
+ $baseOsArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true \
+ /p:DotNetBuildSourceOnly=true \
+ /p:DotNetBuildRepo=true \
/p:AssetManifestFileName=$assetManifestFileName
displayName: Build
@@ -99,7 +107,7 @@ steps:
Contents: |
**/*.log
**/*.binlog
- artifacts/source-build/self/prebuilt-report/**
+ artifacts/sb/prebuilt-report/**
TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
CleanTargetFolder: true
continueOnError: true
@@ -112,3 +120,12 @@ steps:
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+ displayName: Component Detection (Exclude upstream cache)
+ inputs:
+ ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml
index 9cc5c550d3..d236f9fdbb 100644
--- a/eng/common/templates/variables/pool-providers.yml
+++ b/eng/common/templates/variables/pool-providers.yml
@@ -1,15 +1,15 @@
-# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
+# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
-# Motivation:
+# Motivation:
# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS.
-# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
-# team needs to move resources around and create new and potentially differently-named pools. Using this template
+# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
+# team needs to move resources around and create new and potentially differently-named pools. Using this template
# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
-# How to use:
+# How to use:
# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
# If we find alternate naming conventions in broad usage it can be added to the condition below.
#
@@ -54,4 +54,4 @@ variables:
False,
'NetCore1ESPool-Internal'
)
- ]
\ No newline at end of file
+ ]
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index 38cf94ff88..9bf873e3c2 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -65,6 +65,11 @@ $ErrorActionPreference = 'Stop'
# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
+# True if the build is a product build
+[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false }
+
+[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() }
+
function Create-Directory ([string[]] $path) {
New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
}
@@ -158,18 +163,13 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
$env:DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we do not need all ASP.NET packages restored.
- $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+ $env:DOTNET_NOLOGO=1
# Disable telemetry on CI.
if ($ci) {
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
}
- # Source Build uses DotNetCoreSdkDir variable
- if ($env:DotNetCoreSdkDir -ne $null) {
- $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
- }
-
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
$dotnetExecutable = GetExecutableFileName 'dotnet'
@@ -228,7 +228,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
Write-PipelinePrependPath -Path $dotnetRoot
Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
- Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
+ Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1'
return $global:_DotNetInstallDir = $dotnetRoot
}
@@ -379,13 +379,13 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
# Minimum VS version to require.
- $vsMinVersionReqdStr = '16.8'
+ $vsMinVersionReqdStr = '17.7'
$vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.4.1&view=overview
- $defaultXCopyMSBuildVersion = '17.4.1'
+ # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.8.5
+ $defaultXCopyMSBuildVersion = '17.8.5'
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
@@ -450,7 +450,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
if ($xcopyMSBuildVersion.Trim() -ine "none") {
$vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
if ($vsInstallDir -eq $null) {
- throw "Could not xcopy msbuild. Please check that package 'RoslynTools.MSBuild @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
+ throw "Could not xcopy msbuild. Please check that package 'Microsoft.DotNet.Arcade.MSBuild.Xcopy @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
}
}
if ($vsInstallDir -eq $null) {
@@ -487,7 +487,7 @@ function InstallXCopyMSBuild([string]$packageVersion) {
}
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
- $packageName = 'RoslynTools.MSBuild'
+ $packageName = 'Microsoft.DotNet.Arcade.MSBuild.Xcopy'
$packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
$packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
@@ -601,7 +601,15 @@ function InitializeBuildTool() {
ExitWithExitCode 1
}
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
- $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net8.0' }
+
+ # Use override if it exists - commonly set by source-build
+ if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) {
+ $initializeBuildToolFramework="net9.0"
+ } else {
+ $initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework
+ }
+
+ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@@ -671,20 +679,30 @@ function InitializeNativeTools() {
}
}
+function Read-ArcadeSdkVersion() {
+ return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+}
+
function InitializeToolset() {
- if (Test-Path variable:global:_ToolsetBuildProj) {
- return $global:_ToolsetBuildProj
+ # For Unified Build/Source-build support, check whether the environment variable is
+ # set. If it is, then use this as the toolset build project.
+ if ($env:_InitializeToolset -ne $null) {
+ return $global:_InitializeToolset = $env:_InitializeToolset
+ }
+
+ if (Test-Path variable:global:_InitializeToolset) {
+ return $global:_InitializeToolset
}
$nugetCache = GetNuGetPackageCachePath
- $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+ $toolsetVersion = Read-ArcadeSdkVersion
$toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
- return $global:_ToolsetBuildProj = $path
+ return $global:_InitializeToolset = $path
}
}
@@ -707,7 +725,7 @@ function InitializeToolset() {
throw "Invalid toolset path: $path"
}
- return $global:_ToolsetBuildProj = $path
+ return $global:_InitializeToolset = $path
}
function ExitWithExitCode([int] $exitCode) {
@@ -759,12 +777,10 @@ function MSBuild() {
# new scripts need to work with old packages, so we need to look for the old names/versions
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
- (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
- (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
(Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
+ (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')),
+ (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')),
+ (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
)
$selectedPath = $null
foreach ($path in $possiblePaths) {
@@ -823,7 +839,8 @@ function MSBuild-Core() {
}
}
- $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+ # Be sure quote the path in case there are spaces in the dotnet installation location.
+ $env:ARCADE_BUILD_TOOL_COMMAND = "`"$($buildTool.Path)`" $cmdArgs"
$exitCode = Exec-Process $buildTool.Path $cmdArgs
@@ -838,7 +855,8 @@ function MSBuild-Core() {
}
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null) {
+ # Skip this when the build is a child of the VMR orchestrator build.
+ if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and $properties -notlike "*DotNetBuildRepo=true*") {
Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index e8d4789433..db64e298ff 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -68,6 +68,9 @@ fi
runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''}
+# True if the build is a product build
+product_build=${product_build:-false}
+
# Resolve any symlinks in the given path.
function ResolvePath {
local path=$1
@@ -112,7 +115,7 @@ function InitializeDotNetCli {
export DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we want to control all package sources
- export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+ export DOTNET_NOLOGO=1
# Disable telemetry on CI
if [[ $ci == true ]]; then
@@ -123,11 +126,6 @@ function InitializeDotNetCli {
# so it doesn't output warnings to the console.
export LTTNG_HOME="$HOME"
- # Source Build uses DotNetCoreSdkDir variable
- if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
- export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
- fi
-
# Find the first path on $PATH that contains the dotnet.exe
if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
local dotnet_path=`command -v dotnet`
@@ -146,7 +144,7 @@ function InitializeDotNetCli {
if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
- dotnet_root="$repo_root/.dotnet"
+ dotnet_root="${repo_root}.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
@@ -165,7 +163,7 @@ function InitializeDotNetCli {
Write-PipelinePrependPath -path "$dotnet_root"
Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
- Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
+ Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1"
# return value
_InitializeDotNetCli="$dotnet_root"
@@ -310,7 +308,7 @@ function GetDotNetInstallScript {
curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
if command -v openssl &> /dev/null; then
echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
- echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443
+ echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true
fi
echo "Will now retry the same URL with verbose logging."
with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
@@ -341,7 +339,12 @@ function InitializeBuildTool {
# return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
- _InitializeBuildToolFramework="net8.0"
+ # use override if it exists - commonly set by source-build
+ if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then
+ _InitializeBuildToolFramework="net9.0"
+ else
+ _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}"
+ fi
}
# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
@@ -453,12 +456,10 @@ function MSBuild {
local possiblePaths=()
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" )
+ possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" )
+ possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" )
for path in "${possiblePaths[@]}"; do
if [[ -f $path ]]; then
selectedPath=$path
@@ -505,7 +506,8 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then
+ # Skip this when the build is a child of the VMR orchestrator build.
+ if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
diff --git a/eng/helix.proj b/eng/helix.proj
index b68de50cd6..4c5de6b839 100644
--- a/eng/helix.proj
+++ b/eng/helix.proj
@@ -70,6 +70,8 @@
+
+
@@ -99,6 +101,9 @@
$(HelixPreCommands);export ML_TEST_DATADIR=$HELIX_CORRELATION_PAYLOAD;export MICROSOFTML_RESOURCE_PATH=$HELIX_WORKITEM_ROOT;sudo chmod -R 777 $HELIX_WORKITEM_ROOT;sudo chown -R $USER $HELIX_WORKITEM_ROOT
$(HelixPreCommands);set ML_TEST_DATADIR=%HELIX_CORRELATION_PAYLOAD%;set MICROSOFTML_RESOURCE_PATH=%HELIX_WORKITEM_ROOT%
+ $(HelixPreCommands);export PATH=$HELIX_CORRELATION_PAYLOAD/$(DotNetCliDestination):$PATH
+ $(HelixPreCommands);set PATH=%HELIX_CORRELATION_PAYLOAD%\$(DotNetCliDestination)%3B%PATH%
+
$(HelixPreCommands);export LD_LIBRARY_PATH=/opt/homebrew/opt/mono-libgdiplus/lib;
$(HelixPreCommands);sudo apt update;sudo apt-get install libomp-dev libomp5 -y
diff --git a/eng/pkg/Pack.props b/eng/pkg/Pack.props
deleted file mode 100644
index 888000e570..0000000000
--- a/eng/pkg/Pack.props
+++ /dev/null
@@ -1,85 +0,0 @@
-
-
-
- $(ArtifactsDir)pkgassets/
- true
- true
- $(MSBuildProjectName.Replace('.symbols', ''))
- true
- true
-
-
-
-
- Microsoft
- MIT
- https://dot.net/ml
- mlnetlogo.png
- https://aka.ms/mlnetreleasenotes
-
- ML.NET ML Machine Learning
-
- .dll
- .so
- .dylib
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- PreserveNewest
- false
- %(Filename)%(Extension)
-
-
-
- PreserveNewest
- false
- %(Filename)%(Extension)
-
-
-
- PreserveNewest
- false
- %(Filename)%(Extension)
-
-
-
-
-
-
-
- None
-
-
-
-
\ No newline at end of file
diff --git a/global.json b/global.json
index 25df1701da..1aeb82630a 100644
--- a/global.json
+++ b/global.json
@@ -1,20 +1,21 @@
{
"tools": {
- "dotnet": "8.0.100-preview.3.23178.7",
+ "dotnet": "9.0.100-preview.1.24101.2",
"runtimes": {
"dotnet/x64": [
- "$(DotNetRuntime60Version)"
+ "$(DotNetRuntime60Version)",
+ "$(DotNetRuntime80Version)"
],
"dotnet/x86": [
- "$(DotNetRuntime60Version)"
+ "$(DotNetRuntime60Version)",
+ "$(DotNetRuntime80Version)"
]
}
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.23265.1",
- "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.23265.1",
- "Microsoft.Build.Traversal": "3.2.0",
- "Microsoft.SourceLink.GitHub": "1.1.0-beta-20206-02",
- "Microsoft.SourceLink.Common": "1.1.0-beta-20206-02"
+ "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24151.5",
+ "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24151.5",
+ "Microsoft.Build.NoTargets": "3.7.0",
+ "Microsoft.Build.Traversal": "3.2.0"
}
}
diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index 90b94346bc..3afbdbedea 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -3,7 +3,7 @@
- true
+ true
diff --git a/src/Microsoft.Data.Analysis/DataFrame.Arrow.cs b/src/Microsoft.Data.Analysis/DataFrame.Arrow.cs
index 2d67b9ee78..7a0aae7cbf 100644
--- a/src/Microsoft.Data.Analysis/DataFrame.Arrow.cs
+++ b/src/Microsoft.Data.Analysis/DataFrame.Arrow.cs
@@ -102,11 +102,25 @@ private static void AppendDataFrameColumnFromArrowArray(Field field, IArrowArray
}
break;
case ArrowTypeId.Date64:
- Date64Array arrowDate64Array = (Date64Array)arrowArray;
- dataFrameColumn = new DateTimeDataFrameColumn(fieldName, arrowDate64Array.Data.Length);
- for (int i = 0; i < arrowDate64Array.Data.Length; i++)
{
- dataFrameColumn[i] = arrowDate64Array.GetDateTime(i);
+ Date64Array arrowDate64Array = (Date64Array)arrowArray;
+ var dataTimeDataFrameColumn = new DateTimeDataFrameColumn(fieldName, arrowDate64Array.Data.Length);
+ for (int i = 0; i < arrowDate64Array.Data.Length; i++)
+ {
+ dataTimeDataFrameColumn[i] = arrowDate64Array.GetDateTime(i);
+ }
+ dataFrameColumn = dataTimeDataFrameColumn;
+ }
+ break;
+ case ArrowTypeId.Timestamp:
+ {
+ TimestampArray arrowTimeStampArray = (TimestampArray)arrowArray;
+ var dataTimeDataFrameColumn = new DateTimeDataFrameColumn(fieldName, arrowTimeStampArray.Data.Length);
+ for (int i = 0; i < arrowTimeStampArray.Data.Length; i++)
+ {
+ dataTimeDataFrameColumn[i] = arrowTimeStampArray.GetTimestamp(i)?.DateTime;
+ }
+ dataFrameColumn = dataTimeDataFrameColumn;
}
break;
case ArrowTypeId.Decimal128:
@@ -122,7 +136,7 @@ private static void AppendDataFrameColumnFromArrowArray(Field field, IArrowArray
case ArrowTypeId.Null:
case ArrowTypeId.Time32:
case ArrowTypeId.Time64:
- case ArrowTypeId.Timestamp:
+
default:
throw new NotImplementedException($"{fieldType.Name}");
}
diff --git a/src/Microsoft.Data.Analysis/ArrowStringDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/ArrowStringDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/ArrowStringDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/ArrowStringDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/BooleanDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/BooleanDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/BooleanDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/BooleanDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/ByteDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/ByteDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/ByteDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/ByteDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/CharDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/CharDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/CharDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/CharDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/DateTimeDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/DateTimeDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/DateTimeDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/DateTimeDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/DecimalDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/DecimalDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/DecimalDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/DecimalDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/DoubleDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/DoubleDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/DoubleDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/DoubleDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/Int16DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/Int16DataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/Int16DataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/Int16DataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/Int32DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/Int32DataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/Int32DataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/Int32DataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/Int64DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/Int64DataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/Int64DataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/Int64DataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/SByteDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/SByteDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/SByteDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/SByteDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/SingleDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/SingleDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/SingleDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/SingleDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/UInt16DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/UInt16DataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/UInt16DataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/UInt16DataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/UInt32DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/UInt32DataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/UInt32DataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/UInt32DataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/UInt64DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/UInt64DataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/UInt64DataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/PrimitiveDataFrameColumns/UInt64DataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/StringDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/StringDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/StringDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/StringDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/VBufferDataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumns/VBufferDataFrameColumn.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/VBufferDataFrameColumn.cs
rename to src/Microsoft.Data.Analysis/DataFrameColumns/VBufferDataFrameColumn.cs
diff --git a/src/Microsoft.Data.Analysis/Microsoft.Data.Analysis.csproj b/src/Microsoft.Data.Analysis/Microsoft.Data.Analysis.csproj
index 4b4fda6652..1badcdef5c 100644
--- a/src/Microsoft.Data.Analysis/Microsoft.Data.Analysis.csproj
+++ b/src/Microsoft.Data.Analysis/Microsoft.Data.Analysis.csproj
@@ -1,9 +1,10 @@
-
+
netstandard2.0
true
false
+ true
This package contains easy-to-use and high-performance libraries for data analysis and transformation.
Initial preview of robust and extensible types and algorithms for manipulating structured data that supports aggregations, statistical funtions, sorting, grouping, joins, merges, handling missing values and more.
ML.NET ML Machine Learning Data Science DataFrame Preparation DataView Analytics Exploration
@@ -16,7 +17,9 @@
$(TargetsForTfmSpecificContentInPackage);AddMDAIToInteractiveExtensionsFolder
-
+
@@ -237,4 +240,5 @@
Microsoft.Data
+
diff --git a/src/Microsoft.Data.Analysis/ArrayUtility.cs b/src/Microsoft.Data.Analysis/Utils/ArrayUtility.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/ArrayUtility.cs
rename to src/Microsoft.Data.Analysis/Utils/ArrayUtility.cs
diff --git a/src/Microsoft.Data.Analysis/BitUtility.cs b/src/Microsoft.Data.Analysis/Utils/BitUtility.cs
similarity index 100%
rename from src/Microsoft.Data.Analysis/BitUtility.cs
rename to src/Microsoft.Data.Analysis/Utils/BitUtility.cs
diff --git a/src/Microsoft.Extensions.ML/Microsoft.Extensions.ML.csproj b/src/Microsoft.Extensions.ML/Microsoft.Extensions.ML.csproj
index 2970566af3..037c7adce1 100644
--- a/src/Microsoft.Extensions.ML/Microsoft.Extensions.ML.csproj
+++ b/src/Microsoft.Extensions.ML/Microsoft.Extensions.ML.csproj
@@ -1,9 +1,8 @@
-
netstandard2.0
- Microsoft.Extensions.ML
+ true
An integration package for ML.NET models on scalable web apps and services.
@@ -13,14 +12,8 @@
-
- all
-
-
-
- all
-
-
+
+
diff --git a/src/Microsoft.ML.AutoML/CodeGen/estimator-schema.json b/src/Microsoft.ML.AutoML/CodeGen/estimator-schema.json
index 8c12390426..d82cd4031c 100644
--- a/src/Microsoft.ML.AutoML/CodeGen/estimator-schema.json
+++ b/src/Microsoft.ML.AutoML/CodeGen/estimator-schema.json
@@ -74,7 +74,9 @@
"TextClassifcation",
"SentenceSimilarity",
"ObjectDetection",
- "QuestionAnswering"
+ "QuestionAnswering",
+ "NamedEntityRecognition",
+ "NormalizeText"
]
},
"nugetDependencies": {
@@ -113,7 +115,8 @@
"Microsoft.ML.Transforms.Image",
"Microsoft.ML.Trainers.FastTree",
"Microsoft.ML.TorchSharp",
- "Microsoft.ML.Trainers.LightGbm"
+ "Microsoft.ML.Trainers.LightGbm",
+ "Microsoft.ML.Transforms.Text.TextNormalizingEstimator"
]
}
},
@@ -197,7 +200,11 @@
"scoreThreshold",
"steps",
"initLearningRate",
- "weightDecay"
+ "weightDecay",
+ "caseMode",
+ "keepPunctuations",
+ "keepDiacritics",
+ "keepNumbers"
]
},
"argumentType": {
diff --git a/src/Microsoft.ML.AutoML/CodeGen/map_value_to_key_search_space.json b/src/Microsoft.ML.AutoML/CodeGen/map_value_to_key_search_space.json
index 1663a95c18..6e9390f141 100644
--- a/src/Microsoft.ML.AutoML/CodeGen/map_value_to_key_search_space.json
+++ b/src/Microsoft.ML.AutoML/CodeGen/map_value_to_key_search_space.json
@@ -14,6 +14,10 @@
"name": "AddKeyValueAnnotationsAsText",
"type": "boolean",
"default": false
+ },
+ {
+ "name": "KeyData",
+ "type": "dataView"
}
]
}
diff --git a/src/Microsoft.ML.AutoML/CodeGen/named_entity_recognition_search_space.json b/src/Microsoft.ML.AutoML/CodeGen/named_entity_recognition_search_space.json
new file mode 100644
index 0000000000..bd5a66036d
--- /dev/null
+++ b/src/Microsoft.ML.AutoML/CodeGen/named_entity_recognition_search_space.json
@@ -0,0 +1,36 @@
+{
+ "$schema": "./search-space-schema.json#",
+ "name": "named_entity_recognition_option",
+ "search_space": [
+ {
+ "name": "PredictionColumnName",
+ "type": "string",
+ "default": "predictedLabel"
+ },
+ {
+ "name": "LabelColumnName",
+ "type": "string",
+ "default": "Label"
+ },
+ {
+ "name": "Sentence1ColumnName",
+ "type": "string",
+ "default": "Sentence"
+ },
+ {
+ "name": "BatchSize",
+ "type": "integer",
+ "default": 32
+ },
+ {
+ "name": "MaxEpochs",
+ "type": "integer",
+ "default": 10
+ },
+ {
+ "name": "Architecture",
+ "type": "bertArchitecture",
+ "default": "BertArchitecture.Roberta"
+ }
+ ]
+}
diff --git a/src/Microsoft.ML.AutoML/CodeGen/normalize_text_search_space.json b/src/Microsoft.ML.AutoML/CodeGen/normalize_text_search_space.json
new file mode 100644
index 0000000000..64359eaf26
--- /dev/null
+++ b/src/Microsoft.ML.AutoML/CodeGen/normalize_text_search_space.json
@@ -0,0 +1,34 @@
+{
+ "$schema": "./search-space-schema.json#",
+ "name": "normalize_text_option",
+ "search_space": [
+ {
+ "name": "InputColumnName",
+ "type": "string"
+ },
+ {
+ "name": "OutputColumnName",
+ "type": "string"
+ },
+ {
+ "name": "CaseMode",
+ "type": "caseMode",
+ "default": "CaseMode.Lower"
+ },
+ {
+ "name": "KeepDiacritics",
+ "type": "boolean",
+ "default": false
+ },
+ {
+ "name": "KeepPunctuations",
+ "type": "boolean",
+ "default": true
+ },
+ {
+ "name": "KeepNumbers",
+ "type": "boolean",
+ "default": true
+ }
+ ]
+}
diff --git a/src/Microsoft.ML.AutoML/CodeGen/search-space-schema.json b/src/Microsoft.ML.AutoML/CodeGen/search-space-schema.json
index 0ccb7b1fcf..e61fc0f9b2 100644
--- a/src/Microsoft.ML.AutoML/CodeGen/search-space-schema.json
+++ b/src/Microsoft.ML.AutoML/CodeGen/search-space-schema.json
@@ -66,6 +66,14 @@
"DataKind.DateTimeOffset"
]
},
+ "caseMode": {
+ "type": "string",
+ "enum": [
+ "CaseMode.Lower",
+ "CaseMode.Upper",
+ "CaseMode.None"
+ ]
+ },
"bertArchitectureArray": {
"type": "array",
"items": {
@@ -90,7 +98,7 @@
"$ref": "#/definitions/dnnModelFactoryArray"
},
{
- "$ref": "#/definitions/imageClassificationArchArray"
+ "$ref": "#/definitions/imageClassificationArchArray"
},
{
"$ref": "#/definitions/boolArray"
@@ -167,7 +175,9 @@
"text_classification_option",
"sentence_similarity_option",
"object_detection_option",
- "question_answering_option"
+ "question_answering_option",
+ "named_entity_recognition_option",
+ "normalize_text_option"
]
},
"option_name": {
@@ -238,7 +248,13 @@
"AnswerIndexStartColumnName",
"predictedAnswerColumnName",
"TopKAnswers",
- "TargetType"
+ "TargetType",
+ "PredictionColumnName",
+ "KeyData",
+ "CaseMode",
+ "KeepPunctuations",
+ "KeepDiacritics",
+ "KeepNumbers"
]
},
"option_type": {
@@ -257,7 +273,9 @@
"dnnModelFactory",
"bertArchitecture",
"imageClassificationArchType",
- "dataKind"
+ "dataKind",
+ "dataView",
+ "caseMode"
]
}
},
diff --git a/src/Microsoft.ML.AutoML/CodeGen/trainer-estimators.json b/src/Microsoft.ML.AutoML/CodeGen/trainer-estimators.json
index 0ce5a45e37..e0df321f38 100644
--- a/src/Microsoft.ML.AutoML/CodeGen/trainer-estimators.json
+++ b/src/Microsoft.ML.AutoML/CodeGen/trainer-estimators.json
@@ -539,6 +539,13 @@
"usingStatements": [ "Microsoft.ML", "Microsoft.ML.Trainers", "Microsoft.ML.TorchSharp" ],
"searchOption": "question_answering_option"
},
+ {
+ "functionName": "NamedEntityRecognition",
+ "estimatorTypes": [ "MultiClassification" ],
+ "nugetDependencies": [ "Microsoft.ML", "Microsoft.ML.TorchSharp" ],
+ "usingStatements": [ "Microsoft.ML", "Microsoft.ML.Trainers", "Microsoft.ML.TorchSharp" ],
+ "searchOption": "named_entity_recognition_option"
+ },
{
"functionName": "ForecastBySsa",
"estimatorTypes": [ "Forecasting" ],
diff --git a/src/Microsoft.ML.AutoML/CodeGen/transformer-estimators.json b/src/Microsoft.ML.AutoML/CodeGen/transformer-estimators.json
index 0fc42d3aa6..c9f176ba89 100644
--- a/src/Microsoft.ML.AutoML/CodeGen/transformer-estimators.json
+++ b/src/Microsoft.ML.AutoML/CodeGen/transformer-estimators.json
@@ -180,6 +180,39 @@
"usingStatements": [ "Microsoft.ML", "Microsoft.ML.Data" ],
"searchOption": "featurize_text_option"
},
+ {
+ "functionName": "NormalizeText",
+ "estimatorTypes": [ "Text" ],
+ "arguments": [
+ {
+ "argumentName": "outputColumnName",
+ "argumentType": "string"
+ },
+ {
+ "argumentName": "inputColumnName",
+ "argumentType": "string"
+ },
+ {
+ "argumentName": "caseMode",
+ "argumentType": "caseMode"
+ },
+ {
+ "argumentName": "keepDiacritics",
+ "argumentType": "boolean"
+ },
+ {
+ "argumentName": "keepPunctuations",
+ "argumentType": "boolean"
+ },
+ {
+ "argumentName": "keepNumbers",
+ "argumentType": "boolean"
+ }
+ ],
+ "nugetDependencies": [ "Microsoft.ML" ],
+ "usingStatements": [ "Microsoft.ML", "Microsoft.ML.Data", "Microsoft.ML.Transforms.Text.TextNormalizingEstimator"],
+ "searchOption": "normalize_text_option"
+ },
{
"functionName": "ConvertType",
"estimatorTypes": [ "Conversion" ],
diff --git a/src/Microsoft.ML.AutoML/CodeGen/type_converter_search_space.json b/src/Microsoft.ML.AutoML/CodeGen/type_converter_search_space.json
index 38d860e9c4..22e2a37605 100644
--- a/src/Microsoft.ML.AutoML/CodeGen/type_converter_search_space.json
+++ b/src/Microsoft.ML.AutoML/CodeGen/type_converter_search_space.json
@@ -14,6 +14,14 @@
"name": "TargetType",
"type": "dataKind",
"default": "DataKind.Single"
+ },
+ {
+ "name": "KeyData",
+ "type": "dataView"
+ },
+ {
+ "name": "CaseMode",
+ "type": "caseMode"
}
]
}
diff --git a/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj b/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj
index 669afe59de..909139b33b 100644
--- a/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj
+++ b/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj
@@ -1,12 +1,9 @@
-
netstandard2.0
- Microsoft.ML.AutoML
+ true
ML.NET AutoML: Optimizes an ML pipeline for your dataset, by automatically locating the best feature engineering, model, and hyperparameters
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/MapValueToKey.cs b/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/MapValueToKey.cs
index a48827478f..decbd39109 100644
--- a/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/MapValueToKey.cs
+++ b/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/MapValueToKey.cs
@@ -8,7 +8,7 @@ internal partial class MapValueToKey
{
public override IEstimator BuildFromOption(MLContext context, MapValueToKeyOption param)
{
- return context.Transforms.Conversion.MapValueToKey(param.OutputColumnName, param.InputColumnName, addKeyValueAnnotationsAsText: param.AddKeyValueAnnotationsAsText);
+ return context.Transforms.Conversion.MapValueToKey(param.OutputColumnName, param.InputColumnName, addKeyValueAnnotationsAsText: param.AddKeyValueAnnotationsAsText, keyData: param.KeyData);
}
}
diff --git a/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/NamedEntityRecognitionMulti.cs b/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/NamedEntityRecognitionMulti.cs
new file mode 100644
index 0000000000..8913f93d83
--- /dev/null
+++ b/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/NamedEntityRecognitionMulti.cs
@@ -0,0 +1,26 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Microsoft.ML.TorchSharp;
+using Microsoft.ML.TorchSharp.NasBert;
+
+namespace Microsoft.ML.AutoML.CodeGen
+{
+ internal partial class NamedEntityRecognitionMulti
+ {
+ public override IEstimator BuildFromOption(MLContext context, NamedEntityRecognitionOption param)
+ {
+ return context.MulticlassClassification.Trainers.NamedEntityRecognition(
+ labelColumnName: param.LabelColumnName,
+ outputColumnName: param.PredictionColumnName,
+ sentence1ColumnName: param.Sentence1ColumnName,
+ batchSize: param.BatchSize,
+ maxEpochs: param.MaxEpochs,
+ architecture: BertArchitecture.Roberta);
+ }
+ }
+}
diff --git a/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/NormalizeText.cs b/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/NormalizeText.cs
new file mode 100644
index 0000000000..a78ccd3161
--- /dev/null
+++ b/src/Microsoft.ML.AutoML/SweepableEstimator/Estimators/NormalizeText.cs
@@ -0,0 +1,18 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Microsoft.ML.AutoML.CodeGen
+{
+ internal partial class NormalizeText
+ {
+ public override IEstimator BuildFromOption(MLContext context, NormalizeTextOption param)
+ {
+ return context.Transforms.Text.NormalizeText(param.OutputColumnName, param.InputColumnName, param.CaseMode, param.KeepDiacritics, param.KeepPunctuations, param.KeepNumbers);
+ }
+ }
+}
diff --git a/src/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.csproj b/src/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.csproj
index 58b6f74f1b..a468b72fde 100644
--- a/src/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.csproj
+++ b/src/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.csproj
@@ -1,9 +1,8 @@
-
netstandard2.0
- Microsoft.ML.CodeGenerator
+ true
ML.NET Code Generator
$(NoWarn)
@@ -24,13 +23,9 @@
-
- all
-
+
-
- all
-
+
@@ -145,4 +140,5 @@
PredictProject.tt
+
diff --git a/src/Microsoft.ML.Core/Microsoft.ML.Core.csproj b/src/Microsoft.ML.Core/Microsoft.ML.Core.csproj
index cc342e96e1..807fa85088 100644
--- a/src/Microsoft.ML.Core/Microsoft.ML.Core.csproj
+++ b/src/Microsoft.ML.Core/Microsoft.ML.Core.csproj
@@ -2,7 +2,6 @@
netstandard2.0
- Microsoft.ML
true
CORECLR
Microsoft.ML
diff --git a/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs
index 433ad2fa1c..68b9250eeb 100644
--- a/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs
+++ b/src/Microsoft.ML.Core/Properties/AssemblyInfo.cs
@@ -14,6 +14,8 @@
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.OnnxTransformerTest" + PublicKey.Value)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.AutoML.Tests" + PublicKey.Value)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.PerformanceTests" + PublicKey.Value)]
+[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TorchSharp.Tests" + PublicKey.TestValue)]
+[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TensorFlow.Tests" + PublicKey.TestValue)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.EntryPoints" + PublicKey.Value)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Maml" + PublicKey.Value)]
diff --git a/src/Microsoft.ML.Core/SearchSpace/BoolearnChoiceAttribute.cs b/src/Microsoft.ML.Core/SearchSpace/BoolearnChoiceAttribute.cs
new file mode 100644
index 0000000000..a9a50de5f0
--- /dev/null
+++ b/src/Microsoft.ML.Core/SearchSpace/BoolearnChoiceAttribute.cs
@@ -0,0 +1,33 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+
+namespace Microsoft.ML.SearchSpace;
+
+/// <summary>
+/// Boolean choice attribute
+/// </summary>
+[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, Inherited = false, AllowMultiple = false)]
+public sealed class BooleanChoiceAttribute : Attribute
+{
+    /// <summary>
+    /// Create a <see cref="BooleanChoiceAttribute"/>.
+    /// </summary>
+ public BooleanChoiceAttribute()
+ {
+ DefaultValue = true;
+ }
+
+    /// <summary>
+    /// Create a <see cref="BooleanChoiceAttribute"/> with default value.
+    /// </summary>
+    /// <param name="defaultValue">default value for this option.</param>
+ public BooleanChoiceAttribute(bool defaultValue)
+ {
+ DefaultValue = defaultValue;
+ }
+
+ public bool DefaultValue { get; }
+}
diff --git a/src/Microsoft.ML.Core/SearchSpace/ChoiceAttribute.cs b/src/Microsoft.ML.Core/SearchSpace/ChoiceAttribute.cs
new file mode 100644
index 0000000000..24db500703
--- /dev/null
+++ b/src/Microsoft.ML.Core/SearchSpace/ChoiceAttribute.cs
@@ -0,0 +1,50 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Diagnostics.Contracts;
+using System.Linq;
+
+namespace Microsoft.ML.SearchSpace;
+
+/// <summary>
+/// Choice attribute
+/// </summary>
+[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, Inherited = false, AllowMultiple = false)]
+public sealed class ChoiceAttribute : Attribute
+{
+    /// <summary>
+    /// Create a <see cref="ChoiceAttribute"/> with <paramref name="candidates"/>.
+    /// </summary>
+ public ChoiceAttribute(params object[] candidates)
+ {
+ var candidatesType = candidates.Select(o => o.GetType()).Distinct();
+ Contract.Assert(candidatesType.Count() == 1, "multiple candidates type detected");
+ this.Candidates = candidates;
+ this.DefaultValue = null;
+ }
+
+    /// <summary>
+    /// Create a <see cref="ChoiceAttribute"/> with <paramref name="candidates"/> and <paramref name="defaultValue"/>.
+    /// </summary>
+ public ChoiceAttribute(object[] candidates, object defaultValue)
+ {
+ var candidatesType = candidates.Select(o => o.GetType()).Distinct();
+ Contract.Assert(candidatesType.Count() == 1, "multiple candidates type detected");
+ Contract.Assert(candidatesType.First() == defaultValue.GetType(), "candidates type doesn't match with defaultValue type");
+
+ this.Candidates = candidates;
+ this.DefaultValue = defaultValue;
+ }
+
+    /// <summary>
+    /// Get the candidates of this option.
+    /// </summary>
+ public object[] Candidates { get; }
+
+    /// <summary>
+    /// Get the default value of this option.
+    /// </summary>
+ public object DefaultValue { get; }
+}
diff --git a/src/Microsoft.ML.Core/SearchSpace/NestOptionAttribute.cs b/src/Microsoft.ML.Core/SearchSpace/NestOptionAttribute.cs
new file mode 100644
index 0000000000..2a46530ed8
--- /dev/null
+++ b/src/Microsoft.ML.Core/SearchSpace/NestOptionAttribute.cs
@@ -0,0 +1,21 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+
+namespace Microsoft.ML.SearchSpace;
+
+/// <summary>
+/// Attribute class for nest option.
+/// </summary>
+[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, Inherited = false, AllowMultiple = false)]
+public sealed class NestOptionAttribute : Attribute
+{
+    /// <summary>
+    /// Create an <see cref="NestOptionAttribute"/>.
+    /// </summary>
+ public NestOptionAttribute()
+ {
+ }
+}
diff --git a/src/Microsoft.ML.Core/SearchSpace/RangeAttribute.cs b/src/Microsoft.ML.Core/SearchSpace/RangeAttribute.cs
new file mode 100644
index 0000000000..f907650be7
--- /dev/null
+++ b/src/Microsoft.ML.Core/SearchSpace/RangeAttribute.cs
@@ -0,0 +1,88 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+
+namespace Microsoft.ML.SearchSpace;
+
+/// <summary>
+/// Range attribute
+/// </summary>
+[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, Inherited = false, AllowMultiple = false)]
+public sealed class RangeAttribute : Attribute
+{
+    /// <summary>
+    /// Create a <see cref="RangeAttribute"/>
+    /// </summary>
+ public RangeAttribute(double min, double max, bool logBase = false)
+ {
+ this.Min = min;
+ this.Max = max;
+ this.Init = null;
+ this.LogBase = logBase;
+ }
+
+    /// <summary>
+    /// Create a <see cref="RangeAttribute"/>
+    /// </summary>
+ public RangeAttribute(double min, double max, double init, bool logBase = false)
+ {
+ this.Min = min;
+ this.Max = max;
+ this.Init = init;
+ this.LogBase = logBase;
+ }
+
+    /// <summary>
+    /// Create a <see cref="RangeAttribute"/>
+    /// </summary>
+ public RangeAttribute(int min, int max, bool logBase = false)
+ {
+ this.Min = min;
+ this.Max = max;
+ this.Init = null;
+ this.LogBase = logBase;
+ }
+
+    /// <summary>
+    /// Create a <see cref="RangeAttribute"/>
+    /// </summary>
+ public RangeAttribute(int min, int max, int init, bool logBase = false)
+ {
+ this.Min = min;
+ this.Max = max;
+ this.Init = init;
+ this.LogBase = logBase;
+ }
+
+    /// <summary>
+    /// Create a <see cref="RangeAttribute"/>
+    /// </summary>
+ public RangeAttribute(float min, float max, bool logBase = false)
+ {
+ this.Min = min;
+ this.Max = max;
+ this.Init = null;
+ this.LogBase = logBase;
+ }
+
+    /// <summary>
+    /// Create a <see cref="RangeAttribute"/>
+    /// </summary>
+ public RangeAttribute(float min, float max, float init, bool logBase = false)
+ {
+ this.Min = min;
+ this.Max = max;
+ this.Init = init;
+ this.LogBase = logBase;
+ }
+
+ public object Min { get; }
+
+ public object Max { get; }
+
+ public object Init { get; }
+
+ public bool LogBase { get; }
+}
diff --git a/src/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.csproj b/src/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.csproj
index 699bc4e2f6..f6f7abb7da 100644
--- a/src/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.csproj
+++ b/src/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.csproj
@@ -1,12 +1,11 @@
-
+
netstandard2.0;net6.0
- Microsoft.ML.CpuMath
+ true
Microsoft.ML.CpuMath contains optimized math routines for ML.NET.
true
$(DefineConstants);CPUMATH_INFRASTRUCTURE
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
@@ -33,12 +32,5 @@
-
-
-
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/Microsoft.ML.Data/DataView/CacheDataView.cs b/src/Microsoft.ML.Data/DataView/CacheDataView.cs
index 4c2491e09a..e6c8755edf 100644
--- a/src/Microsoft.ML.Data/DataView/CacheDataView.cs
+++ b/src/Microsoft.ML.Data/DataView/CacheDataView.cs
@@ -1320,7 +1320,7 @@ public virtual void Freeze()
private sealed class ImplVec : ColumnCache>
{
- // The number of rows cached.
+ // The number of rows cached. Only to be accessed by the Caching thread.
private int _rowCount;
// For a given row [r], elements at [r] and [r+1] specify the inclusive
// and exclusive range of values for the two big arrays. In the case
@@ -1384,10 +1384,10 @@ public override void CacheCurrent()
public override void Fetch(int idx, ref VBuffer value)
{
- Ctx.Assert(0 <= idx && idx < _rowCount);
- Ctx.Assert(_rowCount < Utils.Size(_indexBoundaries));
- Ctx.Assert(_rowCount < Utils.Size(_valueBoundaries));
- Ctx.Assert(_uniformLength > 0 || _rowCount <= Utils.Size(_lengths));
+ Ctx.Assert(0 <= idx);
+ Ctx.Assert((idx + 1) < Utils.Size(_indexBoundaries));
+ Ctx.Assert((idx + 1) < Utils.Size(_valueBoundaries));
+ Ctx.Assert(_uniformLength > 0 || idx < Utils.Size(_lengths));
Ctx.Assert(_indexBoundaries[idx + 1] - _indexBoundaries[idx] <= int.MaxValue);
int indexCount = (int)(_indexBoundaries[idx + 1] - _indexBoundaries[idx]);
diff --git a/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj b/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj
index 0ea1f6e1f9..d212f5e4ee 100644
--- a/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj
+++ b/src/Microsoft.ML.Data/Microsoft.ML.Data.csproj
@@ -2,7 +2,6 @@
netstandard2.0
- Microsoft.ML
true
CORECLR
diff --git a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs
index c0b9e1782c..12c25255d5 100644
--- a/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs
+++ b/src/Microsoft.ML.Data/Properties/AssemblyInfo.cs
@@ -14,6 +14,8 @@
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TimeSeries.Tests" + PublicKey.Value)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.PerformanceTests" + PublicKey.Value)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.AutoML.Tests" + PublicKey.Value)]
+[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TorchSharp.Tests" + PublicKey.TestValue)]
+[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TensorFlow.Tests" + PublicKey.TestValue)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.PerformanceTests" + PublicKey.Value)]
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.EntryPoints" + PublicKey.Value)]
diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs
index 26b6b35c05..c19b23b92a 100644
--- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs
+++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs
@@ -437,16 +437,26 @@ public Double[] Mean
get { return _mean; }
}
- public Double[] StdDev
+ public Double[] StdDevPopulation
{
get { return _m2.Select((m2, i) => Math.Sqrt(m2 / _cnz[i])).ToArray(); }
}
+ public Double[] StdDevSample
+ {
+ get { return _m2.Select((m2, i) => Math.Sqrt(m2 / Math.Max(0, _cnz[i] - 1))).ToArray(); }
+ }
+
public Double[] MeanSquareError
{
get { return _m2.Select((m2, i) => m2 / _cnz[i]).ToArray(); }
}
+ public Double[] SampleVariance
+ {
+ get { return _m2.Select((m2, i) => m2 / Math.Max(0, _cnz[i] - 1)).ToArray(); }
+ }
+
public Double[] M2
{
get { return _m2; }
@@ -1637,15 +1647,17 @@ public sealed class MeanVarOneColumnFunctionBuilder : OneColumnFunctionBuilderBa
private readonly bool _useLog;
private readonly bool _useCdf;
private readonly bool _fix;
+ private readonly bool _useSampleVariance;
private readonly MeanVarDblAggregator _aggregator;
private VBuffer _buffer;
- private MeanVarOneColumnFunctionBuilder(IHost host, long lim, bool fix, ValueGetter getSrc, bool useLog, bool useCdf)
+ private MeanVarOneColumnFunctionBuilder(IHost host, long lim, bool fix, ValueGetter getSrc, bool useLog, bool useCdf, bool useSampleVariance)
: base(host, lim, getSrc)
{
_useLog = useLog;
_useCdf = useCdf;
_fix = fix;
+ _useSampleVariance = useSampleVariance;
_aggregator = new MeanVarDblAggregator(1, useLog);
_buffer = new VBuffer(1, new TFloat[1]);
}
@@ -1654,7 +1666,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.MeanVarianceCol
ValueGetter getter)
{
host.CheckUserArg(column.MaximumExampleCount > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
- return new MeanVarOneColumnFunctionBuilder(host, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf);
+ return new MeanVarOneColumnFunctionBuilder(host, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf, column.UseSampleVariance);
}
public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVarianceColumnOptions column, IHost host, DataViewType srcType,
@@ -1662,7 +1674,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVariance
{
var lim = column.MaximumExampleCount;
host.CheckUserArg(lim > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
- return new MeanVarOneColumnFunctionBuilder(host, lim, false, getter, true, column.UseCdf);
+ return new MeanVarOneColumnFunctionBuilder(host, lim, false, getter, true, column.UseCdf, column.UseSampleVariance);
}
protected override bool ProcessValue(in TFloat origVal)
@@ -1689,10 +1701,13 @@ private IColumnFunction CreateAffineColumnFunction()
return AffineColumnFunction.Create(Host, (TFloat)0, (TFloat)0);
TFloat scale;
TFloat offset;
+ var stdDev = _useSampleVariance ? _aggregator.StdDevSample[0] : _aggregator.StdDevPopulation[0];
+ var variance = _useSampleVariance ? _aggregator.SampleVariance[0] : _aggregator.MeanSquareError[0];
+
if (_fix)
- MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[0], _aggregator.MeanSquareError[0], out scale, out offset);
+ MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[0], variance, out scale, out offset);
else
- MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[0], _aggregator.StdDev[0], out scale, out offset);
+ MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[0], stdDev, out scale, out offset);
return AffineColumnFunction.Create(Host, scale, offset);
}
@@ -1703,7 +1718,9 @@ private IColumnFunction CreateCdfColumnFunction()
if (_aggregator.M2[0] == 0 || _aggregator.Counts[0] == 0)
return CdfColumnFunction.Create(Host, (TFloat)0, (TFloat)0, _useLog);
- return CdfColumnFunction.Create(Host, (TFloat)_aggregator.Mean[0], (TFloat)_aggregator.StdDev[0], _useLog);
+ var stdDev = _useSampleVariance ? _aggregator.StdDevSample[0] : _aggregator.StdDevPopulation[0];
+
+ return CdfColumnFunction.Create(Host, (TFloat)_aggregator.Mean[0], (TFloat)stdDev, _useLog);
}
}
@@ -1712,16 +1729,18 @@ public sealed class MeanVarVecColumnFunctionBuilder : VecColumnFunctionBuilderBa
private readonly bool _fix;
private readonly bool _useLog;
private readonly bool _useCdf;
+ private readonly bool _useSampleVariance;
private readonly MeanVarDblAggregator _aggregator;
private MeanVarVecColumnFunctionBuilder(IHost host, int cv, long lim, bool fix,
- ValueGetter> getSrc, bool useLog, bool useCdf)
+ ValueGetter> getSrc, bool useLog, bool useCdf, bool useSampleVariance)
: base(host, lim, getSrc)
{
_aggregator = new MeanVarDblAggregator(cv, useLog);
_fix = fix;
_useLog = useLog;
_useCdf = useCdf;
+ _useSampleVariance = useSampleVariance;
}
public static IColumnFunctionBuilder Create(NormalizingEstimator.MeanVarianceColumnOptions column, IHost host, VectorDataViewType srcType,
@@ -1729,7 +1748,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.MeanVarianceCol
{
host.CheckUserArg(column.MaximumExampleCount > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
var cv = srcType.Size;
- return new MeanVarVecColumnFunctionBuilder(host, cv, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf);
+ return new MeanVarVecColumnFunctionBuilder(host, cv, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf, column.UseSampleVariance);
}
public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVarianceColumnOptions column, IHost host, VectorDataViewType srcType,
@@ -1738,7 +1757,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVariance
var lim = column.MaximumExampleCount;
host.CheckUserArg(lim > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
var cv = srcType.Size;
- return new MeanVarVecColumnFunctionBuilder(host, cv, lim, false, getter, true, column.UseCdf);
+ return new MeanVarVecColumnFunctionBuilder(host, cv, lim, false, getter, true, column.UseCdf, column.UseSampleVariance);
}
protected override bool ProcessValue(in VBuffer buffer)
@@ -1776,10 +1795,14 @@ private IColumnFunction CreateAffineColumnFunction()
scale[i] = offset[i] = 0;
continue;
}
+
+ var stdDev = _useSampleVariance ? _aggregator.StdDevSample[i] : _aggregator.StdDevPopulation[i];
+ var variance = _useSampleVariance ? _aggregator.SampleVariance[i] : _aggregator.MeanSquareError[i];
+
if (_fix)
- MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[i], _aggregator.MeanSquareError[i], out scale[i], out offset[i]);
+ MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[i], variance, out scale[i], out offset[i]);
else
- MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[i], _aggregator.StdDev[i], out scale[i], out offset[i]);
+ MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[i], stdDev, out scale[i], out offset[i]);
if (offset[i] != 0 && nz.Count < lim)
nz.Add(i);
}
@@ -1819,7 +1842,8 @@ private IColumnFunction CreateCdfColumnFunction()
continue;
}
mean[i] = (TFloat)_aggregator.Mean[i];
- stddev[i] = (TFloat)_aggregator.StdDev[i];
+ stddev[i] = (TFloat)(_useSampleVariance ? _aggregator.StdDevSample[i] : _aggregator.StdDevPopulation[i]);
+
}
return CdfColumnFunction.Create(Host, mean, stddev, _useLog);
diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs
index 861cfd7368..69a0d1b88d 100644
--- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs
+++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs
@@ -438,16 +438,26 @@ public Double[] Mean
get { return _mean; }
}
- public Double[] StdDev
+ public Double[] StdDevPopulation
{
get { return _m2.Select((m2, i) => Math.Sqrt(m2 / _cnz[i])).ToArray(); }
}
+ public Double[] StdDevSample
+ {
+ get { return _m2.Select((m2, i) => Math.Sqrt(m2 / Math.Max(0, _cnz[i] - 1))).ToArray(); }
+ }
+
public Double[] MeanSquareError
{
get { return _m2.Select((m2, i) => m2 / _cnz[i]).ToArray(); }
}
+ public Double[] SampleVariance
+ {
+ get { return _m2.Select((m2, i) => m2 / Math.Max(0, _cnz[i] - 1)).ToArray(); }
+ }
+
public Double[] M2
{
get { return _m2; }
@@ -1800,15 +1810,17 @@ public sealed class MeanVarOneColumnFunctionBuilder : OneColumnFunctionBuilderBa
private readonly bool _useLog;
private readonly bool _useCdf;
private readonly bool _fix;
+ private readonly bool _useSampleVariance;
private readonly MeanVarSngAggregator _aggregator;
private VBuffer _buffer;
- private MeanVarOneColumnFunctionBuilder(IHost host, long lim, bool fix, ValueGetter getSrc, bool useLog, bool useCdf)
+ private MeanVarOneColumnFunctionBuilder(IHost host, long lim, bool fix, ValueGetter getSrc, bool useLog, bool useCdf, bool useSampleVariance)
: base(host, lim, getSrc)
{
_useLog = useLog;
_useCdf = useCdf;
_fix = fix;
+ _useSampleVariance = useSampleVariance;
_aggregator = new MeanVarSngAggregator(1, useLog);
_buffer = new VBuffer(1, new TFloat[1]);
}
@@ -1817,7 +1829,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.MeanVarianceCol
ValueGetter getter)
{
host.CheckUserArg(column.MaximumExampleCount > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
- return new MeanVarOneColumnFunctionBuilder(host, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf);
+ return new MeanVarOneColumnFunctionBuilder(host, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf, column.UseSampleVariance);
}
public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVarianceColumnOptions column, IHost host, DataViewType srcType,
@@ -1825,7 +1837,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVariance
{
var lim = column.MaximumExampleCount;
host.CheckUserArg(lim > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
- return new MeanVarOneColumnFunctionBuilder(host, lim, column.EnsureZeroUntouched, getter, true, column.UseCdf);
+ return new MeanVarOneColumnFunctionBuilder(host, lim, column.EnsureZeroUntouched, getter, true, column.UseCdf, column.UseSampleVariance);
}
protected override bool ProcessValue(in TFloat origVal)
@@ -1852,10 +1864,13 @@ private IColumnFunction CreateAffineColumnFunction()
return AffineColumnFunction.Create(Host, (TFloat)0, (TFloat)0);
TFloat scale;
TFloat offset;
+ var stdDev = _useSampleVariance ? _aggregator.StdDevSample[0] : _aggregator.StdDevPopulation[0];
+ var variance = _useSampleVariance ? _aggregator.SampleVariance[0] : _aggregator.MeanSquareError[0];
+
if (_fix)
- MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[0], _aggregator.MeanSquareError[0], out scale, out offset);
+ MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[0], variance, out scale, out offset);
else
- MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[0], _aggregator.StdDev[0], out scale, out offset);
+ MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[0], stdDev, out scale, out offset);
return AffineColumnFunction.Create(Host, scale, offset);
}
@@ -1866,7 +1881,9 @@ private IColumnFunction CreateCdfColumnFunction()
if (_aggregator.M2[0] == 0 || _aggregator.Counts[0] == 0)
return CdfColumnFunction.Create(Host, (TFloat)0, (TFloat)0, _useLog);
- return CdfColumnFunction.Create(Host, (TFloat)_aggregator.Mean[0], (TFloat)_aggregator.StdDev[0], _useLog);
+ var stdDev = _useSampleVariance ? _aggregator.StdDevSample[0] : _aggregator.StdDevPopulation[0];
+
+ return CdfColumnFunction.Create(Host, (TFloat)_aggregator.Mean[0], (TFloat)stdDev, _useLog);
}
}
@@ -1875,16 +1892,18 @@ public sealed class MeanVarVecColumnFunctionBuilder : VecColumnFunctionBuilderBa
private readonly bool _fix;
private readonly bool _useLog;
private readonly bool _useCdf;
+ private readonly bool _useSampleVariance;
private readonly MeanVarSngAggregator _aggregator;
private MeanVarVecColumnFunctionBuilder(IHost host, int cv, long lim, bool fix,
- ValueGetter> getSrc, bool useLog, bool useCdf)
+ ValueGetter> getSrc, bool useLog, bool useCdf, bool useSampleVariance)
: base(host, lim, getSrc)
{
_aggregator = new MeanVarSngAggregator(cv, useLog);
_fix = fix;
_useLog = useLog;
_useCdf = useCdf;
+ _useSampleVariance = useSampleVariance;
}
public static IColumnFunctionBuilder Create(NormalizingEstimator.MeanVarianceColumnOptions column, IHost host, VectorDataViewType srcType,
@@ -1892,7 +1911,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.MeanVarianceCol
{
host.CheckUserArg(column.MaximumExampleCount > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
var cv = srcType.Size;
- return new MeanVarVecColumnFunctionBuilder(host, cv, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf);
+ return new MeanVarVecColumnFunctionBuilder(host, cv, column.MaximumExampleCount, column.EnsureZeroUntouched, getter, false, column.UseCdf, column.UseSampleVariance);
}
public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVarianceColumnOptions column, IHost host, VectorDataViewType srcType,
@@ -1901,7 +1920,7 @@ public static IColumnFunctionBuilder Create(NormalizingEstimator.LogMeanVariance
var lim = column.MaximumExampleCount;
host.CheckUserArg(lim > 1, nameof(column.MaximumExampleCount), "Must be greater than 1");
var cv = srcType.Size;
- return new MeanVarVecColumnFunctionBuilder(host, cv, lim, column.EnsureZeroUntouched, getter, true, column.UseCdf);
+ return new MeanVarVecColumnFunctionBuilder(host, cv, lim, column.EnsureZeroUntouched, getter, true, column.UseCdf, column.UseSampleVariance);
}
protected override bool ProcessValue(in VBuffer buffer)
@@ -1939,10 +1958,14 @@ private IColumnFunction CreateAffineColumnFunction()
scale[i] = offset[i] = 0;
continue;
}
+
+ var stdDev = _useSampleVariance ? _aggregator.StdDevSample[i] : _aggregator.StdDevPopulation[i];
+ var variance = _useSampleVariance ? _aggregator.SampleVariance[i] : _aggregator.MeanSquareError[i];
+
if (_fix)
- MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[i], _aggregator.MeanSquareError[i], out scale[i], out offset[i]);
+ MeanVarUtils.ComputeScaleAndOffsetFixZero(_aggregator.Mean[i], variance, out scale[i], out offset[i]);
else
- MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[i], _aggregator.StdDev[i], out scale[i], out offset[i]);
+ MeanVarUtils.ComputeScaleAndOffset(_aggregator.Mean[i], stdDev, out scale[i], out offset[i]);
if (offset[i] != 0 && nz.Count < lim)
nz.Add(i);
}
@@ -1982,7 +2005,8 @@ private IColumnFunction CreateCdfColumnFunction()
continue;
}
mean[i] = (TFloat)_aggregator.Mean[i];
- stddev[i] = (TFloat)_aggregator.StdDev[i];
+ stddev[i] = (TFloat)(_useSampleVariance ? _aggregator.StdDevSample[i] : _aggregator.StdDevPopulation[i]);
+
}
return CdfColumnFunction.Create(Host, mean, stddev, _useLog);
diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs
index 325fb6519c..731e3d3ba8 100644
--- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs
+++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs
@@ -91,6 +91,7 @@ internal static class Defaults
public const bool CenterData = true;
public const uint QuantileMin = 25;
public const uint QuantileMax = 75;
+ public const bool UseSampleVariance = false;
}
[BestFriend]
@@ -191,12 +192,14 @@ internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, D
internal sealed class MeanVarianceColumnOptions : ControlZeroColumnOptionsBase
{
public readonly bool UseCdf;
+ public readonly bool UseSampleVariance;
public MeanVarianceColumnOptions(string outputColumnName, string inputColumnName = null,
- long maximumExampleCount = Defaults.MaximumExampleCount, bool fixZero = Defaults.EnsureZeroUntouched, bool useCdf = Defaults.MeanVarCdf)
+ long maximumExampleCount = Defaults.MaximumExampleCount, bool fixZero = Defaults.EnsureZeroUntouched, bool useCdf = Defaults.MeanVarCdf, bool useSampleVariance = Defaults.UseSampleVariance)
: base(outputColumnName, inputColumnName ?? outputColumnName, maximumExampleCount, fixZero)
{
UseCdf = useCdf;
+ UseSampleVariance = useSampleVariance;
}
internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, DataViewType srcType, DataViewRowCursor cursor)
@@ -207,12 +210,14 @@ internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, D
internal sealed class LogMeanVarianceColumnOptions : ControlZeroColumnOptionsBase
{
public readonly bool UseCdf;
+ public readonly bool UseSampleVariance;
public LogMeanVarianceColumnOptions(string outputColumnName, string inputColumnName = null,
- long maximumExampleCount = Defaults.MaximumExampleCount, bool useCdf = Defaults.LogMeanVarCdf, bool fixZero = Defaults.EnsureZeroUntouched)
+ long maximumExampleCount = Defaults.MaximumExampleCount, bool useCdf = Defaults.LogMeanVarCdf, bool fixZero = Defaults.EnsureZeroUntouched, bool useSampleVariance = Defaults.UseSampleVariance)
: base(outputColumnName, inputColumnName ?? outputColumnName, maximumExampleCount, fixZero)
{
UseCdf = useCdf;
+ UseSampleVariance = useSampleVariance;
}
internal override IColumnFunctionBuilder MakeBuilder(IHost host, int srcIndex, DataViewType srcType, DataViewRowCursor cursor)
diff --git a/src/Microsoft.ML.DataView/Microsoft.ML.DataView.csproj b/src/Microsoft.ML.DataView/Microsoft.ML.DataView.csproj
index a81ffdaf17..59ac30649e 100644
--- a/src/Microsoft.ML.DataView/Microsoft.ML.DataView.csproj
+++ b/src/Microsoft.ML.DataView/Microsoft.ML.DataView.csproj
@@ -1,9 +1,8 @@
-
netstandard2.0
- Microsoft.ML.DataView
+ true
Contains the IDataView system which is a set of interfaces and components that provide efficient, compositional processing of schematized data for machine learning and advanced analytics applications.
diff --git a/src/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.csproj b/src/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.csproj
index ec75b23693..66ded5a388 100644
--- a/src/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.csproj
+++ b/src/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.csproj
@@ -1,24 +1,22 @@
-
netstandard2.0
- Microsoft.ML.DnnImageFeaturizer.AlexNet
+ true
ML.NET component for pretrained AlexNet image featurization
-
- all
-
-
- all
-
+
+
+
+
+
diff --git a/src/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.csproj b/src/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.csproj
new file mode 100644
index 0000000000..f899b4fe13
--- /dev/null
+++ b/src/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.csproj
@@ -0,0 +1,51 @@
+
+
+
+ netstandard2.0
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.csproj b/src/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.csproj
index 70dfa08966..77edab125f 100644
--- a/src/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.csproj
+++ b/src/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.csproj
@@ -1,24 +1,23 @@
-
netstandard2.0
- Microsoft.ML.DnnImageFeaturizer.ResNet101
+ true
ML.NET component for pretrained ResNet101 image featurization
-
- all
-
-
- all
-
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.csproj b/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.csproj
index db02568be9..7fd5310d08 100644
--- a/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.csproj
+++ b/src/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.csproj
@@ -1,24 +1,22 @@
-
netstandard2.0
- Microsoft.ML.DnnImageFeaturizer.ResNet18
+ true
ML.NET component for pretrained ResNet18 image featurization
-
- all
-
-
- all
-
+
+
+
+
+
diff --git a/src/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.csproj b/src/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.csproj
index 5c6e91aefd..a19e37f96b 100644
--- a/src/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.csproj
+++ b/src/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.csproj
@@ -1,24 +1,22 @@
-
netstandard2.0
- Microsoft.ML.DnnImageFeaturizer.ResNet50
+ true
ML.NET component for pretrained ResNet50 image featurization
-
- all
-
-
- all
-
+
+
+
+
+
diff --git a/src/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.csproj b/src/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.csproj
index 766107cd6b..4f83132434 100644
--- a/src/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.csproj
+++ b/src/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.csproj
@@ -1,24 +1,17 @@
-
netstandard2.0
- Microsoft.ML.Ensemble
+ true
ML.NET component for Ensembles
CORECLR
-
- all
-
-
- all
-
-
- all
-
+
+
+
diff --git a/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj b/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj
index a2b946550c..c84a5d5c96 100644
--- a/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj
+++ b/src/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.csproj
@@ -1,27 +1,18 @@
-
netstandard2.0
- Microsoft.ML.EntryPoints
+ true
Microsoft.ML.EntryPoints contains the ML.NET entry point API catalog.
-
- all
-
-
- all
-
-
- all
-
-
- all
-
+
+
+
+
diff --git a/src/Microsoft.ML.Experimental/Microsoft.ML.Experimental.csproj b/src/Microsoft.ML.Experimental/Microsoft.ML.Experimental.csproj
index 1996182a09..6a47692468 100644
--- a/src/Microsoft.ML.Experimental/Microsoft.ML.Experimental.csproj
+++ b/src/Microsoft.ML.Experimental/Microsoft.ML.Experimental.csproj
@@ -1,18 +1,15 @@
-
netstandard2.0
- Microsoft.ML.Experimental
+ true
Microsoft.ML.Experimental contains experimental work such extension methods to access internal methods.
-
- all
-
+
diff --git a/src/Microsoft.ML.Fairlearn/Microsoft.ML.Fairlearn.csproj b/src/Microsoft.ML.Fairlearn/Microsoft.ML.Fairlearn.csproj
index 72b2ad0edb..a1b8d3fec4 100644
--- a/src/Microsoft.ML.Fairlearn/Microsoft.ML.Fairlearn.csproj
+++ b/src/Microsoft.ML.Fairlearn/Microsoft.ML.Fairlearn.csproj
@@ -1,27 +1,21 @@
-
netstandard2.0
- Microsoft.ML.Fairlearn
+ true
None
-
- all
-
-
- all
- true
-
+
+
+
+
+
-
- all
-
diff --git a/src/Microsoft.ML.FastTree/Microsoft.ML.FastTree.csproj b/src/Microsoft.ML.FastTree/Microsoft.ML.FastTree.csproj
index acbef6b851..80d0aab011 100644
--- a/src/Microsoft.ML.FastTree/Microsoft.ML.FastTree.csproj
+++ b/src/Microsoft.ML.FastTree/Microsoft.ML.FastTree.csproj
@@ -1,37 +1,21 @@
-
+
netstandard2.0
- Microsoft.ML.FastTree
+ true
ML.NET component for FastTree
$(DefineConstants);NO_STORE;CORECLR
true
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
-
- all
-
-
- all
-
-
- all
-
-
- all
-
+
+
+
+
-
-
-
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.csproj b/src/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.csproj
index 5edf8ebb14..ad687a0ad0 100644
--- a/src/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.csproj
+++ b/src/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.csproj
@@ -1,9 +1,8 @@
-
netstandard2.0
- Microsoft.ML.ImageAnalytics
+ true
ML.NET component for Image support
true
@@ -20,12 +19,8 @@
-
- all
-
-
- all
-
+
+
diff --git a/src/Microsoft.ML.ImageAnalytics/Properties/AssemblyInfo.cs b/src/Microsoft.ML.ImageAnalytics/Properties/AssemblyInfo.cs
index 3f769a0e11..8dd51f9671 100644
--- a/src/Microsoft.ML.ImageAnalytics/Properties/AssemblyInfo.cs
+++ b/src/Microsoft.ML.ImageAnalytics/Properties/AssemblyInfo.cs
@@ -6,5 +6,6 @@
using Microsoft.ML;
[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.Tests" + PublicKey.TestValue)]
+[assembly: InternalsVisibleTo(assemblyName: "Microsoft.ML.TensorFlow.Tests" + PublicKey.TestValue)]
[assembly: WantsToBeBestFriends]
diff --git a/src/Microsoft.ML.KMeansClustering/Microsoft.ML.KMeansClustering.csproj b/src/Microsoft.ML.KMeansClustering/Microsoft.ML.KMeansClustering.csproj
index f7e24f9457..41a772450c 100644
--- a/src/Microsoft.ML.KMeansClustering/Microsoft.ML.KMeansClustering.csproj
+++ b/src/Microsoft.ML.KMeansClustering/Microsoft.ML.KMeansClustering.csproj
@@ -2,7 +2,6 @@
netstandard2.0
- Microsoft.ML
diff --git a/src/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.csproj b/src/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.csproj
index 4d4c405e01..7b63f306d5 100644
--- a/src/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.csproj
+++ b/src/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.csproj
@@ -1,10 +1,9 @@
-
netstandard2.0
- Microsoft.ML.LightGbm
true
+ true
ML.NET component for LightGBM
@@ -12,19 +11,14 @@
-
- all
-
-
- all
-
-
- all
-
+
+
+
-
+
+
diff --git a/src/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.csproj b/src/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.csproj
index 56566d43df..ae356239ac 100644
--- a/src/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.csproj
+++ b/src/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.csproj
@@ -1,27 +1,18 @@
-
+
netstandard2.0
- Microsoft.ML.Mkl.Components
true
+ true
ML.NET additional learners making use of Intel Mkl.
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
-
- all
-
-
- all
-
-
- all
-
-
- all
-
+
+
+
+
@@ -30,16 +21,7 @@
-
- all
-
+
-
-
-
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/Microsoft.ML.Mkl.Redist/CompatibilitySuppressions.xml b/src/Microsoft.ML.Mkl.Redist/CompatibilitySuppressions.xml
deleted file mode 100644
index 90523af626..0000000000
--- a/src/Microsoft.ML.Mkl.Redist/CompatibilitySuppressions.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
- PKV006
- .NETStandard,Version=v2.0
-
-
\ No newline at end of file
diff --git a/src/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.csproj b/src/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.csproj
index 440caa98d1..31fbd674d0 100644
--- a/src/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.csproj
+++ b/src/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.csproj
@@ -1,8 +1,10 @@
-
-
+
+
- netstandard2.0
- false
+ netstandard2.0
+ true
+ true
+ false
LICENSE.txt
$(MSBuildProjectName) contains the MKL library redistributed as a NuGet package.
@@ -10,8 +12,7 @@
false
- $(NoWarn);NU5127;NU5128
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
+ $(NoWarn);NU5127;NU5128;NU5109
@@ -26,13 +27,4 @@
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/src/Microsoft.ML.OneDal/Microsoft.ML.OneDal.csproj b/src/Microsoft.ML.OneDal/Microsoft.ML.OneDal.csproj
index ab1cc7a4cb..b38ffb379a 100644
--- a/src/Microsoft.ML.OneDal/Microsoft.ML.OneDal.csproj
+++ b/src/Microsoft.ML.OneDal/Microsoft.ML.OneDal.csproj
@@ -1,26 +1,17 @@
-
+
netstandard2.0
- Microsoft.ML.OneDal
true
+ true
ML.NET additional learners making use of Intel® oneAPI Data Analytics Library (oneDAL).
- $(TargetsForTfmSpecificBuildOutput)
-
- all
-
-
- all
-
-
- all
-
-
- all
-
+
+
+
+
win
@@ -30,4 +21,5 @@
+
diff --git a/src/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.csproj b/src/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.csproj
index 56fefc841f..6182288560 100644
--- a/src/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.csproj
+++ b/src/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.csproj
@@ -1,10 +1,9 @@
-
netstandard2.0
- Microsoft.ML.OnnxConverter
Microsoft.ML.Model.Onnx
+ true
ML.NET component for exporting ONNX Models
@@ -15,9 +14,7 @@
-
- all
-
+
diff --git a/src/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.csproj b/src/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.csproj
index 54517aebaa..2b0aefc490 100644
--- a/src/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.csproj
+++ b/src/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.csproj
@@ -1,23 +1,19 @@
-
netstandard2.0
- Microsoft.ML.OnnxTransformer
true
+ true
ML.NET component for Microsoft.ML.OnnxRuntime.Managed library
-
- all
-
-
- all
-
-
+
+
+
+
@@ -26,4 +22,5 @@
OnnxMl.cs
+
diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
index 154a3eeb6c..68d9290676 100644
--- a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
+++ b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
@@ -299,7 +299,11 @@ private List GetOnnxVariablesFromMetadata(IReadOnlyDictionary<
else
{
// No user-specified shape is found, so the shape loaded from ONNX model file is used.
- info = new OnnxVariableInfo(name, meta.Dimensions.ToList(), meta.ElementType, dataViewType, caster);
+ // If its not a tensor then its a Sequence or Map, dimensions should be null and type is always NamedOnnxValue.
+ if (!meta.IsTensor)
+ info = new OnnxVariableInfo(name, null, typeof(NamedOnnxValue), dataViewType, caster);
+ else
+ info = new OnnxVariableInfo(name, meta.Dimensions.ToList(), meta.ElementType, dataViewType, caster);
}
onnxVariableInfos.Add(info);
diff --git a/src/Microsoft.ML.PCA/Microsoft.ML.PCA.csproj b/src/Microsoft.ML.PCA/Microsoft.ML.PCA.csproj
index f7e24f9457..41a772450c 100644
--- a/src/Microsoft.ML.PCA/Microsoft.ML.PCA.csproj
+++ b/src/Microsoft.ML.PCA/Microsoft.ML.PCA.csproj
@@ -2,7 +2,6 @@
netstandard2.0
- Microsoft.ML
diff --git a/src/Microsoft.ML.Parquet/Microsoft.ML.Parquet.csproj b/src/Microsoft.ML.Parquet/Microsoft.ML.Parquet.csproj
index f41344c124..df7c11bb8b 100644
--- a/src/Microsoft.ML.Parquet/Microsoft.ML.Parquet.csproj
+++ b/src/Microsoft.ML.Parquet/Microsoft.ML.Parquet.csproj
@@ -1,9 +1,8 @@
-
netstandard2.0
- Microsoft.ML.Parquet
+ true
ML.NET components for Apache Parquet support.
@@ -14,12 +13,8 @@
-
- all
-
-
- all
-
+
+
diff --git a/src/Microsoft.ML.Recommender/Microsoft.ML.Recommender.csproj b/src/Microsoft.ML.Recommender/Microsoft.ML.Recommender.csproj
index 980e5719da..080bda9500 100644
--- a/src/Microsoft.ML.Recommender/Microsoft.ML.Recommender.csproj
+++ b/src/Microsoft.ML.Recommender/Microsoft.ML.Recommender.csproj
@@ -1,32 +1,19 @@
-
+
netstandard2.0
- Microsoft.ML.Recommender
true
+ true
LIBMF, the core computation library for matrix factorization in ML.NET
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
-
- all
-
-
- all
-
+
+
-
-
-
-
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj b/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj
index db62720cdd..1698d12a23 100644
--- a/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj
+++ b/src/Microsoft.ML.SamplesUtils/Microsoft.ML.SamplesUtils.csproj
@@ -1,9 +1,8 @@
-
netstandard2.0
- Microsoft.ML.SampleUtils
+ true
Sample utils for Microsoft.ML.Samples
+
+
+
+ CP0004
+ Microsoft.ML, Version=1.0.0.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51
+ lib/netstandard2.0/Microsoft.ML.dll
+ right
+ true
+
+
\ No newline at end of file
diff --git a/src/Microsoft.ML/Microsoft.ML.csproj b/src/Microsoft.ML/Microsoft.ML.csproj
index 9b8fdf7538..e32176d1ad 100644
--- a/src/Microsoft.ML/Microsoft.ML.csproj
+++ b/src/Microsoft.ML/Microsoft.ML.csproj
@@ -1,44 +1,30 @@
-
-
+
+
+
netstandard2.0
-
-
+
+ false
+
+ <_FindDependencies>false
+
+ false
+
+ true
$(NoWarn);NU5127;NU5128
+ true
ML.NET is a cross-platform open-source machine learning framework which makes machine learning accessible to .NET developers.
README.md
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
-
- true
- all
-
-
- true
- all
-
-
- true
- all
-
-
- true
- all
-
-
- true
- all
-
-
- true
- all
-
+
+
+
+
+
+
@@ -52,21 +38,6 @@
-
-
- $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
\ No newline at end of file
diff --git a/src/Native/CpuMathNative/CMakeLists.txt b/src/Native/CpuMathNative/CMakeLists.txt
index 3141e6c1e0..3eb8a52384 100644
--- a/src/Native/CpuMathNative/CMakeLists.txt
+++ b/src/Native/CpuMathNative/CMakeLists.txt
@@ -13,4 +13,9 @@ endif()
add_library(CpuMathNative SHARED ${SOURCES} ${RESOURCES})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
+
install_library_and_symbols (CpuMathNative)
diff --git a/src/Native/FastTreeNative/CMakeLists.txt b/src/Native/FastTreeNative/CMakeLists.txt
index e03e2baaa2..a908d7c82a 100644
--- a/src/Native/FastTreeNative/CMakeLists.txt
+++ b/src/Native/FastTreeNative/CMakeLists.txt
@@ -17,4 +17,9 @@ endif()
add_library(FastTreeNative SHARED ${SOURCES} ${RESOURCES})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
+
install_library_and_symbols (FastTreeNative)
diff --git a/src/Native/LdaNative/CMakeLists.txt b/src/Native/LdaNative/CMakeLists.txt
index efc816c5cc..2159463a15 100644
--- a/src/Native/LdaNative/CMakeLists.txt
+++ b/src/Native/LdaNative/CMakeLists.txt
@@ -16,6 +16,11 @@ set(SOURCES
add_library(LdaNative SHARED ${SOURCES} ${RESOURCES})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
+
if (${ARCHITECTURE} STREQUAL "arm")
if (WIN32)
target_link_libraries(LdaNative shell32.lib)
diff --git a/src/Native/MatrixFactorizationNative/CMakeLists.txt b/src/Native/MatrixFactorizationNative/CMakeLists.txt
index 4f45ccacd8..092e2c3d75 100644
--- a/src/Native/MatrixFactorizationNative/CMakeLists.txt
+++ b/src/Native/MatrixFactorizationNative/CMakeLists.txt
@@ -43,6 +43,11 @@ endif()
add_library(MatrixFactorizationNative SHARED ${SOURCES} ${RESOURCES})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
+
if (${ARCHITECTURE} STREQUAL "arm")
if (WIN32)
target_link_libraries(MatrixFactorizationNative shell32.lib)
diff --git a/src/Native/MklProxyNative/CMakeLists.txt b/src/Native/MklProxyNative/CMakeLists.txt
index caea9450cb..7b939f3cf2 100644
--- a/src/Native/MklProxyNative/CMakeLists.txt
+++ b/src/Native/MklProxyNative/CMakeLists.txt
@@ -18,6 +18,11 @@ endif()
add_library(MklProxyNative SHARED ${SOURCES} ${RESOURCES})
target_link_libraries(MklProxyNative PUBLIC ${MKL_LIBRARY})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
+
if(APPLE)
set_target_properties(MklProxyNative PROPERTIES INSTALL_RPATH "@loader_path")
endif()
diff --git a/src/Native/OneDalNative/CMakeLists.txt b/src/Native/OneDalNative/CMakeLists.txt
index c40fa9abd7..7028f3324b 100644
--- a/src/Native/OneDalNative/CMakeLists.txt
+++ b/src/Native/OneDalNative/CMakeLists.txt
@@ -24,5 +24,9 @@ endif()
set(SOURCES OneDalAlgorithms.cpp)
add_library(OneDalNative SHARED ${SOURCES} ${RESOURCES})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
target_link_libraries(OneDalNative PUBLIC ${WRAPPER_LINK_OPTIONS})
install_library_and_symbols(OneDalNative)
diff --git a/src/Native/Stdafx.h b/src/Native/Stdafx.h
index 4008ebd012..e1b37c7092 100644
--- a/src/Native/Stdafx.h
+++ b/src/Native/Stdafx.h
@@ -7,6 +7,7 @@
#include
#include
#include
+#include
#define UNUSED(x) (void)(x)
#define DEBUG_ONLY(x) (void)(x)
diff --git a/src/Native/SymSgdNative/CMakeLists.txt b/src/Native/SymSgdNative/CMakeLists.txt
index f40d1a46b4..01652d2aab 100644
--- a/src/Native/SymSgdNative/CMakeLists.txt
+++ b/src/Native/SymSgdNative/CMakeLists.txt
@@ -39,6 +39,10 @@ endif()
add_definitions(-DUSE_OMP)
add_library(SymSgdNative SHARED ${SOURCES} ${RESOURCES})
+if (MSVC AND NOT MSVC_VERSION LESS 1900)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
+endif()
target_link_libraries(SymSgdNative PUBLIC ${MKL_LIBRARY} PUBLIC ${OPENMP_LIBRARY})
if(APPLE)
diff --git a/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj b/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
deleted file mode 100644
index e003f2a5d2..0000000000
--- a/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
+++ /dev/null
@@ -1,131 +0,0 @@
-
-
- netstandard2.0
-
-
-
-
-
- $(ArtifactsObjDir)DnnImageModels
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/test/Directory.Build.props b/test/Directory.Build.props
index eefaafc559..51d5e2eb15 100644
--- a/test/Directory.Build.props
+++ b/test/Directory.Build.props
@@ -18,7 +18,7 @@
CS1591: Missing XML comment for publicly visible type or member 'Type_or_Member'
CS1712: Type parameter 'parameter' has no matching typeparam tag in the XML comment on 'Type_or_Member' (but other type parameters do)
-->
- $(NoWarn),1573,1591,1712
+ $(NoWarn);1573;1591;1712
@@ -44,7 +44,7 @@
false
Analyzer
diff --git a/test/Microsoft.Data.Analysis.Interactive.Tests/Microsoft.Data.Analysis.Interactive.Tests.csproj b/test/Microsoft.Data.Analysis.Interactive.Tests/Microsoft.Data.Analysis.Interactive.Tests.csproj
index bf2bf806e4..c49ffd82d1 100644
--- a/test/Microsoft.Data.Analysis.Interactive.Tests/Microsoft.Data.Analysis.Interactive.Tests.csproj
+++ b/test/Microsoft.Data.Analysis.Interactive.Tests/Microsoft.Data.Analysis.Interactive.Tests.csproj
@@ -1,4 +1,5 @@
+
net6.0
$(NoWarn);MSML_ExtendBaseTestClass
@@ -12,4 +13,5 @@
+
diff --git a/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs b/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs
index 185ab835bb..8961b724d9 100644
--- a/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs
+++ b/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs
@@ -128,7 +128,7 @@ RecordBatch CreateRecordBatch(string prependColumnNamesWith = "")
}
[Fact]
- public void TestEmptyDataFrameRecordBatch()
+ public async void TestEmptyDataFrameRecordBatch()
{
PrimitiveDataFrameColumn ageColumn = new PrimitiveDataFrameColumn("Age");
PrimitiveDataFrameColumn lengthColumn = new PrimitiveDataFrameColumn("CharCount");
@@ -142,7 +142,7 @@ public void TestEmptyDataFrameRecordBatch()
foundARecordBatch = true;
MemoryStream stream = new MemoryStream();
ArrowStreamWriter writer = new ArrowStreamWriter(stream, recordBatch.Schema);
- writer.WriteRecordBatchAsync(recordBatch).GetAwaiter().GetResult();
+ await writer.WriteRecordBatchAsync(recordBatch);
stream.Position = 0;
ArrowStreamReader reader = new ArrowStreamReader(stream);
diff --git a/test/Microsoft.Data.Analysis.Tests/ArrowStringColumnTests.cs b/test/Microsoft.Data.Analysis.Tests/ArrowStringColumnTests.cs
new file mode 100644
index 0000000000..c2f2c9040c
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/ArrowStringColumnTests.cs
@@ -0,0 +1,106 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Apache.Arrow;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public class ArrowStringColumnTests
+ {
+
+ [Fact]
+ public void TestBasicArrowStringColumn()
+ {
+ StringArray strArray = new StringArray.Builder().Append("foo").Append("bar").Build();
+ Memory dataMemory = new byte[] { 102, 111, 111, 98, 97, 114 };
+ Memory nullMemory = new byte[] { 0, 0, 0, 0 };
+ Memory offsetMemory = new byte[] { 0, 0, 0, 0, 3, 0, 0, 0, 6, 0, 0, 0 };
+
+ ArrowStringDataFrameColumn stringColumn = new ArrowStringDataFrameColumn("String", dataMemory, offsetMemory, nullMemory, strArray.Length, strArray.NullCount);
+ Assert.Equal(2, stringColumn.Length);
+ Assert.Equal("foo", stringColumn[0]);
+ Assert.Equal("bar", stringColumn[1]);
+ }
+
+ [Fact]
+ public void TestArrowStringColumnWithNulls()
+ {
+ string data = "joemark";
+ byte[] bytes = Encoding.UTF8.GetBytes(data);
+ Memory dataMemory = new Memory(bytes);
+ Memory nullMemory = new byte[] { 0b1101 };
+ Memory offsetMemory = new byte[] { 0, 0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0, 7, 0, 0, 0, 7, 0, 0, 0 };
+ ArrowStringDataFrameColumn stringColumn = new ArrowStringDataFrameColumn("String", dataMemory, offsetMemory, nullMemory, 4, 1);
+
+ Assert.Equal(4, stringColumn.Length);
+ Assert.Equal("joe", stringColumn[0]);
+ Assert.Null(stringColumn[1]);
+ Assert.Equal("mark", stringColumn[2]);
+ Assert.Equal("", stringColumn[3]);
+
+ List ret = stringColumn[0, 4];
+ Assert.Equal("joe", ret[0]);
+ Assert.Null(ret[1]);
+ Assert.Equal("mark", ret[2]);
+ Assert.Equal("", ret[3]);
+ }
+
+ [Fact]
+ public void TestArrowStringColumnClone()
+ {
+ StringArray strArray = new StringArray.Builder().Append("foo").Append("bar").Build();
+ Memory dataMemory = new byte[] { 102, 111, 111, 98, 97, 114 };
+ Memory nullMemory = new byte[] { 0, 0, 0, 0 };
+ Memory offsetMemory = new byte[] { 0, 0, 0, 0, 3, 0, 0, 0, 6, 0, 0, 0 };
+
+ ArrowStringDataFrameColumn stringColumn = new ArrowStringDataFrameColumn("String", dataMemory, offsetMemory, nullMemory, strArray.Length, strArray.NullCount);
+
+ DataFrameColumn clone = stringColumn.Clone(numberOfNullsToAppend: 5);
+ Assert.Equal(7, clone.Length);
+ Assert.Equal(stringColumn[0], clone[0]);
+ Assert.Equal(stringColumn[1], clone[1]);
+ for (int i = 2; i < 7; i++)
+ Assert.Null(clone[i]);
+ }
+
+ [Fact]
+ public void TestArrowStringApply()
+ {
+ ArrowStringDataFrameColumn column = DataFrameTests.CreateArrowStringColumn(10);
+ ArrowStringDataFrameColumn ret = column.Apply((string cur) =>
+ {
+ if (cur != null)
+ {
+ return cur + "123";
+ }
+ return null;
+ });
+ for (long i = 0; i < column.Length; i++)
+ {
+ if (column[i] != null)
+ {
+ Assert.Equal(column[i] + "123", ret[i]);
+ }
+ else
+ {
+ Assert.Null(ret[i]);
+ }
+ }
+ Assert.Equal(1, ret.NullCount);
+
+ // Test null counts
+ ret = column.Apply((string cur) =>
+ {
+ return null;
+ });
+ Assert.Equal(column.Length, ret.NullCount);
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameJoinTests.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs
similarity index 78%
rename from test/Microsoft.Data.Analysis.Tests/DataFrameJoinTests.cs
rename to test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs
index a465ee70f5..ed39dff49f 100644
--- a/test/Microsoft.Data.Analysis.Tests/DataFrameJoinTests.cs
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs
@@ -8,10 +8,10 @@
namespace Microsoft.Data.Analysis.Tests
{
- public class DataFrameJoinTests
+ public class DataFrameJoinExtensionsTests
{
[Fact]
- public void DataFrameJoinTests_GetSortedListsIntersection_EmptyCollections_EmptyResult()
+ public void GetSortedListsIntersection_EmptyCollections_EmptyResult()
{
// Arrange
@@ -24,11 +24,11 @@ public void DataFrameJoinTests_GetSortedListsIntersection_EmptyCollections_Empty
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
- public void DataFrameJoinTests_GetSortedListsIntersection_EmptyCollections_FirstIsNotEmpty_EmptyResult()
+ public void GetSortedListsIntersection_EmptyCollections_FirstIsNotEmpty_EmptyResult()
{
// Arrange
@@ -47,11 +47,11 @@ public void DataFrameJoinTests_GetSortedListsIntersection_EmptyCollections_First
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
- public void DataFrameJoinTests_GetSortedListsIntersection_EmptyCollections_SecondIsNotEmpty_EmptyResult()
+ public void GetSortedListsIntersection_EmptyCollections_SecondIsNotEmpty_EmptyResult()
{
// Arrange
@@ -70,11 +70,11 @@ public void DataFrameJoinTests_GetSortedListsIntersection_EmptyCollections_Secon
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
- public void DataFrameJoinTests_GetSortedListsIntersection_SortedCollections_WithoutIntersection_Success()
+ public void GetSortedListsIntersection_SortedCollections_WithoutIntersection_Success()
{
// Arrange
@@ -101,11 +101,11 @@ public void DataFrameJoinTests_GetSortedListsIntersection_SortedCollections_With
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
- public void DataFrameJoinTests_GetSortedListsIntersection_SortedCollections_WithIntersection_Success()
+ public void GetSortedListsIntersection_SortedCollections_WithIntersection_Success()
{
// Arrange
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.BinaryOperations.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.BinaryOperations.cs
new file mode 100644
index 0000000000..c077bd201d
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.BinaryOperations.cs
@@ -0,0 +1,477 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Apache.Arrow;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ [Fact]
+ public void TestBinaryOperations()
+ {
+ DataFrame df = MakeDataFrameWithTwoColumns(12);
+ IReadOnlyList<int> listOfInts = new List<int>() { 5, 5 };
+
+ // The following binary ops return a copy
+ var ret = df.Add(5);
+ Assert.Equal(0, df[0, 0]);
+ Assert.Equal(5, ret[0, 0]);
+ ret = df.Add(listOfInts);
+ Assert.Equal(0, df[0, 0]);
+ Assert.Equal(5, ret[0, 0]);
+ ret = df.Subtract(5);
+ Assert.Equal(0, df[0, 0]);
+ Assert.Equal(-5, ret[0, 0]);
+ ret = df.Subtract(listOfInts);
+ Assert.Equal(0, df[0, 0]);
+ Assert.Equal(-5, ret[0, 0]);
+ ret = df.Multiply(5);
+ Assert.Equal(1, df[1, 0]);
+ Assert.Equal(5, ret[1, 0]);
+ ret = df.Multiply(listOfInts);
+ Assert.Equal(1, df[1, 0]);
+ Assert.Equal(5, ret[1, 0]);
+ ret = df.Divide(5);
+ Assert.Equal(5, df[5, 0]);
+ Assert.Equal(1, ret[5, 0]);
+ ret = df.Divide(listOfInts);
+ Assert.Equal(5, df[5, 0]);
+ Assert.Equal(1, ret[5, 0]);
+ ret = df.Modulo(5);
+ Assert.Equal(5, df[5, 0]);
+ Assert.Equal(0, ret[5, 0]);
+ ret = df.Modulo(listOfInts);
+ Assert.Equal(5, df[5, 0]);
+ Assert.Equal(0, ret[5, 0]);
+
+ Assert.Equal(true, df.ElementwiseGreaterThanOrEqual(5)[7, 0]);
+ Assert.Equal(true, df.ElementwiseGreaterThanOrEqual(listOfInts)[7, 0]);
+ Assert.Equal(true, df.ElementwiseLessThanOrEqual(5)[4, 0]);
+ Assert.Equal(true, df.ElementwiseLessThanOrEqual(listOfInts)[4, 0]);
+ Assert.Equal(false, df.ElementwiseGreaterThan(5)[5, 0]);
+ Assert.Equal(false, df.ElementwiseGreaterThan(listOfInts)[5, 0]);
+ Assert.Equal(false, df.ElementwiseLessThan(5)[5, 0]);
+ Assert.Equal(false, df.ElementwiseLessThan(listOfInts)[5, 0]);
+ // The following binary ops are in place
+ Assert.Equal(5, df.Add(5, inPlace: true)[0, 0]);
+ Assert.Equal(10, df.Add(listOfInts, inPlace: true)[0, 0]);
+ Assert.Equal(5, df.Subtract(5, inPlace: true)[0, 0]);
+ Assert.Equal(0, df.Subtract(listOfInts, inPlace: true)[0, 0]);
+ Assert.Equal(5, df.Multiply(5, inPlace: true)[1, 0]);
+ Assert.Equal(25, df.Multiply(listOfInts, inPlace: true)[1, 0]);
+ Assert.Equal(5, df.Divide(5, inPlace: true)[1, 0]);
+ Assert.Equal(1, df.Divide(listOfInts, inPlace: true)[1, 0]);
+ Assert.Equal(1, df.Modulo(5, inPlace: true)[1, 0]);
+ Assert.Equal(1, df.Modulo(listOfInts, inPlace: true)[1, 0]);
+ Assert.Equal(2, df.LeftShift(1)[1, 0]);
+ Assert.Equal(1, df.RightShift(1)[2, 0]);
+ }
+
+ [Fact]
+ public void TestBinaryOperationsWithColumns()
+ {
+ int length = 10;
+ var df1 = MakeDataFrameWithNumericColumns(length);
+ var df2 = MakeDataFrameWithNumericColumns(length);
+
+ DataFrameColumn newColumn;
+ DataFrameColumn verify;
+ for (int i = 0; i < df1.Columns.Count; i++)
+ {
+ newColumn = df1.Columns[df1.Columns[i].Name] + df2.Columns[df2.Columns[i].Name];
+ verify = newColumn.ElementwiseEquals(df1.Columns[i] * 2);
+ Assert.Equal(true, verify[0]);
+
+ newColumn = df1.Columns[df1.Columns[i].Name] - df2.Columns[df2.Columns[i].Name];
+ verify = newColumn.ElementwiseEquals(0);
+ Assert.Equal(true, verify[0]);
+
+ newColumn = df1.Columns[df1.Columns[i].Name] * df2.Columns[df2.Columns[i].Name];
+ verify = newColumn.ElementwiseEquals(df1.Columns[i] * df1.Columns[i]);
+ Assert.Equal(true, verify[0]);
+
+ var df1Column = df1.Columns[i] + 1;
+ var df2Column = df2.Columns[i] + 1;
+ newColumn = df1Column / df2Column;
+ verify = newColumn.ElementwiseEquals(1);
+ Assert.Equal(true, verify[0]);
+
+ newColumn = df1Column % df2Column;
+ verify = newColumn.ElementwiseEquals(0);
+ Assert.Equal(true, verify[0]);
+
+ verify = df1.Columns[df1.Columns[i].Name].ElementwiseEquals(df2.Columns[df2.Columns[i].Name]);
+ Assert.True(verify.All());
+
+ verify = df1.Columns[df1.Columns[i].Name].ElementwiseNotEquals(df2.Columns[df2.Columns[i].Name]);
+ Assert.False(verify.Any());
+
+ verify = df1.Columns[df1.Columns[i].Name].ElementwiseGreaterThanOrEqual(df2.Columns[df2.Columns[i].Name]);
+ Assert.True(verify.All());
+
+ verify = df1.Columns[df1.Columns[i].Name].ElementwiseLessThanOrEqual(df2.Columns[df2.Columns[i].Name]);
+ Assert.True(verify.All());
+
+ verify = df1.Columns[df1.Columns[i].Name].ElementwiseGreaterThan(df2.Columns[df2.Columns[i].Name]);
+ Assert.False(verify.Any());
+
+ verify = df1.Columns[df1.Columns[i].Name].ElementwiseLessThan(df2.Columns[df2.Columns[i].Name]);
+ Assert.False(verify.Any());
+ }
+ }
+
+ [Fact]
+ public void TestBinaryOperationsWithConversions()
+ {
+ DataFrame df = DataFrameTests.MakeDataFrameWithTwoColumns(10);
+
+ // Add a double to an int column
+ DataFrame dfd = df.Add(5.0f);
+ var dtype = dfd.Columns[0].DataType;
+ Assert.True(dtype == typeof(double));
+
+ // Add a decimal to an int column
+ DataFrame dfm = df.Add(5.0m);
+ dtype = dfm.Columns[0].DataType;
+ Assert.True(dtype == typeof(decimal));
+
+ // int + bool should throw
+ Assert.Throws<NotSupportedException>(() => df.Add(true));
+
+ var dataFrameColumn1 = new DoubleDataFrameColumn("Double1", Enumerable.Range(0, 10).Select(x => (double)x));
+ df.Columns[0] = dataFrameColumn1;
+ // Double + comparison ops should throw
+ Assert.Throws<NotSupportedException>(() => df.And(true));
+ }
+
+ [Fact]
+ public void TestBinaryOperationsOnBoolColumn()
+ {
+ var df = new DataFrame();
+ var dataFrameColumn1 = new BooleanDataFrameColumn("Bool1", Enumerable.Range(0, 10).Select(x => true));
+ var dataFrameColumn2 = new BooleanDataFrameColumn("Bool2", Enumerable.Range(0, 10).Select(x => true));
+ df.Columns.Insert(0, dataFrameColumn1);
+ df.Columns.Insert(1, dataFrameColumn2);
+
+ // bool + int should throw
+ Assert.Throws<NotSupportedException>(() => df.Add(5));
+ // Left shift should throw
+ Assert.Throws<NotSupportedException>(() => df.LeftShift(5));
+
+ IReadOnlyList<bool> listOfBools = new List<bool>() { true, false };
+ // boolean and And should work
+ var newdf = df.And(true);
+ Assert.Equal(true, newdf[4, 0]);
+ var newdf1 = df.And(listOfBools);
+ Assert.Equal(false, newdf1[4, 1]);
+
+ newdf = df.Or(true);
+ Assert.Equal(true, newdf[4, 0]);
+ newdf1 = df.Or(listOfBools);
+ Assert.Equal(true, newdf1[4, 1]);
+
+ newdf = df.Xor(true);
+ Assert.Equal(false, newdf[4, 0]);
+ newdf1 = df.Xor(listOfBools);
+ Assert.Equal(true, newdf1[4, 1]);
+ }
+
+ [Fact]
+ public void TestBinaryOperationsOnDateTimeColumn()
+ {
+ var df = new DataFrame();
+ var dataFrameColumn1 = new DateTimeDataFrameColumn("DateTime1", Enumerable.Range(0, 5).Select(x => SampleDateTime.AddDays(x)));
+ // Make the second data frame column have one value that is different
+ var dataFrameColumn2 = new DateTimeDataFrameColumn("DateTime2", Enumerable.Range(0, 4).Select(x => SampleDateTime.AddDays(x)));
+ dataFrameColumn2.Append(SampleDateTime.AddDays(6));
+ df.Columns.Insert(0, dataFrameColumn1);
+ df.Columns.Insert(1, dataFrameColumn2);
+
+ // DateTime + int should throw
+ Assert.Throws<NotSupportedException>(() => df.Add(5));
+ // Left shift should throw
+ Assert.Throws<NotSupportedException>(() => df.LeftShift(5));
+ // Right shift should throw
+ Assert.Throws<NotSupportedException>(() => df.RightShift(5));
+
+ // And should throw
+ Assert.Throws<NotSupportedException>(() => df.And(true));
+ // Or should throw
+ Assert.Throws<NotSupportedException>(() => df.Or(true));
+ // Xor should throw
+ Assert.Throws<NotSupportedException>(() => df.Xor(true));
+
+ var equalsResult = dataFrameColumn1.ElementwiseEquals(dataFrameColumn2);
+ Assert.True(equalsResult[0]);
+ Assert.False(equalsResult[4]);
+
+ var equalsToScalarResult = df["DateTime1"].ElementwiseEquals(SampleDateTime);
+ Assert.True(equalsToScalarResult[0]);
+ Assert.False(equalsToScalarResult[1]);
+
+ var notEqualsResult = dataFrameColumn1.ElementwiseNotEquals(dataFrameColumn2);
+ Assert.False(notEqualsResult[0]);
+ Assert.True(notEqualsResult[4]);
+
+ var notEqualsToScalarResult = df["DateTime1"].ElementwiseNotEquals(SampleDateTime);
+ Assert.False(notEqualsToScalarResult[0]);
+ Assert.True(notEqualsToScalarResult[1]);
+ }
+
+ [Fact]
+ public void TestBinaryOperationsOnArrowStringColumn()
+ {
+ var df = new DataFrame();
+ var strArrayBuilder = new StringArray.Builder();
+ for (int i = 0; i < 10; i++)
+ {
+ strArrayBuilder.Append(i.ToString());
+ }
+ StringArray strArray = strArrayBuilder.Build();
+
+ ArrowStringDataFrameColumn stringColumn = new ArrowStringDataFrameColumn("String", strArray.ValueBuffer.Memory, strArray.ValueOffsetsBuffer.Memory, strArray.NullBitmapBuffer.Memory, strArray.Length, strArray.NullCount);
+ df.Columns.Insert(0, stringColumn);
+
+ DataFrameColumn newCol = stringColumn.ElementwiseEquals(4);
+ Assert.Equal(true, newCol[4]);
+ Assert.Equal(false, newCol[0]);
+ Assert.Equal(false, newCol[5]);
+
+ newCol = stringColumn.ElementwiseEquals("4");
+ Assert.Equal(true, newCol[4]);
+ Assert.Equal(false, newCol[0]);
+
+ newCol = stringColumn.ElementwiseEquals("foo");
+ Assert.False(newCol.All());
+ newCol = stringColumn.ElementwiseEquals(null);
+ Assert.False(newCol.All());
+
+ ArrowStringDataFrameColumn stringColumnCopy = new ArrowStringDataFrameColumn("String", strArray.ValueBuffer.Memory, strArray.ValueOffsetsBuffer.Memory, strArray.NullBitmapBuffer.Memory, strArray.Length, strArray.NullCount);
+ newCol = stringColumn.ElementwiseEquals(stringColumnCopy);
+ Assert.True(newCol.All());
+
+ DataFrameColumn stringColumnCopyAsBaseColumn = stringColumnCopy;
+ newCol = stringColumn.ElementwiseEquals(stringColumnCopyAsBaseColumn);
+ Assert.True(newCol.All());
+
+ newCol = stringColumn.ElementwiseNotEquals(5);
+ Assert.Equal(true, newCol[0]);
+ Assert.Equal(false, newCol[5]);
+
+ newCol = stringColumn.ElementwiseNotEquals("5");
+ Assert.Equal(true, newCol[0]);
+ Assert.Equal(false, newCol[5]);
+
+ newCol = stringColumn.ElementwiseNotEquals("foo");
+ Assert.True(newCol.All());
+ newCol = stringColumn.ElementwiseNotEquals(null);
+ Assert.True(newCol.All());
+
+ newCol = stringColumn.ElementwiseNotEquals(stringColumnCopy);
+ Assert.False(newCol.All());
+
+ newCol = stringColumn.ElementwiseNotEquals(stringColumnCopyAsBaseColumn);
+ Assert.False(newCol.All());
+ }
+
+ [Fact]
+ public void TestBinaryOperationsOnStringColumn()
+ {
+ var df = new DataFrame();
+ DataFrameColumn stringColumn = new StringDataFrameColumn("String", Enumerable.Range(0, 10).Select(x => x.ToString()));
+ df.Columns.Insert(0, stringColumn);
+
+ DataFrameColumn newCol = stringColumn.ElementwiseEquals(5);
+ Assert.Equal(true, newCol[5]);
+ Assert.Equal(false, newCol[0]);
+
+ newCol = (stringColumn as StringDataFrameColumn).ElementwiseEquals("5");
+ Assert.Equal(true, newCol[5]);
+ Assert.Equal(false, newCol[0]);
+
+ DataFrameColumn stringColumnCopy = new StringDataFrameColumn("String", Enumerable.Range(0, 10).Select(x => x.ToString()));
+ newCol = stringColumn.ElementwiseEquals(stringColumnCopy);
+ Assert.Equal(true, newCol[5]);
+ Assert.Equal(true, newCol[0]);
+
+ StringDataFrameColumn typedStringColumn = stringColumn as StringDataFrameColumn;
+ StringDataFrameColumn typedStringColumnCopy = stringColumnCopy as StringDataFrameColumn;
+ newCol = typedStringColumn.ElementwiseEquals(typedStringColumnCopy);
+ Assert.True(newCol.All());
+
+ newCol = stringColumn.ElementwiseNotEquals(5);
+ Assert.Equal(false, newCol[5]);
+ Assert.Equal(true, newCol[0]);
+
+ newCol = typedStringColumn.ElementwiseNotEquals("5");
+ Assert.Equal(false, newCol[5]);
+ Assert.Equal(true, newCol[0]);
+
+ newCol = stringColumn.ElementwiseNotEquals(stringColumnCopy);
+ Assert.Equal(false, newCol[5]);
+ Assert.Equal(false, newCol[0]);
+
+ newCol = typedStringColumn.ElementwiseNotEquals(typedStringColumnCopy);
+ Assert.False(newCol.All());
+
+ newCol = typedStringColumn.Add("suffix");
+ for (int i = 0; i < newCol.Length; i++)
+ {
+ Assert.Equal(newCol[i], typedStringColumn[i] + "suffix");
+ }
+ DataFrameColumn addString = typedStringColumn + "suffix";
+ for (int i = 0; i < addString.Length; i++)
+ {
+ Assert.Equal(addString[i], typedStringColumn[i] + "suffix");
+ }
+ Assert.True(newCol.ElementwiseEquals(addString).All());
+ addString = "prefix" + typedStringColumn;
+ for (int i = 0; i < addString.Length; i++)
+ {
+ Assert.Equal(addString[i], "prefix" + typedStringColumn[i]);
+ }
+ }
+
+ [Fact]
+ public void TestBinaryOperatorsWithConversions()
+ {
+ var df = MakeDataFrameWithNumericColumns(10);
+
+ DataFrame tempDf = df + 1;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + (double)1);
+ tempDf = df + 1.1;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1);
+ tempDf = df + 1.1m;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = df - 1.1;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] - 1.1);
+ tempDf = df - 1.1m;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] - 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = df * 1.1;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1);
+ tempDf = df * 1.1m;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = df / 1.1;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] / 1.1);
+ tempDf = df / 1.1m;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] / 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = df % 1.1;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] % 1.1);
+ tempDf = df % 1.1m;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] % 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = 1 + df;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + (double)1);
+ tempDf = 1.1 + df;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1);
+ tempDf = 1.1m + df;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = 1.1 - df;
+ Assert.Equal(tempDf[0, 0], 1.1 - (byte)df[0, 0]);
+ tempDf = 1.1m - df;
+ Assert.Equal(tempDf[0, 0], 1.1m - (byte)df[0, 0]);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = 1.1 * df;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1);
+ tempDf = 1.1m * df;
+ Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1m);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ // To prevent a divide by zero
+ var plusOne = df + 1;
+ tempDf = 1.1 / plusOne;
+ Assert.Equal(tempDf[0, 0], 1.1 / (double)plusOne[0, 0]);
+ var plusDecimal = df + 1.1m;
+ tempDf = 1.1m / plusDecimal;
+ Assert.Equal(tempDf[0, 0], (1.1m) / (decimal)plusDecimal[0, 0]);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ tempDf = 1.1 % plusOne;
+ Assert.Equal(tempDf[0, 0], 1.1 % (double)plusOne[0, 0]);
+ tempDf = 1.1m % plusDecimal;
+ Assert.Equal(tempDf[0, 0], 1.1m % (decimal)plusDecimal[0, 0]);
+ Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
+
+ Assert.Equal((byte)0, df[0, 0]);
+ }
+
+ [Fact]
+ public void TestBinaryOperationsOnColumns()
+ {
+ Int32DataFrameColumn column = new Int32DataFrameColumn("Int", Enumerable.Range(0, 10));
+ Assert.ThrowsAny<ArgumentException>(() => column.Add(5.5, inPlace: true));
+ Assert.ThrowsAny<ArgumentException>(() => column.ReverseAdd(5.5, inPlace: true));
+ string str = "A String";
+ Assert.ThrowsAny<ArgumentException>(() => column.Add(str, inPlace: true));
+ Assert.ThrowsAny<ArgumentException>(() => column.ReverseAdd(str, inPlace: true));
+ }
+
+ [Fact]
+ public void TestBinaryOperationsOnExplodedNumericColumns()
+ {
+ DataFrame df = MakeDataFrameWithNumericAndBoolColumns(10, withNulls: false);
+ Int32DataFrameColumn ints = df.Columns["Int"] as Int32DataFrameColumn;
+ Int32DataFrameColumn res = ints.Add(1).Subtract(1).Multiply(10).Divide(10).LeftShift(2).RightShift(2);
+ Assert.True(res.ElementwiseEquals(ints).All());
+ Assert.True(res.ElementwiseGreaterThanOrEqual(ints).All());
+ Assert.True(res.ElementwiseLessThanOrEqual(ints).All());
+ Assert.False(res.ElementwiseNotEquals(ints).All());
+ Assert.False(res.ElementwiseGreaterThan(ints).All());
+ Assert.False(res.ElementwiseLessThan(ints).All());
+
+ // Test inPlace
+ Int32DataFrameColumn inPlace = ints.Add(1, inPlace: true).Subtract(1, inPlace: true).Multiply(10, inPlace: true).Divide(10, inPlace: true).LeftShift(2, inPlace: true).RightShift(2, inPlace: true).Add(100, inPlace: true);
+ Assert.True(inPlace.ElementwiseEquals(ints).All());
+ Assert.True(inPlace.ElementwiseGreaterThanOrEqual(ints).All());
+ Assert.True(inPlace.ElementwiseLessThanOrEqual(ints).All());
+ Assert.False(inPlace.ElementwiseNotEquals(ints).All());
+ Assert.False(inPlace.ElementwiseGreaterThan(ints).All());
+ Assert.False(inPlace.ElementwiseLessThan(ints).All());
+
+ Assert.False(inPlace.ElementwiseEquals(res).All());
+ Assert.True(inPlace.ElementwiseGreaterThanOrEqual(res).All());
+ Assert.False(inPlace.ElementwiseLessThanOrEqual(res).All());
+ Assert.True(inPlace.ElementwiseNotEquals(res).All());
+ Assert.True(inPlace.ElementwiseGreaterThan(res).All());
+ Assert.False(inPlace.ElementwiseLessThan(res).All());
+
+ // Test Bool column
+ BooleanDataFrameColumn bools = df.Columns["Bool"] as BooleanDataFrameColumn;
+ BooleanDataFrameColumn allFalse = bools.Or(true).And(true).Xor(true);
+ Assert.True(allFalse.ElementwiseEquals(false).All());
+
+ // Test inPlace
+ BooleanDataFrameColumn inPlaceAllFalse = bools.Or(true, inPlace: true).And(true, inPlace: true).Xor(true, inPlace: true);
+ Assert.True(inPlaceAllFalse.ElementwiseEquals(bools).All());
+
+ // Test Reverse Operations
+ Int32DataFrameColumn reverse = ints.ReverseAdd(1).ReverseSubtract(1).ReverseMultiply(-1);
+ Assert.True(reverse.ElementwiseEquals(ints).All());
+
+ // Test inPlace
+ Int32DataFrameColumn reverseInPlace = ints.ReverseAdd(1, inPlace: true).ReverseSubtract(1, inPlace: true).ReverseMultiply(-1, inPlace: true).ReverseDivide(100, inPlace: true);
+ Assert.True(reverseInPlace.ElementwiseEquals(ints).All());
+ Assert.False(reverseInPlace.ElementwiseEquals(reverse).All());
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Computations.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Computations.cs
new file mode 100644
index 0000000000..d62048aa89
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Computations.cs
@@ -0,0 +1,477 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ [Fact]
+ public void TestComputations()
+ {
+ DataFrame df = MakeDataFrameWithAllMutableColumnTypes(10);
+ df["Int"][0] = -10;
+ Assert.Equal(-10, df.Columns["Int"][0]);
+
+ DataFrameColumn absColumn = df.Columns["Int"].Abs();
+ Assert.Equal(10, absColumn[0]);
+ Assert.Equal(-10, df.Columns["Int"][0]);
+ df.Columns["Int"].Abs(true);
+ Assert.Equal(10, df.Columns["Int"][0]);
+
+ Assert.Throws<NotSupportedException>(() => df.Columns["Byte"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Byte"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Char"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Char"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["DateTime"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["DateTime"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Decimal"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Decimal"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Double"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Double"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Float"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Float"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Int"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Int"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Long"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Long"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Sbyte"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Sbyte"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Short"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Short"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Uint"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Uint"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ulong"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ulong"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ushort"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ushort"].Any());
+
+ bool any = df.Columns["Bool"].Any();
+ bool all = df.Columns["Bool"].All();
+ Assert.True(any);
+ Assert.False(all);
+
+ // Test the computation results
+ df.Columns["Double"][0] = 100.0;
+ DataFrameColumn doubleColumn = df.Columns["Double"].CumulativeMax();
+ for (int i = 0; i < doubleColumn.Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(doubleColumn[i]);
+ else
+ Assert.Equal(100.0, (double)doubleColumn[i]);
+ }
+ Assert.Equal(1.0, df.Columns["Double"][1]);
+ df.Columns["Double"].CumulativeMax(true);
+ for (int i = 0; i < df.Columns["Double"].Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(df.Columns["Double"][i]);
+ else
+ Assert.Equal(100.0, (double)df.Columns["Double"][i]);
+ }
+
+ df.Columns["Float"][0] = -10.0f;
+ DataFrameColumn floatColumn = df.Columns["Float"].CumulativeMin();
+ for (int i = 0; i < floatColumn.Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(floatColumn[i]);
+ else
+ Assert.Equal(-10.0f, (float)floatColumn[i]);
+ }
+ Assert.Equal(9.0f, df.Columns["Float"][9]);
+ df.Columns["Float"].CumulativeMin(true);
+ for (int i = 0; i < df.Columns["Float"].Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(df.Columns["Float"][i]);
+ else
+ Assert.Equal(-10.0f, (float)df.Columns["Float"][i]);
+ }
+
+ DataFrameColumn uintColumn = df.Columns["Uint"].CumulativeProduct();
+ Assert.Equal((uint)0, uintColumn[8]);
+ Assert.Equal((uint)8, df.Columns["Uint"][8]);
+ df.Columns["Uint"].CumulativeProduct(true);
+ Assert.Equal((uint)0, df.Columns["Uint"][9]);
+
+ DataFrameColumn ushortColumn = df.Columns["Ushort"].CumulativeSum();
+ Assert.Equal((ushort)40, ushortColumn[9]);
+ Assert.Equal((ushort)9, df.Columns["Ushort"][9]);
+ df.Columns["Ushort"].CumulativeSum(true);
+ Assert.Equal((ushort)40, df.Columns["Ushort"][9]);
+
+ Assert.Equal(100.0, df.Columns["Double"].Max());
+ Assert.Equal(-10.0f, df.Columns["Float"].Min());
+ Assert.Equal((uint)0, df.Columns["Uint"].Product());
+ Assert.Equal((ushort)130, df.Columns["Ushort"].Sum());
+
+ df.Columns["Double"][0] = 100.1;
+ Assert.Equal(100.1, df.Columns["Double"][0]);
+ DataFrameColumn roundColumn = df.Columns["Double"].Round();
+ Assert.Equal(100.0, roundColumn[0]);
+ Assert.Equal(100.1, df.Columns["Double"][0]);
+ df.Columns["Double"].Round(true);
+ Assert.Equal(100.0, df.Columns["Double"][0]);
+
+ // Test that none of the numeric column types throw
+ for (int i = 0; i < df.Columns.Count; i++)
+ {
+ DataFrameColumn column = df.Columns[i];
+ if (column.DataType == typeof(bool))
+ {
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMax());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMin());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeProduct());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeSum());
+ Assert.Throws<NotSupportedException>(() => column.Max());
+ Assert.Throws<NotSupportedException>(() => column.Min());
+ Assert.Throws<NotSupportedException>(() => column.Product());
+ Assert.Throws<NotSupportedException>(() => column.Sum());
+ continue;
+ }
+ else if (column.DataType == typeof(string))
+ {
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMax());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMin());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeProduct());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeSum());
+ Assert.Throws<NotSupportedException>(() => column.Max());
+ Assert.Throws<NotSupportedException>(() => column.Min());
+ Assert.Throws<NotSupportedException>(() => column.Product());
+ Assert.Throws<NotSupportedException>(() => column.Sum());
+ continue;
+ }
+ else if (column.DataType == typeof(DateTime))
+ {
+ column.CumulativeMax();
+ column.CumulativeMin();
+ column.Max();
+ column.Min();
+
+ Assert.Throws<NotSupportedException>(() => column.CumulativeProduct());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeSum());
+ Assert.Throws<NotSupportedException>(() => column.Product());
+ Assert.Throws<NotSupportedException>(() => column.Sum());
+ continue;
+ }
+
+ column.CumulativeMax();
+ column.CumulativeMin();
+ column.CumulativeProduct();
+ column.CumulativeSum();
+ column.Max();
+ column.Min();
+ column.Product();
+ column.Sum();
+ }
+ }
+
+ [Fact]
+ public void TestComputationsIncludingDateTime()
+ {
+ DataFrame df = MakeDataFrameWithNumericStringAndDateTimeColumns(10);
+ df["Int"][0] = -10;
+ Assert.Equal(-10, df.Columns["Int"][0]);
+
+ DataFrameColumn absColumn = df.Columns["Int"].Abs();
+ Assert.Equal(10, absColumn[0]);
+ Assert.Equal(-10, df.Columns["Int"][0]);
+ df.Columns["Int"].Abs(true);
+ Assert.Equal(10, df.Columns["Int"][0]);
+
+ Assert.Throws<NotSupportedException>(() => df.Columns["Byte"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Byte"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Char"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Char"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Decimal"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Decimal"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Double"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Double"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Float"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Float"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Int"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Int"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Long"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Long"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Sbyte"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Sbyte"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Short"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Short"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Uint"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Uint"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ulong"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ulong"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ushort"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["Ushort"].Any());
+ Assert.Throws<NotSupportedException>(() => df.Columns["DateTime"].All());
+ Assert.Throws<NotSupportedException>(() => df.Columns["DateTime"].Any());
+
+ // Test the computation results
+ var maxDate = SampleDateTime.AddDays(100);
+ df.Columns["DateTime"][0] = maxDate;
+ DataFrameColumn dateTimeColumn = df.Columns["DateTime"].CumulativeMax();
+ for (int i = 0; i < dateTimeColumn.Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(dateTimeColumn[i]);
+ else
+ Assert.Equal(maxDate, (DateTime)dateTimeColumn[i]);
+ }
+ Assert.Equal(maxDate, dateTimeColumn.Max());
+
+ df.Columns["Double"][0] = 100.0;
+ DataFrameColumn doubleColumn = df.Columns["Double"].CumulativeMax();
+ for (int i = 0; i < doubleColumn.Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(doubleColumn[i]);
+ else
+ Assert.Equal(100.0, (double)doubleColumn[i]);
+ }
+ Assert.Equal(1.0, df.Columns["Double"][1]);
+ df.Columns["Double"].CumulativeMax(true);
+ for (int i = 0; i < df.Columns["Double"].Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(df.Columns["Double"][i]);
+ else
+ Assert.Equal(100.0, (double)df.Columns["Double"][i]);
+ }
+
+ df.Columns["Float"][0] = -10.0f;
+ DataFrameColumn floatColumn = df.Columns["Float"].CumulativeMin();
+ for (int i = 0; i < floatColumn.Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(floatColumn[i]);
+ else
+ Assert.Equal(-10.0f, (float)floatColumn[i]);
+ }
+ Assert.Equal(9.0f, df.Columns["Float"][9]);
+ df.Columns["Float"].CumulativeMin(true);
+ for (int i = 0; i < df.Columns["Float"].Length; i++)
+ {
+ if (i == 5)
+ Assert.Null(df.Columns["Float"][i]);
+ else
+ Assert.Equal(-10.0f, (float)df.Columns["Float"][i]);
+ }
+
+ DataFrameColumn uintColumn = df.Columns["Uint"].CumulativeProduct();
+ Assert.Equal((uint)0, uintColumn[8]);
+ Assert.Equal((uint)8, df.Columns["Uint"][8]);
+ df.Columns["Uint"].CumulativeProduct(true);
+ Assert.Equal((uint)0, df.Columns["Uint"][9]);
+
+ DataFrameColumn ushortColumn = df.Columns["Ushort"].CumulativeSum();
+ Assert.Equal((ushort)40, ushortColumn[9]);
+ Assert.Equal((ushort)9, df.Columns["Ushort"][9]);
+ df.Columns["Ushort"].CumulativeSum(true);
+ Assert.Equal((ushort)40, df.Columns["Ushort"][9]);
+
+ Assert.Equal(100.0, df.Columns["Double"].Max());
+ Assert.Equal(-10.0f, df.Columns["Float"].Min());
+ Assert.Equal((uint)0, df.Columns["Uint"].Product());
+ Assert.Equal((ushort)130, df.Columns["Ushort"].Sum());
+
+ df.Columns["Double"][0] = 100.1;
+ Assert.Equal(100.1, df.Columns["Double"][0]);
+ DataFrameColumn roundColumn = df.Columns["Double"].Round();
+ Assert.Equal(100.0, roundColumn[0]);
+ Assert.Equal(100.1, df.Columns["Double"][0]);
+ df.Columns["Double"].Round(true);
+ Assert.Equal(100.0, df.Columns["Double"][0]);
+
+ // Test that none of the numeric column types throw
+ for (int i = 0; i < df.Columns.Count; i++)
+ {
+ DataFrameColumn column = df.Columns[i];
+ if (column.DataType == typeof(bool))
+ {
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMax());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMin());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeProduct());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeSum());
+ Assert.Throws<NotSupportedException>(() => column.Max());
+ Assert.Throws<NotSupportedException>(() => column.Min());
+ Assert.Throws<NotSupportedException>(() => column.Product());
+ Assert.Throws<NotSupportedException>(() => column.Sum());
+ continue;
+ }
+ else if (column.DataType == typeof(string))
+ {
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMax());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeMin());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeProduct());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeSum());
+ Assert.Throws<NotSupportedException>(() => column.Max());
+ Assert.Throws<NotSupportedException>(() => column.Min());
+ Assert.Throws<NotSupportedException>(() => column.Product());
+ Assert.Throws<NotSupportedException>(() => column.Sum());
+ continue;
+ }
+ else if (column.DataType == typeof(DateTime))
+ {
+ Assert.Throws<NotSupportedException>(() => column.CumulativeProduct());
+ Assert.Throws<NotSupportedException>(() => column.CumulativeSum());
+ Assert.Throws<NotSupportedException>(() => column.Product());
+ Assert.Throws<NotSupportedException>(() => column.Sum());
+ continue;
+ }
+ column.CumulativeMax();
+ column.CumulativeMin();
+ column.CumulativeProduct();
+ column.CumulativeSum();
+ column.Max();
+ column.Min();
+ column.Product();
+ column.Sum();
+ }
+ }
+
+ [Fact]
+ public void TestIntComputations_MaxMin_WithNulls()
+ {
+ var column = new Int32DataFrameColumn("Int", new int?[]
+ {
+ null,
+ 2,
+ 1,
+ 4,
+ 3,
+ null
+ });
+
+ Assert.Equal(1, column.Min());
+ Assert.Equal(4, column.Max());
+ }
+
+ [Fact]
+ public void TestIntSum_OnColumnWithNullsOnly()
+ {
+ var column = new Int32DataFrameColumn("Int", new int?[] { null, null });
+ Assert.Null(column.Sum());
+ }
+
+ [Fact]
+ public void TestIntSum_OnEmptyColumn()
+ {
+ var column = new Int32DataFrameColumn("Int");
+ Assert.Null(column.Sum());
+ }
+
+ [Fact]
+ public void TestIntComputations_MaxMin_OnEmptyColumn()
+ {
+ var column = new Int32DataFrameColumn("Int");
+
+ Assert.Null(column.Min());
+ Assert.Null(column.Max());
+ }
+
+ [Fact]
+ public void TestDateTimeComputations_MaxMin_OnEmptyColumn()
+ {
+ var column = new DateTimeDataFrameColumn("DateTime");
+
+ Assert.Null(column.Min());
+ Assert.Null(column.Max());
+ }
+
+ [Fact]
+ public void TestDateTimeComputations_MaxMin_WithNulls()
+ {
+ var dateTimeColumn = new DateTimeDataFrameColumn("DateTime", new DateTime?[]
+ {
+ null,
+ new DateTime(2022, 1, 1),
+ new DateTime(2020, 1, 1),
+ new DateTime(2023, 1, 1),
+ new DateTime(2021, 1, 1),
+ null
+ });
+
+ Assert.Equal(new DateTime(2020, 1, 1), dateTimeColumn.Min());
+ Assert.Equal(new DateTime(2023, 1, 1), dateTimeColumn.Max());
+ }
+
+ [Theory]
+ [InlineData(5, 10)]
+ [InlineData(-15, 10)]
+ [InlineData(-5, 10)]
+ public void TestComputations_WithNegativeNumbers_MaxMin_Calculated(int startingFrom, int length)
+ {
+ // Arrange
+
+ IEnumerable<int> range = Enumerable.Range(startingFrom, length);
+
+ int max = range.Max();
+ int min = range.Min();
+
+ DataFrame df = MakeDataFrameWithNumericColumns(length, withNulls: false, startingFrom);
+
+ var byteColumn = (PrimitiveDataFrameColumn<byte>)df.Columns["Byte"];
+ var decimalColumn = (PrimitiveDataFrameColumn<decimal>)df.Columns["Decimal"];
+ var doubleColumn = (PrimitiveDataFrameColumn<double>)df.Columns["Double"];
+ var floatColumn = (PrimitiveDataFrameColumn<float>)df.Columns["Float"];
+ var intColumn = (PrimitiveDataFrameColumn<int>)df.Columns["Int"];
+ var longColumn = (PrimitiveDataFrameColumn<long>)df.Columns["Long"];
+ var sbyteColumn = (PrimitiveDataFrameColumn<sbyte>)df.Columns["Sbyte"];
+ var shortColumn = (PrimitiveDataFrameColumn<short>)df.Columns["Short"];
+ var uintColumn = (PrimitiveDataFrameColumn<uint>)df.Columns["Uint"];
+ var ulongColumn = (PrimitiveDataFrameColumn<ulong>)df.Columns["Ulong"];
+ var ushortColumn = (PrimitiveDataFrameColumn<ushort>)df.Columns["Ushort"];
+
+ // Act, Assert
+
+ // We need to iterate over all range with conversion to byte due to negative numbers issue
+ Assert.Equal((byte)byteColumn.Max(), range.Select(x => (byte)x).Max());
+
+ Assert.Equal((decimal)decimalColumn.Max(), (decimal)max);
+ Assert.Equal((double)doubleColumn.Max(), (double)max);
+ Assert.Equal((float)floatColumn.Max(), (float)max);
+ Assert.Equal((int)intColumn.Max(), (int)max);
+ Assert.Equal((long)longColumn.Max(), (long)max);
+ Assert.Equal((sbyte)sbyteColumn.Max(), (sbyte)max);
+ Assert.Equal((short)shortColumn.Max(), (short)max);
+
+ // We need to iterate over all range with conversion to uint due to negative numbers issue
+ Assert.Equal((uint)uintColumn.Max(), range.Select(x => (uint)x).Max());
+
+ // We need to iterate over all range with conversion to ulong due to negative numbers issue
+ Assert.Equal((ulong)ulongColumn.Max(), range.Select(x => (ulong)x).Max());
+
+ // We need to iterate over all range with conversion to ushort due to negative numbers issue
+ Assert.Equal((ushort)ushortColumn.Max(), range.Select(x => (ushort)x).Max());
+
+ // We need to iterate over all range with conversion to byte due to negative numbers issue
+ Assert.Equal((byte)byteColumn.Min(), range.Select(x => (byte)x).Min());
+
+ Assert.Equal((decimal)decimalColumn.Min(), (decimal)min);
+ Assert.Equal((double)doubleColumn.Min(), (double)min);
+ Assert.Equal((float)floatColumn.Min(), (float)min);
+ Assert.Equal((int)intColumn.Min(), (int)min);
+ Assert.Equal((long)longColumn.Min(), (long)min);
+ Assert.Equal((sbyte)sbyteColumn.Min(), (sbyte)min);
+ Assert.Equal((short)shortColumn.Min(), (short)min);
+
+ // We need to iterate over all range with conversion to uint due to negative numbers issue
+ Assert.Equal((uint)uintColumn.Min(), range.Select(x => (uint)x).Min());
+
+ // We need to iterate over all range with conversion to ulong due to negative numbers issue
+ Assert.Equal((ulong)ulongColumn.Min(), range.Select(x => (ulong)x).Min());
+
+ // We need to iterate over all range with conversion to ushort due to negative numbers issue
+ Assert.Equal((ushort)ushortColumn.Min(), range.Select(x => (ushort)x).Min());
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Filter.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Filter.cs
new file mode 100644
index 0000000000..765d78b5dd
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Filter.cs
@@ -0,0 +1,63 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ [Fact]
+ public void TestColumnFilter()
+ {
+ // Filter(3, 7) keeps only values inside the inclusive range [3, 7].
+ // Only four rows survive — value 5 is absent, presumably because the
+ // helper frame stores a null at index 5 (TODO confirm against helper).
+ DataFrame df = MakeDataFrameWithNumericColumns(10);
+ DataFrameColumn filtered = df.Columns["Int"].Filter(3, 7);
+ Assert.Equal(4, filtered.Length);
+ Assert.Equal(3, filtered[0]);
+ Assert.Equal(4, filtered[1]);
+ Assert.Equal(6, filtered[2]);
+ Assert.Equal(7, filtered[3]);
+ }
+
+ [Fact]
+ public void TestDataFrameFilter()
+ {
+ // Filtering by a PrimitiveDataFrameColumn<bool> mask keeps only the rows
+ // where the mask is true; filtering by int/long enumerables selects rows
+ // by index. All three index-based paths must round-trip the frame intact.
+ DataFrame df = MakeDataFrameWithAllMutableColumnTypes(10);
+ DataFrame boolColumnFiltered = df[df.Columns["Bool"].ElementwiseEquals(true)];
+ // Fix: the declaration was missing its generic type argument (a bare
+ // non-generic `List` does not exist), which would not compile.
+ List<int> verify = new List<int> { 0, 2, 4, 6, 8 };
+ Assert.Equal(5, boolColumnFiltered.Rows.Count);
+ for (int i = 0; i < boolColumnFiltered.Columns.Count; i++)
+ {
+ DataFrameColumn column = boolColumnFiltered.Columns[i];
+ // Skip columns whose values are not the numeric row index.
+ if (column.Name == "Char" || column.Name == "Bool" || column.Name == "String" || column.Name == "DateTime")
+ continue;
+ for (int j = 0; j < column.Length; j++)
+ {
+ Assert.Equal(verify[j].ToString(), column[j].ToString());
+ }
+ }
+ DataFrame intEnumerableFiltered = df[Enumerable.Range(0, 10)];
+ DataFrame boolEnumerableFiltered = df[Enumerable.Range(0, 10).Select(x => true)];
+ DataFrame longEnumerableFiltered = df[Enumerable.Range(0, 10).Select(x => (long)x)];
+ Assert.Equal(intEnumerableFiltered.Columns.Count, df.Columns.Count)
+ Assert.Equal(boolEnumerableFiltered.Columns.Count, df.Columns.Count);
+ Assert.Equal(longEnumerableFiltered.Columns.Count, df.Columns.Count);
+ for (int i = 0; i < intEnumerableFiltered.Columns.Count; i++)
+ {
+ DataFrameColumn intFilteredColumn = intEnumerableFiltered.Columns[i];
+ DataFrameColumn dfColumn = df.Columns[i];
+ DataFrameColumn boolFilteredColumn = boolEnumerableFiltered.Columns[i];
+ DataFrameColumn longFilteredColumn = longEnumerableFiltered.Columns[i];
+ Assert.True(intFilteredColumn.ElementwiseEquals(dfColumn).All());
+ Assert.True(boolFilteredColumn.ElementwiseEquals(dfColumn).All());
+ Assert.True(longFilteredColumn.ElementwiseEquals(dfColumn).All());
+ }
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Join.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Join.cs
new file mode 100644
index 0000000000..e782296dd9
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Join.cs
@@ -0,0 +1,162 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ [Fact]
+ public void TestJoin()
+ {
+ // Join is positional (row-by-row); overlapping column names get
+ // "_left"/"_right" suffixes. VerifyJoin cross-checks every cell for each
+ // algorithm; the spot-checked null positions come from the helper frames.
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(10);
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(5);
+
+ // Tests with right.Rows.Count < left.Rows.Count
+ // Left join
+ DataFrame join = left.Join(right);
+ Assert.Equal(join.Rows.Count, left.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Null(join.Columns["Int_right"][6]);
+ VerifyJoin(join, left, right, JoinAlgorithm.Left);
+
+ // Right join
+ join = left.Join(right, joinAlgorithm: JoinAlgorithm.Right);
+ Assert.Equal(join.Rows.Count, right.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(join.Columns["Int_right"][3], right.Columns["Int"][3]);
+ Assert.Null(join.Columns["Int_right"][2]);
+ VerifyJoin(join, left, right, JoinAlgorithm.Right);
+
+ // Outer join
+ join = left.Join(right, joinAlgorithm: JoinAlgorithm.FullOuter);
+ Assert.Equal(join.Rows.Count, left.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Null(join.Columns["Int_right"][6]);
+ VerifyJoin(join, left, right, JoinAlgorithm.FullOuter);
+
+ // Inner join
+ join = left.Join(right, joinAlgorithm: JoinAlgorithm.Inner);
+ Assert.Equal(join.Rows.Count, right.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(join.Columns["Int_right"][3], right.Columns["Int"][3]);
+ Assert.Null(join.Columns["Int_right"][2]);
+ VerifyJoin(join, left, right, JoinAlgorithm.Inner);
+
+ // Tests with right.Rows.Count > left.Rows.Count
+ // Left join
+ right = MakeDataFrameWithAllMutableColumnTypes(15);
+ join = left.Join(right);
+ Assert.Equal(join.Rows.Count, left.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(join.Columns["Int_right"][6], right.Columns["Int"][6]);
+ VerifyJoin(join, left, right, JoinAlgorithm.Left);
+
+ // Right join
+ join = left.Join(right, joinAlgorithm: JoinAlgorithm.Right);
+ Assert.Equal(join.Rows.Count, right.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(join.Columns["Int_right"][2], right.Columns["Int"][2]);
+ Assert.Null(join.Columns["Int_left"][12]);
+ VerifyJoin(join, left, right, JoinAlgorithm.Right);
+
+ // Outer join
+ join = left.Join(right, joinAlgorithm: JoinAlgorithm.FullOuter);
+ Assert.Equal(join.Rows.Count, right.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Null(join.Columns["Int_left"][12]);
+ VerifyJoin(join, left, right, JoinAlgorithm.FullOuter);
+
+ // Inner join
+ join = left.Join(right, joinAlgorithm: JoinAlgorithm.Inner);
+ Assert.Equal(join.Rows.Count, left.Rows.Count);
+ Assert.Equal(join.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(join.Columns["Int_right"][2], right.Columns["Int"][2]);
+ VerifyJoin(join, left, right, JoinAlgorithm.Inner);
+ }
+
+ // Cross-checks every column of a positional join against its source frame.
+ // For the side whose rows were padded or truncated by the join, the source
+ // column is first reshaped to join.Rows.Count — either by appending nulls
+ // (when it is shorter) or by cloning through mapIndices, an identity index
+ // map 0..join.Rows.Count-1, to take only the leading rows (when longer).
+ private void VerifyJoin(DataFrame join, DataFrame left, DataFrame right, JoinAlgorithm joinAlgorithm)
+ {
+ Int64DataFrameColumn mapIndices = new Int64DataFrameColumn("map", join.Rows.Count);
+ for (long i = 0; i < join.Rows.Count; i++)
+ {
+ mapIndices[i] = i;
+ }
+ for (int i = 0; i < join.Columns.Count; i++)
+ {
+ DataFrameColumn joinColumn = join.Columns[i];
+ DataFrameColumn isEqual;
+
+ if (joinAlgorithm == JoinAlgorithm.Left)
+ {
+ // Left side is taken as-is; right side is padded/truncated.
+ if (i < left.Columns.Count)
+ {
+ DataFrameColumn leftColumn = left.Columns[i];
+ isEqual = joinColumn.ElementwiseEquals(leftColumn);
+ }
+ else
+ {
+ int columnIndex = i - left.Columns.Count;
+ DataFrameColumn rightColumn = right.Columns[columnIndex];
+ DataFrameColumn compareColumn = rightColumn.Length <= join.Rows.Count ? rightColumn.Clone(numberOfNullsToAppend: join.Rows.Count - rightColumn.Length) : rightColumn.Clone(mapIndices);
+ isEqual = joinColumn.ElementwiseEquals(compareColumn);
+ }
+ }
+ else if (joinAlgorithm == JoinAlgorithm.Right)
+ {
+ // Mirror of the Left case: right side as-is, left side reshaped.
+ if (i < left.Columns.Count)
+ {
+ DataFrameColumn leftColumn = left.Columns[i];
+ DataFrameColumn compareColumn = leftColumn.Length <= join.Rows.Count ? leftColumn.Clone(numberOfNullsToAppend: join.Rows.Count - leftColumn.Length) : leftColumn.Clone(mapIndices);
+ isEqual = joinColumn.ElementwiseEquals(compareColumn);
+ }
+ else
+ {
+ int columnIndex = i - left.Columns.Count;
+ DataFrameColumn rightColumn = right.Columns[columnIndex];
+ isEqual = joinColumn.ElementwiseEquals(rightColumn);
+ }
+ }
+ else if (joinAlgorithm == JoinAlgorithm.Inner)
+ {
+ // Inner join keeps only the leading overlap, so both sides are
+ // truncated to the join's row count via mapIndices.
+ if (i < left.Columns.Count)
+ {
+ DataFrameColumn leftColumn = left.Columns[i];
+ isEqual = joinColumn.ElementwiseEquals(leftColumn.Clone(mapIndices));
+ }
+ else
+ {
+ int columnIndex = i - left.Columns.Count;
+ DataFrameColumn rightColumn = right.Columns[columnIndex];
+ isEqual = joinColumn.ElementwiseEquals(rightColumn.Clone(mapIndices));
+ }
+ }
+ else
+ {
+ // FullOuter: both sides are null-padded up to the join's row count.
+ if (i < left.Columns.Count)
+ {
+ DataFrameColumn leftColumn = left.Columns[i];
+ isEqual = joinColumn.ElementwiseEquals(leftColumn.Clone(numberOfNullsToAppend: join.Rows.Count - leftColumn.Length));
+ }
+ else
+ {
+ int columnIndex = i - left.Columns.Count;
+ DataFrameColumn rightColumn = right.Columns[columnIndex];
+ isEqual = joinColumn.ElementwiseEquals(rightColumn.Clone(numberOfNullsToAppend: join.Rows.Count - rightColumn.Length));
+ }
+ }
+ for (int j = 0; j < join.Rows.Count; j++)
+ {
+ Assert.Equal(true, isEqual[j]);
+ }
+ }
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Merge.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Merge.cs
new file mode 100644
index 0000000000..b507e846e8
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Merge.cs
@@ -0,0 +1,807 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ [Theory]
+ [InlineData(1, 2)]
+ [InlineData(2, 1)]
+ public void TestDataCorrectnessForInnerMerge(int leftCount, int rightCount)
+ {
+ // Both frames carry a "String" column with a side-identifying value; after
+ // an inner merge on "Int" the suffixed columns must still map to the
+ // correct source side, regardless of which frame is larger.
+ DataFrame left = MakeDataFrameWithNumericColumns(leftCount, false);
+ DataFrameColumn leftStringColumn = new StringDataFrameColumn("String", Enumerable.Range(0, leftCount).Select(x => "Left"));
+ left.Columns.Insert(left.Columns.Count, leftStringColumn);
+
+ DataFrame right = MakeDataFrameWithNumericColumns(rightCount, false);
+ DataFrameColumn rightStringColumn = new StringDataFrameColumn("String", Enumerable.Range(0, rightCount).Select(x => "Right"));
+ right.Columns.Insert(right.Columns.Count, rightStringColumn);
+
+ DataFrame merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Inner);
+
+ Assert.Equal("Left", (string)merge.Columns["String_left"][0]);
+ Assert.Equal("Right", (string)merge.Columns["String_right"][0]);
+ }
+
+ [Fact]
+ public void TestMerge()
+ {
+ // Merge matches rows by key column ("Int") rather than by position.
+ // The expected row counts and null positions depend on the null layout of
+ // the helper frames; VerifyMerge checks key-set containment per algorithm.
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(10);
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(5);
+
+ // Tests with right.Rows.Count < left.Rows.Count
+ // Left merge
+ DataFrame merge = left.Merge(right, "Int", "Int");
+ Assert.Equal(10, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Null(merge.Columns["Int_right"][6]);
+ Assert.Null(merge.Columns["Int_left"][5]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Left);
+
+ // Right merge
+ merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Right);
+ Assert.Equal(5, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(merge.Columns["Int_right"][3], right.Columns["Int"][3]);
+ Assert.Null(merge.Columns["Int_right"][2]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Right);
+
+ // Outer merge
+ merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.FullOuter);
+ Assert.Equal(merge.Rows.Count, left.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Null(merge.Columns["Int_right"][6]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.FullOuter);
+
+ // Inner merge
+ merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Inner);
+ Assert.Equal(merge.Rows.Count, right.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(merge.Columns["Int_right"][2], right.Columns["Int"][3]);
+ Assert.Null(merge.Columns["Int_right"][4]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Inner);
+
+ // Tests with right.Rows.Count > left.Rows.Count
+ // Left merge
+ right = MakeDataFrameWithAllMutableColumnTypes(15);
+ merge = left.Merge(right, "Int", "Int");
+ Assert.Equal(merge.Rows.Count, left.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(merge.Columns["Int_right"][6], right.Columns["Int"][6]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Left);
+
+ // Right merge
+ merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Right);
+ Assert.Equal(merge.Rows.Count, right.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(merge.Columns["Int_right"][2], right.Columns["Int"][2]);
+ Assert.Null(merge.Columns["Int_left"][12]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Right);
+
+ // Outer merge
+ merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.FullOuter);
+ Assert.Equal(16, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Null(merge.Columns["Int_left"][12]);
+ Assert.Null(merge.Columns["Int_left"][15]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.FullOuter);
+
+ // Inner merge
+ merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Inner);
+ Assert.Equal(9, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ Assert.Equal(merge.Columns["Int_right"][2], right.Columns["Int"][2]);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Inner);
+ }
+
+ // Asserts that merged row `mergeRow` is the concatenation of left row
+ // `leftRow` followed by right row `rightRow`. A null row index means that
+ // side had no match, so every cell from that side must compare equal to
+ // null in the merged row.
+ private void MatchRowsOnMergedDataFrame(DataFrame merge, DataFrame left, DataFrame right, long mergeRow, long? leftRow, long? rightRow)
+ {
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ DataFrameRow dataFrameMergeRow = merge.Rows[mergeRow];
+ int columnIndex = 0;
+ foreach (object value in dataFrameMergeRow)
+ {
+ object compare = null;
+ if (columnIndex < left.Columns.Count)
+ {
+ if (leftRow != null)
+ {
+ compare = left.Rows[leftRow.Value][columnIndex];
+ }
+ }
+ else
+ {
+ // Columns past the left frame's count come from the right frame.
+ int rightColumnIndex = columnIndex - left.Columns.Count;
+ if (rightRow != null)
+ {
+ compare = right.Rows[rightRow.Value][rightColumnIndex];
+ }
+ }
+ Assert.Equal(value, compare);
+ columnIndex++;
+ }
+ }
+
+ [Theory]
+ [InlineData(10, 5, JoinAlgorithm.Left)]
+ [InlineData(5, 10, JoinAlgorithm.Right)]
+ public void TestMergeEdgeCases_LeftOrRight(int leftLength, int rightLength, JoinAlgorithm joinAlgorithm)
+ {
+ // Symmetric edge case: the larger side drives the merge (left join with a
+ // bigger left, right join with a bigger right) and also gets a null key
+ // injected at index 8 so an unmatched-key row is exercised.
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(leftLength);
+ if (leftLength > 5)
+ {
+ // NOTE(review): uses the DataFrame string indexer here, while sibling
+ // tests use df.Columns["Int"] — presumably equivalent; confirm.
+ left["Int"][8] = null;
+ }
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(rightLength);
+ if (rightLength > 5)
+ {
+ right["Int"][8] = null;
+ }
+
+ DataFrame merge = left.Merge(right, "Int", "Int", joinAlgorithm: joinAlgorithm);
+ Assert.Equal(10, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+ int[] matchedFullRows = new int[] { 0, 1, 3, 4 };
+ for (long i = 0; i < matchedFullRows.Length; i++)
+ {
+ int rowIndex = matchedFullRows[i];
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, rowIndex, rowIndex);
+ }
+
+ // Rows that only exist (or only match) on the larger side: the other side
+ // of the merged row must be entirely null.
+ int[] matchedLeftOrRightRowsNullOtherRows = new int[] { 2, 5, 6, 7, 8, 9 };
+ for (long i = 0; i < matchedLeftOrRightRowsNullOtherRows.Length; i++)
+ {
+ int rowIndex = matchedLeftOrRightRowsNullOtherRows[i];
+ if (leftLength > 5)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, rowIndex, null);
+ }
+ else
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, null, rowIndex);
+ }
+ }
+ }
+
+ [Fact]
+ public void TestMergeEdgeCases_Inner()
+ {
+ // Inner merge with null keys on both sides: nulls match nulls, producing
+ // a cross product of the null-key rows (2 left nulls x 2 right nulls = 4
+ // rows) on top of the 3 value matches.
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(5);
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(10);
+ left["Int"][3] = null;
+ right["Int"][6] = null;
+ // Creates this case:
+ /*
+ * Left: Right:
+ * 0 0
+ * 1 1
+ * null(2) 2
+ * null(3) 3
+ * 4 4
+ * null(5)
+ * null(6)
+ * 7
+ * 8
+ * 9
+ */
+ /*
+ * Merge will result in a DataFrame like:
+ * Int_Left Int_Right
+ * 0 0
+ * 1 1
+ * 4 4
+ * null(2) null(5)
+ * null(3) null(5)
+ * null(2) null(6)
+ * null(3) null(6)
+ */
+
+ DataFrame merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Inner);
+ Assert.Equal(7, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ // Expected (left, right) source-row pairs per merged row, per the diagram.
+ int[] mergeRows = new int[] { 0, 1, 2, 3, 4, 5, 6 };
+ int[] leftRows = new int[] { 0, 1, 4, 2, 3, 2, 3 };
+ int[] rightRows = new int[] { 0, 1, 4, 5, 5, 6, 6 };
+ for (long i = 0; i < mergeRows.Length; i++)
+ {
+ int rowIndex = mergeRows[i];
+ int leftRowIndex = leftRows[i];
+ int rightRowIndex = rightRows[i];
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, leftRowIndex, rightRowIndex);
+ }
+ }
+
+ [Fact]
+ public void TestMergeEdgeCases_Outer()
+ {
+ // Full-outer merge with duplicated null keys: matched nulls cross-product
+ // (4 rows), unmatched left values pad right with nulls, and unmatched
+ // right values pad left with nulls — 9 rows total per the diagram below.
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(5);
+ left["Int"][3] = null;
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(5);
+ right["Int"][1] = 5;
+ right["Int"][3] = null;
+ right["Int"][4] = 6;
+
+ // Creates this case:
+ /*
+ * Left: Right: RowIndex:
+ * 0 0 0
+ * 1 5 1
+ * null null 2
+ * null(3) null(3) 3
+ * 4 6 4
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * Int_left: Int_right: Merged: Index:
+ * 0 0 0 - 0 0
+ * 1 null 1 - N 1
+ * null null 2 - 2 2
+ * null null(3) 2 - 3 3
+ * null(3) null 3 - 2 4
+ * null(3) null(3) 3 - 3 5
+ * 4 null 4 - N 6
+ * null 5 N - 1 7
+ * null 6 N - 4 8
+ */
+
+ DataFrame merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.FullOuter);
+ Assert.Equal(9, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ // Rows matched on both sides.
+ int[] mergeRows = new int[] { 0, 2, 3, 4, 5 };
+ int[] leftRows = new int[] { 0, 2, 2, 3, 3 };
+ int[] rightRows = new int[] { 0, 2, 3, 2, 3 };
+ for (long i = 0; i < mergeRows.Length; i++)
+ {
+ int rowIndex = mergeRows[i];
+ int leftRowIndex = leftRows[i];
+ int rightRowIndex = rightRows[i];
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, leftRowIndex, rightRowIndex);
+ }
+
+ // Left-only rows (right side all-null).
+ mergeRows = new int[] { 1, 6 };
+ leftRows = new int[] { 1, 4 };
+ for (long i = 0; i < mergeRows.Length; i++)
+ {
+ int rowIndex = mergeRows[i];
+ int leftRowIndex = leftRows[i];
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, leftRowIndex, null);
+ }
+
+ // Right-only rows (left side all-null).
+ mergeRows = new int[] { 7, 8 };
+ rightRows = new int[] { 1, 4 };
+ for (long i = 0; i < mergeRows.Length; i++)
+ {
+ int rowIndex = mergeRows[i];
+ int rightRowIndex = rightRows[i];
+ MatchRowsOnMergedDataFrame(merge, left, right, rowIndex, null, rightRowIndex);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_ByTwoColumns_Complex_LeftJoin()
+ {
+ // Test left merge keyed on two int-typed columns (composite key).
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2, 3, 4, 5 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 0, 1, 1, 2, 2, 3 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 3, 1, 2, 1, 2, 1 }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2, 3 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 2, 1, 1 }));
+
+ // Creates this case:
+ /* -------------------------
+ * Left | Right
+ * I G1 G2 | I G1 G2
+ * -------------------------
+ * 0 0 3 | 0 1 1
+ * 1 1 1 | 1 1 2
+ * 2 1 2 | 2 1 1
+ * 3 2 1 | 3 2 1
+ * 4 2 2
+ * 5 3 1
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 IR Merged:
+ * -------------------------
+ * 0 0 3 0 - N
+ * 1 1 1 0 1 1 1 - 0
+ * 1 1 1 2 1 1 1 - 2
+ * 2 1 2 1 1 2 2 - 1
+ * 3 2 1 3 2 1 3 - 3
+ * 4 2 2 4 - N
+ * 5 3 1 5 - N
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2" }, new[] { "G1", "G2" });
+
+ //Assert
+ // Expected (left Index, right Index) source pairs per merged row.
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (0, null),
+ (1, 0),
+ (1, 2),
+ (2, 1),
+ (3, 3),
+ (4, null),
+ (5, null)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+
+ }
+
+ [Fact]
+ public void TestMerge_ByTwoColumns_Simple_ManyToMany_LeftJoin()
+ {
+ // Test left merge keyed on two int-typed columns; duplicated keys on both
+ // sides produce a many-to-many (cross product) match.
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 3 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 3 }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 0 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 0 }));
+
+ // Creates this case:
+ /* ---------------------------
+ * Left | Right
+ * I G1 G2 | I G1 G2
+ * ---------------------------
+ * 0 1 1 | 0 1 1
+ * 1 1 1 | 1 1 1
+ * 2 3 3 | 2 0 0
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 IR Merged:
+ * -------------------------
+ * 0 1 1 0 1 1 0 - 0
+ * 0 1 1 1 1 1 0 - 1
+ * 1 1 1 0 1 1 1 - 0
+ * 1 1 1 1 1 1 1 - 1
+ * 2 3 3 2 - N
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2" }, new[] { "G1", "G2" });
+
+ //Assert
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (0, 0),
+ (0, 1),
+ (1, 0),
+ (1, 1),
+ (2, null)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_ByTwoColumns_Simple_ManyToMany_RightJoin()
+ {
+ // Test right merge keyed on two int-typed columns; duplicated keys on both
+ // sides produce a many-to-many (cross product) match, ordered by the right
+ // frame's rows.
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 3 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 3 }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 0 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 0 }));
+
+ // Creates this case:
+ /* ---------------------------
+ * Left | Right
+ * I G1 G2 | I G1 G2
+ * ---------------------------
+ * 0 1 1 | 0 1 1
+ * 1 1 1 | 1 1 1
+ * 2 3 3 | 2 0 0
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 IR Merged:
+ * -------------------------
+ * 0 1 1 0 1 1 0 - 0
+ * 1 1 1 0 1 1 1 - 0
+ * 0 1 1 1 1 1 0 - 1
+ * 1 1 1 1 1 1 1 - 1
+ * 2 0 0 N - 2
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2" }, new[] { "G1", "G2" }, joinAlgorithm: JoinAlgorithm.Right);
+
+ //Assert
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (0, 0),
+ (1, 0),
+ (0, 1),
+ (1, 1),
+ (null, 2)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_ByTwoColumns_Simple_ManyToMany_InnerJoin()
+ {
+ // Test inner merge keyed on two int-typed columns; only the duplicated
+ // matching keys survive, as a 2x2 cross product.
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 3 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 3 }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 0 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 0 }));
+
+ // Creates this case:
+ /* ---------------------------
+ * Left | Right
+ * I G1 G2 | I G1 G2
+ * ---------------------------
+ * 0 1 1 | 0 1 1
+ * 1 1 1 | 1 1 1
+ * 2 3 3 | 2 0 0
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 IR Merged:
+ * -------------------------
+ * 0 1 1 0 1 1 0 - 0
+ * 1 1 1 0 1 1 1 - 0
+ * 0 1 1 1 1 1 0 - 1
+ * 1 1 1 1 1 1 1 - 1
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2" }, new[] { "G1", "G2" }, joinAlgorithm: JoinAlgorithm.Inner);
+
+ //Assert
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (0, 0),
+ (1, 0),
+ (0, 1),
+ (1, 1)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_ByTwoColumns_Simple_ManyToMany_OuterJoin()
+ {
+ // Test full-outer merge keyed on two int-typed columns: the 2x2 cross
+ // product of matching keys plus one unmatched row from each side.
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 3 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 3 }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 0 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 0 }));
+
+ // Creates this case:
+ /* ---------------------------
+ * Left | Right
+ * I G1 G2 | I G1 G2
+ * ---------------------------
+ * 0 1 1 | 0 1 1
+ * 1 1 1 | 1 1 1
+ * 2 3 3 | 2 0 0
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 IR Merged:
+ * -------------------------
+ * 0 1 1 0 1 1 0 - 0
+ * 0 1 1 1 1 1 0 - 1
+ * 1 1 1 0 1 1 1 - 0
+ * 1 1 1 1 1 1 1 - 1
+ * 2 3 3 2 - N
+ * 2 0 0 N - 2
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2" }, new[] { "G1", "G2" }, joinAlgorithm: JoinAlgorithm.FullOuter);
+
+ //Assert
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (0, 0),
+ (0, 1),
+ (1, 0),
+ (1, 1),
+ (2, null),
+ (null, 2)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_ByThreeColumns_OneToOne_LeftJoin()
+ {
+ // Test merge by LEFT join on a three-column composite key mixing int and
+ // string columns; exactly one row matches on all three keys.
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 2, 1 }));
+ left.Columns.Add(new StringDataFrameColumn("G3", new[] { "A", "B", "C" }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 0, 1, 1 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 2 }));
+ right.Columns.Add(new StringDataFrameColumn("G3", new[] { "Z", "Y", "B" }));
+
+ // Creates this case:
+ /* -----------------------------
+ * Left | Right
+ * I G1 G2 G3 | I G1 G2 G3
+ * ------------------------------
+ * 0 1 1 A | 0 0 1 Z
+ * 1 1 2 B | 1 1 1 Y
+ * 2 2 1 C | 2 1 2 B
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 G3 IR Merged:
+ * -------------------------
+ * 0 1 1 A 0 - N
+ * 1 1 2 B 2 1 2 B 1 - 2
+ * 2 2 1 C 2 - N
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2", "G3" }, new[] { "G1", "G2", "G3" });
+
+ //Assert
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (0, null),
+ (1, 2),
+ (2, null)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_ByThreeColumns_OneToOne_RightJoin()
+ {
+ // Test merge by RIGHT join on a three-column composite key mixing int and
+ // string columns; exactly one row matches on all three keys.
+
+ //Arrange
+ var left = new DataFrame();
+ left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 2 }));
+ left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 2, 1 }));
+ left.Columns.Add(new StringDataFrameColumn("G3", new[] { "A", "B", "C" }));
+
+ var right = new DataFrame();
+ right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 }));
+ right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 0, 1, 1 }));
+ right.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 2 }));
+ right.Columns.Add(new StringDataFrameColumn("G3", new[] { "Z", "Y", "B" }));
+
+ // Creates this case:
+ /* -----------------------------
+ * Left | Right
+ * I G1 G2 G3 | I G1 G2 G3
+ * ------------------------------
+ * 0 1 1 A | 0 0 1 Z
+ * 1 1 2 B | 1 1 1 Y
+ * 2 2 1 C | 2 1 2 B
+ */
+
+ /*
+ * Merge will result in a DataFrame like:
+ * IL G1 G2 G3 IR Merged:
+ * -------------------------
+ * 0 0 1 Z N - 0
+ * 1 1 1 Y N - 1
+ * 1 1 2 B 2 1 2 B 1 - 2
+ */
+
+ //Act
+ var merge = left.Merge(right, new[] { "G1", "G2", "G3" }, new[] { "G1", "G2", "G3" }, joinAlgorithm: JoinAlgorithm.Right);
+
+ //Assert
+ var expectedMerged = new (int? Left, int? Right)[] {
+ (null, 0),
+ (null, 1),
+ (1, 2)
+ };
+
+ Assert.Equal(expectedMerged.Length, merge.Rows.Count);
+ Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count);
+
+ for (long i = 0; i < expectedMerged.Length; i++)
+ {
+ MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right);
+ }
+ }
+
+ [Fact]
+ public void TestMerge_Issue5778()
+ {
+ // Regression test for issue 5778: a left merge where several left rows
+ // match the single right row must keep every left row (no row loss).
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(2, false);
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(1);
+
+ DataFrame merge = left.Merge(right, "Int", "Int");
+
+ Assert.Equal(2, merge.Rows.Count);
+ Assert.Equal(0, (int)merge.Columns["Int_left"][0]);
+ Assert.Equal(1, (int)merge.Columns["Int_left"][1]);
+ MatchRowsOnMergedDataFrame(merge, left, right, 0, 0, 0);
+ MatchRowsOnMergedDataFrame(merge, left, right, 1, 1, 0);
+ }
+
+ [Fact]
+ //Issue 6127
+ public void TestMerge_CorrectColumnTypes()
+ {
+ // Regression test for issue 6127: merged columns must keep their original
+ // strongly-typed column classes (not degrade to a generic column type),
+ // so each typed getter must find its suffixed column.
+ DataFrame left = MakeDataFrameWithAllMutableColumnTypes(2, false);
+ DataFrame right = MakeDataFrameWithAllMutableColumnTypes(1);
+
+ DataFrame merge = left.Merge(right, "Int", "Int");
+
+ Assert.NotNull(merge.Columns.GetBooleanColumn("Bool_left"));
+ Assert.NotNull(merge.Columns.GetBooleanColumn("Bool_right"));
+
+ Assert.NotNull(merge.Columns.GetDecimalColumn("Decimal_left"));
+ Assert.NotNull(merge.Columns.GetDecimalColumn("Decimal_right"));
+
+ Assert.NotNull(merge.Columns.GetSingleColumn("Float_left"));
+ Assert.NotNull(merge.Columns.GetSingleColumn("Float_right"));
+
+ Assert.NotNull(merge.Columns.GetDoubleColumn("Double_left"));
+ Assert.NotNull(merge.Columns.GetDoubleColumn("Double_right"));
+
+ Assert.NotNull(merge.Columns.GetByteColumn("Byte_left"));
+ Assert.NotNull(merge.Columns.GetByteColumn("Byte_right"));
+
+ Assert.NotNull(merge.Columns.GetCharColumn("Char_left"));
+ Assert.NotNull(merge.Columns.GetCharColumn("Char_right"));
+
+ Assert.NotNull(merge.Columns.GetInt16Column("Short_left"));
+ Assert.NotNull(merge.Columns.GetInt16Column("Short_right"));
+
+ Assert.NotNull(merge.Columns.GetUInt16Column("Ushort_left"));
+ Assert.NotNull(merge.Columns.GetUInt16Column("Ushort_right"));
+
+ Assert.NotNull(merge.Columns.GetInt32Column("Int_left"));
+ Assert.NotNull(merge.Columns.GetInt32Column("Int_right"));
+
+ Assert.NotNull(merge.Columns.GetUInt32Column("Uint_left"));
+ Assert.NotNull(merge.Columns.GetUInt32Column("Uint_right"));
+
+ Assert.NotNull(merge.Columns.GetInt64Column("Long_left"));
+ Assert.NotNull(merge.Columns.GetInt64Column("Long_right"));
+
+ Assert.NotNull(merge.Columns.GetUInt64Column("Ulong_left"));
+ Assert.NotNull(merge.Columns.GetUInt64Column("Ulong_right"));
+
+ Assert.NotNull(merge.Columns.GetDateTimeColumn("DateTime_left"));
+ Assert.NotNull(merge.Columns.GetDateTimeColumn("DateTime_right"));
+ }
+
+ // Verifies merge key-set containment: every non-null key that appears on
+ // the driving side of the merged frame ("Int_left" for Left/Inner,
+ // "Int_right" for Right) must originate from the corresponding source
+ // frame's "Int" column. FullOuter is checked as both Left and Right.
+ private void VerifyMerge(DataFrame merge, DataFrame left, DataFrame right, JoinAlgorithm joinAlgorithm)
+ {
+ if (joinAlgorithm == JoinAlgorithm.Left || joinAlgorithm == JoinAlgorithm.Inner)
+ {
+ // Fix: HashSet was missing its generic type argument (a non-generic
+ // HashSet does not exist), which would not compile.
+ HashSet<int> intersection = new HashSet<int>();
+ for (int i = 0; i < merge.Columns["Int_left"].Length; i++)
+ {
+ if (merge.Columns["Int_left"][i] == null)
+ continue;
+ intersection.Add((int)merge.Columns["Int_left"][i]);
+ }
+ for (int i = 0; i < left.Columns["Int"].Length; i++)
+ {
+ if (left.Columns["Int"][i] != null && intersection.Contains((int)left.Columns["Int"][i]))
+ intersection.Remove((int)left.Columns["Int"][i]);
+ }
+ Assert.Empty(intersection);
+ }
+ else if (joinAlgorithm == JoinAlgorithm.Right)
+ {
+ HashSet<int> intersection = new HashSet<int>();
+ for (int i = 0; i < merge.Columns["Int_right"].Length; i++)
+ {
+ if (merge.Columns["Int_right"][i] == null)
+ continue;
+ intersection.Add((int)merge.Columns["Int_right"][i]);
+ }
+ for (int i = 0; i < right.Columns["Int"].Length; i++)
+ {
+ if (right.Columns["Int"][i] != null && intersection.Contains((int)right.Columns["Int"][i]))
+ intersection.Remove((int)right.Columns["Int"][i]);
+ }
+ Assert.Empty(intersection);
+ }
+ else if (joinAlgorithm == JoinAlgorithm.FullOuter)
+ {
+ VerifyMerge(merge, left, right, JoinAlgorithm.Left);
+ VerifyMerge(merge, left, right, JoinAlgorithm.Right);
+ }
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Sort.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Sort.cs
new file mode 100644
index 0000000000..f198ff003d
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Sort.cs
@@ -0,0 +1,139 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ [Fact]
+ public void TestSplitAndSort()
+ {
+ DataFrame df = MakeDataFrameWithAllMutableColumnTypes(20);
+ df.Columns["Int"][0] = 100000;
+ df.Columns["Int"][df.Rows.Count - 1] = -1;
+ df.Columns["Int"][5] = 200000;
+ DataFrame dfTest;
+ DataFrame dfTrain = SplitTrainTest(df, 0.8f, out dfTest);
+
+ // Sort by "Int" in ascending order
+ var sortedDf = dfTrain.OrderBy("Int");
+ Assert.Null(sortedDf.Columns["Int"][sortedDf.Rows.Count - 1]);
+ Assert.Equal(1, sortedDf.Columns["Int"][0]);
+ Assert.Equal(100000, sortedDf.Columns["Int"][sortedDf.Rows.Count - 3]);
+ Assert.Equal(200000, sortedDf.Columns["Int"][sortedDf.Rows.Count - 2]);
+ }
+
+ [Fact]
+ public void TestStringColumnSort()
+ {
+ // StringDataFrameColumn specific sort tests
+ StringDataFrameColumn strColumn = new StringDataFrameColumn("String", 0);
+ Assert.Equal(0, strColumn.NullCount);
+ for (int i = 0; i < 5; i++)
+ {
+ strColumn.Append(null);
+ }
+ Assert.Equal(5, strColumn.NullCount);
+ // Should handle all nulls
+ StringDataFrameColumn sortedStrColumn = strColumn.Sort() as StringDataFrameColumn;
+ Assert.Equal(5, sortedStrColumn.NullCount);
+ Assert.Null(sortedStrColumn[0]);
+
+ for (int i = 0; i < 5; i++)
+ {
+ strColumn.Append(i.ToString());
+ }
+ Assert.Equal(5, strColumn.NullCount);
+
+ // Ascending sort
+ sortedStrColumn = strColumn.Sort() as StringDataFrameColumn;
+ Assert.Equal("0", sortedStrColumn[0]);
+ Assert.Null(sortedStrColumn[9]);
+
+ // Descending sort
+ sortedStrColumn = strColumn.Sort(false) as StringDataFrameColumn;
+ Assert.Equal("4", sortedStrColumn[0]);
+ Assert.Null(sortedStrColumn[9]);
+ }
+
+ [Theory]
+ [InlineData(5)]
+ [InlineData(12)]
+ [InlineData(100)]
+ [InlineData(1000)]
+ public void TestPrimitiveColumnSort(int numberOfNulls)
+ {
+ // Primitive Column Sort
+ Int32DataFrameColumn intColumn = new Int32DataFrameColumn("Int", 0);
+ Assert.Equal(0, intColumn.NullCount);
+ intColumn.AppendMany(null, numberOfNulls);
+ Assert.Equal(numberOfNulls, intColumn.NullCount);
+
+ // Should handle all nulls
+ PrimitiveDataFrameColumn<int> sortedIntColumn = intColumn.Sort();
+ Assert.Equal(numberOfNulls, sortedIntColumn.NullCount);
+ Assert.Null(sortedIntColumn[0]);
+
+ for (int i = 0; i < 5; i++)
+ {
+ intColumn.Append(i);
+ }
+ Assert.Equal(numberOfNulls, intColumn.NullCount);
+
+ // Ascending sort
+ sortedIntColumn = intColumn.Sort();
+ Assert.Equal(0, sortedIntColumn[0]);
+ Assert.Null(sortedIntColumn[9]);
+
+ // Descending sort
+ sortedIntColumn = intColumn.Sort(ascending: false);
+ Assert.Equal(4, sortedIntColumn[0]);
+ Assert.Null(sortedIntColumn[9]);
+ }
+
+ [Fact]
+ public void TestSortWithDifferentNullCountsInColumns()
+ {
+ DataFrame dataFrame = MakeDataFrameWithAllMutableColumnTypes(10);
+ dataFrame["Int"][3] = null;
+ dataFrame["String"][3] = null;
+ DataFrame sorted = dataFrame.OrderBy("Int");
+ void Verify(DataFrame sortedDataFrame)
+ {
+ Assert.Equal(10, sortedDataFrame.Rows.Count);
+ DataFrameRow lastRow = sortedDataFrame.Rows[sortedDataFrame.Rows.Count - 1];
+ DataFrameRow penultimateRow = sortedDataFrame.Rows[sortedDataFrame.Rows.Count - 2];
+ foreach (object value in lastRow)
+ {
+ Assert.Null(value);
+ }
+
+ for (int i = 0; i < sortedDataFrame.Columns.Count; i++)
+ {
+ string columnName = sortedDataFrame.Columns[i].Name;
+ if (columnName != "String" && columnName != "Int")
+ {
+ Assert.Equal(dataFrame[columnName][3], penultimateRow[i]);
+ }
+ else if (columnName == "String" || columnName == "Int")
+ {
+ Assert.Null(penultimateRow[i]);
+ }
+ }
+ }
+
+ Verify(sorted);
+
+ sorted = dataFrame.OrderBy("String");
+ Verify(sorted);
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Utils.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Utils.cs
new file mode 100644
index 0000000000..c08a96d8b5
--- /dev/null
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.Utils.cs
@@ -0,0 +1,231 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Apache.Arrow;
+using Microsoft.ML.Data;
+
+namespace Microsoft.Data.Analysis.Tests
+{
+ public partial class DataFrameTests
+ {
+ public static DataFrame MakeDataFrameWithTwoColumns(int length, bool withNulls = true)
+ {
+ DataFrameColumn dataFrameColumn1 = new Int32DataFrameColumn("Int1", Enumerable.Range(0, length).Select(x => x));
+ DataFrameColumn dataFrameColumn2 = new Int32DataFrameColumn("Int2", Enumerable.Range(10, length).Select(x => x));
+ if (withNulls)
+ {
+ dataFrameColumn1[length / 2] = null;
+ dataFrameColumn2[length / 2] = null;
+ }
+ DataFrame dataFrame = new DataFrame();
+ dataFrame.Columns.Insert(0, dataFrameColumn1);
+ dataFrame.Columns.Insert(1, dataFrameColumn2);
+ return dataFrame;
+ }
+
+ public static ArrowStringDataFrameColumn CreateArrowStringColumn(int length, bool withNulls = true)
+ {
+ byte[] dataMemory = new byte[length * 3];
+ byte[] nullMemory = new byte[BitUtility.ByteCount(length)];
+ byte[] offsetMemory = new byte[(length + 1) * 4];
+
+ // Initialize offset with 0 as the first value
+ offsetMemory[0] = 0;
+ offsetMemory[1] = 0;
+ offsetMemory[2] = 0;
+ offsetMemory[3] = 0;
+
+ // Append "foo" length times, with a possible `null` in the middle
+ int validStringsIndex = 0;
+ for (int i = 0; i < length; i++)
+ {
+ if (withNulls && i == length / 2)
+ {
+ BitUtility.SetBit(nullMemory, i, false);
+ }
+ else
+ {
+ int dataMemoryIndex = validStringsIndex * 3;
+ dataMemory[dataMemoryIndex++] = 102;
+ dataMemory[dataMemoryIndex++] = 111;
+ dataMemory[dataMemoryIndex++] = 111;
+ BitUtility.SetBit(nullMemory, i, true);
+
+ validStringsIndex++;
+ }
+
+ // write the current length to (index + 1)
+ int offsetIndex = (i + 1) * 4;
+ int offsetValue = 3 * validStringsIndex;
+ byte[] offsetValueBytes = BitConverter.GetBytes(offsetValue);
+ offsetMemory[offsetIndex++] = offsetValueBytes[0];
+ offsetMemory[offsetIndex++] = offsetValueBytes[1];
+ offsetMemory[offsetIndex++] = offsetValueBytes[2];
+ offsetMemory[offsetIndex++] = offsetValueBytes[3];
+ }
+
+ int nullCount = withNulls ? 1 : 0;
+ return new ArrowStringDataFrameColumn("ArrowString", dataMemory, offsetMemory, nullMemory, length, nullCount);
+ }
+
+ public static VBufferDataFrameColumn<int> CreateVBufferDataFrameColumn(int length)
+ {
+ var buffers = Enumerable.Repeat(new VBuffer<int>(5, new[] { 0, 1, 2, 3, 4 }), length).ToArray();
+ return new VBufferDataFrameColumn<int>("VBuffer", buffers);
+ }
+
+ public static DataFrame MakeDataFrameWithAllColumnTypes(int length, bool withNulls = true)
+ {
+ DataFrame df = MakeDataFrameWithAllMutableAndArrowColumnTypes(length, withNulls);
+
+ var vBufferColumn = CreateVBufferDataFrameColumn(length);
+ df.Columns.Insert(df.Columns.Count, vBufferColumn);
+
+ return df;
+ }
+
+ public static DataFrame MakeDataFrameWithAllMutableAndArrowColumnTypes(int length, bool withNulls = true)
+ {
+ DataFrame df = MakeDataFrameWithAllMutableColumnTypes(length, withNulls);
+ DataFrameColumn arrowStringColumn = CreateArrowStringColumn(length, withNulls);
+ df.Columns.Insert(df.Columns.Count, arrowStringColumn);
+
+ return df;
+ }
+
+ public static DataFrame MakeDataFrameWithAllMutableColumnTypes(int length, bool withNulls = true)
+ {
+ DataFrame df = MakeDataFrameWithNumericStringAndDateTimeColumns(length, withNulls);
+ DataFrameColumn boolColumn = new BooleanDataFrameColumn("Bool", Enumerable.Range(0, length).Select(x => x % 2 == 0));
+ df.Columns.Insert(df.Columns.Count, boolColumn);
+ if (withNulls)
+ {
+ boolColumn[length / 2] = null;
+ }
+ return df;
+ }
+
+ public static DataFrame MakeDataFrameWithNumericAndBoolColumns(int length, bool withNulls = true)
+ {
+ DataFrame df = MakeDataFrameWithNumericColumns(length, withNulls);
+ DataFrameColumn boolColumn = new BooleanDataFrameColumn("Bool", Enumerable.Range(0, length).Select(x => x % 2 == 0));
+ df.Columns.Insert(df.Columns.Count, boolColumn);
+ if (withNulls)
+ {
+ boolColumn[length / 2] = null;
+ }
+ return df;
+ }
+
+ public static DataFrame MakeDataFrameWithNumericAndStringColumns(int length, bool withNulls = true)
+ {
+ DataFrame df = MakeDataFrameWithNumericColumns(length, withNulls);
+ DataFrameColumn stringColumn = new StringDataFrameColumn("String", Enumerable.Range(0, length).Select(x => x.ToString()));
+ df.Columns.Insert(df.Columns.Count, stringColumn);
+ if (withNulls)
+ {
+ stringColumn[length / 2] = null;
+ }
+
+ DataFrameColumn charColumn = new CharDataFrameColumn("Char", Enumerable.Range(0, length).Select(x => (char)(x + 65)));
+ df.Columns.Insert(df.Columns.Count, charColumn);
+ if (withNulls)
+ {
+ charColumn[length / 2] = null;
+ }
+ return df;
+ }
+
+ internal static DateTime SampleDateTime = new DateTime(2021, 06, 04);
+ public static DataFrame MakeDataFrameWithNumericStringAndDateTimeColumns(int length, bool withNulls = true)
+ {
+ DataFrame df = MakeDataFrameWithNumericAndStringColumns(length, withNulls);
+
+ DataFrameColumn dateTimeColumn = new DateTimeDataFrameColumn("DateTime", Enumerable.Range(0, length).Select(x => SampleDateTime.AddDays(x)));
+ df.Columns.Insert(df.Columns.Count, dateTimeColumn);
+ if (withNulls)
+ {
+ dateTimeColumn[length / 2] = null;
+ }
+ return df;
+ }
+
+ public static DataFrame MakeDataFrameWithNumericColumns(int length, bool withNulls = true, int startingFrom = 0)
+ {
+ IEnumerable<int> range = Enumerable.Range(startingFrom, length);
+
+ var byteColumn = new ByteDataFrameColumn("Byte", range.Select(x => (byte)x));
+ var decimalColumn = new DecimalDataFrameColumn("Decimal", range.Select(x => (decimal)x));
+ var doubleColumn = new DoubleDataFrameColumn("Double", range.Select(x => (double)x));
+ var floatColumn = new SingleDataFrameColumn("Float", range.Select(x => (float)x));
+ var intColumn = new Int32DataFrameColumn("Int", range.Select(x => x));
+ var longColumn = new Int64DataFrameColumn("Long", range.Select(x => (long)x));
+ var sbyteColumn = new SByteDataFrameColumn("Sbyte", range.Select(x => (sbyte)x));
+ var shortColumn = new Int16DataFrameColumn("Short", range.Select(x => (short)x));
+ var uintColumn = new UInt32DataFrameColumn("Uint", range.Select(x => (uint)x));
+ var ulongColumn = new UInt64DataFrameColumn("Ulong", range.Select(x => (ulong)x));
+ var ushortColumn = new UInt16DataFrameColumn("Ushort", range.Select(x => (ushort)x));
+
+ var columnsList = new List<DataFrameColumn>
+ {
+ byteColumn,
+ decimalColumn,
+ doubleColumn,
+ floatColumn,
+ intColumn,
+ longColumn,
+ sbyteColumn,
+ shortColumn,
+ uintColumn,
+ ulongColumn,
+ ushortColumn
+ };
+
+ var dataFrame = new DataFrame(columnsList);
+
+ if (withNulls)
+ {
+ for (var i = 0; i < dataFrame.Columns.Count; i++)
+ {
+ dataFrame.Columns[i][length / 2] = null;
+ }
+ }
+
+ return dataFrame;
+ }
+
+ public static DataFrame MakeDataFrame<T1, T2>(int length, bool withNulls = true)
+ where T1 : unmanaged
+ where T2 : unmanaged
+ {
+ DataFrameColumn baseColumn1 = DataFrameColumn.Create("Column1", Enumerable.Range(0, length).Select(x => (T1)Convert.ChangeType(x % 2 == 0 ? 0 : 1, typeof(T1))));
+ DataFrameColumn baseColumn2 = DataFrameColumn.Create("Column2", Enumerable.Range(0, length).Select(x => (T2)Convert.ChangeType(x % 2 == 0 ? 0 : 1, typeof(T2))));
+ DataFrame dataFrame = new DataFrame(new List<DataFrameColumn> { baseColumn1, baseColumn2 });
+
+ if (withNulls)
+ {
+ for (int i = 0; i < dataFrame.Columns.Count; i++)
+ {
+ dataFrame.Columns[i][length / 2] = null;
+ }
+ }
+
+ return dataFrame;
+ }
+
+ public DataFrame SplitTrainTest(DataFrame input, float testRatio, out DataFrame Test)
+ {
+ IEnumerable<int> randomIndices = Enumerable.Range(0, (int)input.Rows.Count);
+ IEnumerable<int> trainIndices = randomIndices.Take((int)(input.Rows.Count * testRatio));
+ IEnumerable<int> testIndices = randomIndices.Skip((int)(input.Rows.Count * testRatio));
+ Test = input[testIndices];
+ return input[trainIndices];
+ }
+ }
+}
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs
index d5540edf23..1e67caf85f 100644
--- a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs
@@ -6,276 +6,14 @@
using System.Collections.Generic;
using System.Linq;
using System.Text;
-using Apache.Arrow;
using Microsoft.ML;
using Microsoft.ML.Data;
-using Microsoft.ML.TestFramework.Attributes;
using Xunit;
namespace Microsoft.Data.Analysis.Tests
{
public partial class DataFrameTests
{
- public static DataFrame MakeDataFrameWithTwoColumns(int length, bool withNulls = true)
- {
- DataFrameColumn dataFrameColumn1 = new Int32DataFrameColumn("Int1", Enumerable.Range(0, length).Select(x => x));
- DataFrameColumn dataFrameColumn2 = new Int32DataFrameColumn("Int2", Enumerable.Range(10, length).Select(x => x));
- if (withNulls)
- {
- dataFrameColumn1[length / 2] = null;
- dataFrameColumn2[length / 2] = null;
- }
- DataFrame dataFrame = new DataFrame();
- dataFrame.Columns.Insert(0, dataFrameColumn1);
- dataFrame.Columns.Insert(1, dataFrameColumn2);
- return dataFrame;
- }
-
- public static ArrowStringDataFrameColumn CreateArrowStringColumn(int length, bool withNulls = true)
- {
- byte[] dataMemory = new byte[length * 3];
- byte[] nullMemory = new byte[BitUtility.ByteCount(length)];
- byte[] offsetMemory = new byte[(length + 1) * 4];
-
- // Initialize offset with 0 as the first value
- offsetMemory[0] = 0;
- offsetMemory[1] = 0;
- offsetMemory[2] = 0;
- offsetMemory[3] = 0;
-
- // Append "foo" length times, with a possible `null` in the middle
- int validStringsIndex = 0;
- for (int i = 0; i < length; i++)
- {
- if (withNulls && i == length / 2)
- {
- BitUtility.SetBit(nullMemory, i, false);
- }
- else
- {
- int dataMemoryIndex = validStringsIndex * 3;
- dataMemory[dataMemoryIndex++] = 102;
- dataMemory[dataMemoryIndex++] = 111;
- dataMemory[dataMemoryIndex++] = 111;
- BitUtility.SetBit(nullMemory, i, true);
-
- validStringsIndex++;
- }
-
- // write the current length to (index + 1)
- int offsetIndex = (i + 1) * 4;
- int offsetValue = 3 * validStringsIndex;
- byte[] offsetValueBytes = BitConverter.GetBytes(offsetValue);
- offsetMemory[offsetIndex++] = offsetValueBytes[0];
- offsetMemory[offsetIndex++] = offsetValueBytes[1];
- offsetMemory[offsetIndex++] = offsetValueBytes[2];
- offsetMemory[offsetIndex++] = offsetValueBytes[3];
- }
-
- int nullCount = withNulls ? 1 : 0;
- return new ArrowStringDataFrameColumn("ArrowString", dataMemory, offsetMemory, nullMemory, length, nullCount);
- }
-
- public static VBufferDataFrameColumn<int> CreateVBufferDataFrameColumn(int length)
- {
- var buffers = Enumerable.Repeat(new VBuffer<int>(5, new[] { 0, 1, 2, 3, 4 }), length).ToArray();
- return new VBufferDataFrameColumn<int>("VBuffer", buffers);
- }
-
- public static DataFrame MakeDataFrameWithAllColumnTypes(int length, bool withNulls = true)
- {
- DataFrame df = MakeDataFrameWithAllMutableAndArrowColumnTypes(length, withNulls);
-
- var vBufferColumn = CreateVBufferDataFrameColumn(length);
- df.Columns.Insert(df.Columns.Count, vBufferColumn);
-
- return df;
- }
-
- public static DataFrame MakeDataFrameWithAllMutableAndArrowColumnTypes(int length, bool withNulls = true)
- {
- DataFrame df = MakeDataFrameWithAllMutableColumnTypes(length, withNulls);
- DataFrameColumn arrowStringColumn = CreateArrowStringColumn(length, withNulls);
- df.Columns.Insert(df.Columns.Count, arrowStringColumn);
-
- return df;
- }
-
- public static DataFrame MakeDataFrameWithAllMutableColumnTypes(int length, bool withNulls = true)
- {
- DataFrame df = MakeDataFrameWithNumericStringAndDateTimeColumns(length, withNulls);
- DataFrameColumn boolColumn = new BooleanDataFrameColumn("Bool", Enumerable.Range(0, length).Select(x => x % 2 == 0));
- df.Columns.Insert(df.Columns.Count, boolColumn);
- if (withNulls)
- {
- boolColumn[length / 2] = null;
- }
- return df;
- }
-
- public static DataFrame MakeDataFrameWithNumericAndBoolColumns(int length, bool withNulls = true)
- {
- DataFrame df = MakeDataFrameWithNumericColumns(length, withNulls);
- DataFrameColumn boolColumn = new BooleanDataFrameColumn("Bool", Enumerable.Range(0, length).Select(x => x % 2 == 0));
- df.Columns.Insert(df.Columns.Count, boolColumn);
- if (withNulls)
- {
- boolColumn[length / 2] = null;
- }
- return df;
- }
-
- public static DataFrame MakeDataFrameWithNumericAndStringColumns(int length, bool withNulls = true)
- {
- DataFrame df = MakeDataFrameWithNumericColumns(length, withNulls);
- DataFrameColumn stringColumn = new StringDataFrameColumn("String", Enumerable.Range(0, length).Select(x => x.ToString()));
- df.Columns.Insert(df.Columns.Count, stringColumn);
- if (withNulls)
- {
- stringColumn[length / 2] = null;
- }
-
- DataFrameColumn charColumn = new CharDataFrameColumn("Char", Enumerable.Range(0, length).Select(x => (char)(x + 65)));
- df.Columns.Insert(df.Columns.Count, charColumn);
- if (withNulls)
- {
- charColumn[length / 2] = null;
- }
- return df;
- }
-
- internal static DateTime SampleDateTime = new DateTime(2021, 06, 04);
- public static DataFrame MakeDataFrameWithNumericStringAndDateTimeColumns(int length, bool withNulls = true)
- {
- DataFrame df = MakeDataFrameWithNumericAndStringColumns(length, withNulls);
-
- DataFrameColumn dateTimeColumn = new DateTimeDataFrameColumn("DateTime", Enumerable.Range(0, length).Select(x => SampleDateTime.AddDays(x)));
- df.Columns.Insert(df.Columns.Count, dateTimeColumn);
- if (withNulls)
- {
- dateTimeColumn[length / 2] = null;
- }
- return df;
- }
-
- public static DataFrame MakeDataFrameWithNumericColumns(int length, bool withNulls = true, int startingFrom = 0)
- {
- IEnumerable<int> range = Enumerable.Range(startingFrom, length);
-
- var byteColumn = new ByteDataFrameColumn("Byte", range.Select(x => (byte)x));
- var decimalColumn = new DecimalDataFrameColumn("Decimal", range.Select(x => (decimal)x));
- var doubleColumn = new DoubleDataFrameColumn("Double", range.Select(x => (double)x));
- var floatColumn = new SingleDataFrameColumn("Float", range.Select(x => (float)x));
- var intColumn = new Int32DataFrameColumn("Int", range.Select(x => x));
- var longColumn = new Int64DataFrameColumn("Long", range.Select(x => (long)x));
- var sbyteColumn = new SByteDataFrameColumn("Sbyte", range.Select(x => (sbyte)x));
- var shortColumn = new Int16DataFrameColumn("Short", range.Select(x => (short)x));
- var uintColumn = new UInt32DataFrameColumn("Uint", range.Select(x => (uint)x));
- var ulongColumn = new UInt64DataFrameColumn("Ulong", range.Select(x => (ulong)x));
- var ushortColumn = new UInt16DataFrameColumn("Ushort", range.Select(x => (ushort)x));
-
- var columnsList = new List<DataFrameColumn>
- {
- byteColumn,
- decimalColumn,
- doubleColumn,
- floatColumn,
- intColumn,
- longColumn,
- sbyteColumn,
- shortColumn,
- uintColumn,
- ulongColumn,
- ushortColumn
- };
-
- var dataFrame = new DataFrame(columnsList);
-
- if (withNulls)
- {
- for (var i = 0; i < dataFrame.Columns.Count; i++)
- {
- dataFrame.Columns[i][length / 2] = null;
- }
- }
-
- return dataFrame;
- }
-
- public static DataFrame MakeDataFrame<T1, T2>(int length, bool withNulls = true)
- where T1 : unmanaged
- where T2 : unmanaged
- {
- DataFrameColumn baseColumn1 = DataFrameColumn.Create("Column1", Enumerable.Range(0, length).Select(x => (T1)Convert.ChangeType(x % 2 == 0 ? 0 : 1, typeof(T1))));
- DataFrameColumn baseColumn2 = DataFrameColumn.Create("Column2", Enumerable.Range(0, length).Select(x => (T2)Convert.ChangeType(x % 2 == 0 ? 0 : 1, typeof(T2))));
- DataFrame dataFrame = new DataFrame(new List<DataFrameColumn> { baseColumn1, baseColumn2 });
-
- if (withNulls)
- {
- for (int i = 0; i < dataFrame.Columns.Count; i++)
- {
- dataFrame.Columns[i][length / 2] = null;
- }
- }
-
- return dataFrame;
- }
-
- public DataFrame SplitTrainTest(DataFrame input, float testRatio, out DataFrame Test)
- {
- IEnumerable<int> randomIndices = Enumerable.Range(0, (int)input.Rows.Count);
- IEnumerable<int> trainIndices = randomIndices.Take((int)(input.Rows.Count * testRatio));
- IEnumerable<int> testIndices = randomIndices.Skip((int)(input.Rows.Count * testRatio));
- Test = input[testIndices];
- return input[trainIndices];
- }
-
- [Fact]
- public void TestVBufferColumn_Creation()
- {
- var vBufferColumn = CreateVBufferDataFrameColumn(10);
-
- Assert.Equal(10, vBufferColumn.Length);
- Assert.Equal(5, vBufferColumn[0].GetValues().Length);
- Assert.Equal(0, vBufferColumn[0].GetValues()[0]);
- }
-
- [Fact]
- public void TestVBufferColumn_Indexer()
- {
- var buffer = new VBuffer<int>(5, new[] { 4, 3, 2, 1, 0 });
-
- var vBufferColumn = new VBufferDataFrameColumn<int>("VBuffer", 1);
- vBufferColumn[0] = buffer;
-
- Assert.Equal(1, vBufferColumn.Length);
- Assert.Equal(5, vBufferColumn[0].GetValues().Length);
- Assert.Equal(0, vBufferColumn[0].GetValues()[4]);
- }
-
- [X64Fact("32-bit doesn't allow to allocate more than 2 Gb")]
- public void TestVBufferColumn_Indexer_MoreThanMaxInt()
- {
- var originalValues = new[] { 4, 3, 2, 1, 0 };
-
- var length = VBufferDataFrameColumn<int>.MaxCapacity + 3;
-
- var vBufferColumn = new VBufferDataFrameColumn<int>("VBuffer", length);
- long index = length - 2;
-
- vBufferColumn[index] = new VBuffer<int>(5, originalValues);
-
- var values = vBufferColumn[index].GetValues();
-
- Assert.Equal(length, vBufferColumn.Length);
- Assert.Equal(5, values.Length);
-
- for (int i = 0; i < values.Length; i++)
- {
- Assert.Equal(originalValues[i], values[i]);
- }
- }
-
[Fact]
public void TestIndexer()
{
@@ -463,418 +201,6 @@ public void RenameColumnWithRenameColumnTests()
Assert.True(ReferenceEquals(city, renamedColumn));
}
- [Fact]
- public void TestBinaryOperations()
- {
- DataFrame df = MakeDataFrameWithTwoColumns(12);
- IReadOnlyList<int> listOfInts = new List<int>() { 5, 5 };
-
- // The following binary ops return a copy
- var ret = df.Add(5);
- Assert.Equal(0, df[0, 0]);
- Assert.Equal(5, ret[0, 0]);
- ret = df.Add(listOfInts);
- Assert.Equal(0, df[0, 0]);
- Assert.Equal(5, ret[0, 0]);
- ret = df.Subtract(5);
- Assert.Equal(0, df[0, 0]);
- Assert.Equal(-5, ret[0, 0]);
- ret = df.Subtract(listOfInts);
- Assert.Equal(0, df[0, 0]);
- Assert.Equal(-5, ret[0, 0]);
- ret = df.Multiply(5);
- Assert.Equal(1, df[1, 0]);
- Assert.Equal(5, ret[1, 0]);
- ret = df.Multiply(listOfInts);
- Assert.Equal(1, df[1, 0]);
- Assert.Equal(5, ret[1, 0]);
- ret = df.Divide(5);
- Assert.Equal(5, df[5, 0]);
- Assert.Equal(1, ret[5, 0]);
- ret = df.Divide(listOfInts);
- Assert.Equal(5, df[5, 0]);
- Assert.Equal(1, ret[5, 0]);
- ret = df.Modulo(5);
- Assert.Equal(5, df[5, 0]);
- Assert.Equal(0, ret[5, 0]);
- ret = df.Modulo(listOfInts);
- Assert.Equal(5, df[5, 0]);
- Assert.Equal(0, ret[5, 0]);
-
- Assert.Equal(true, df.ElementwiseGreaterThanOrEqual(5)[7, 0]);
- Assert.Equal(true, df.ElementwiseGreaterThanOrEqual(listOfInts)[7, 0]);
- Assert.Equal(true, df.ElementwiseLessThanOrEqual(5)[4, 0]);
- Assert.Equal(true, df.ElementwiseLessThanOrEqual(listOfInts)[4, 0]);
- Assert.Equal(false, df.ElementwiseGreaterThan(5)[5, 0]);
- Assert.Equal(false, df.ElementwiseGreaterThan(listOfInts)[5, 0]);
- Assert.Equal(false, df.ElementwiseLessThan(5)[5, 0]);
- Assert.Equal(false, df.ElementwiseLessThan(listOfInts)[5, 0]);
- // The following binary ops are in place
- Assert.Equal(5, df.Add(5, inPlace: true)[0, 0]);
- Assert.Equal(10, df.Add(listOfInts, inPlace: true)[0, 0]);
- Assert.Equal(5, df.Subtract(5, inPlace: true)[0, 0]);
- Assert.Equal(0, df.Subtract(listOfInts, inPlace: true)[0, 0]);
- Assert.Equal(5, df.Multiply(5, inPlace: true)[1, 0]);
- Assert.Equal(25, df.Multiply(listOfInts, inPlace: true)[1, 0]);
- Assert.Equal(5, df.Divide(5, inPlace: true)[1, 0]);
- Assert.Equal(1, df.Divide(listOfInts, inPlace: true)[1, 0]);
- Assert.Equal(1, df.Modulo(5, inPlace: true)[1, 0]);
- Assert.Equal(1, df.Modulo(listOfInts, inPlace: true)[1, 0]);
- Assert.Equal(2, df.LeftShift(1)[1, 0]);
- Assert.Equal(1, df.RightShift(1)[2, 0]);
- }
-
- [Fact]
- public void TestBinaryOperationsWithColumns()
- {
- int length = 10;
- var df1 = MakeDataFrameWithNumericColumns(length);
- var df2 = MakeDataFrameWithNumericColumns(length);
-
- DataFrameColumn newColumn;
- DataFrameColumn verify;
- for (int i = 0; i < df1.Columns.Count; i++)
- {
- newColumn = df1.Columns[df1.Columns[i].Name] + df2.Columns[df2.Columns[i].Name];
- verify = newColumn.ElementwiseEquals(df1.Columns[i] * 2);
- Assert.Equal(true, verify[0]);
-
- newColumn = df1.Columns[df1.Columns[i].Name] - df2.Columns[df2.Columns[i].Name];
- verify = newColumn.ElementwiseEquals(0);
- Assert.Equal(true, verify[0]);
-
- newColumn = df1.Columns[df1.Columns[i].Name] * df2.Columns[df2.Columns[i].Name];
- verify = newColumn.ElementwiseEquals(df1.Columns[i] * df1.Columns[i]);
- Assert.Equal(true, verify[0]);
-
- var df1Column = df1.Columns[i] + 1;
- var df2Column = df2.Columns[i] + 1;
- newColumn = df1Column / df2Column;
- verify = newColumn.ElementwiseEquals(1);
- Assert.Equal(true, verify[0]);
-
- newColumn = df1Column % df2Column;
- verify = newColumn.ElementwiseEquals(0);
- Assert.Equal(true, verify[0]);
-
- verify = df1.Columns[df1.Columns[i].Name].ElementwiseEquals(df2.Columns[df2.Columns[i].Name]);
- Assert.True(verify.All());
-
- verify = df1.Columns[df1.Columns[i].Name].ElementwiseNotEquals(df2.Columns[df2.Columns[i].Name]);
- Assert.False(verify.Any());
-
- verify = df1.Columns[df1.Columns[i].Name].ElementwiseGreaterThanOrEqual(df2.Columns[df2.Columns[i].Name]);
- Assert.True(verify.All());
-
- verify = df1.Columns[df1.Columns[i].Name].ElementwiseLessThanOrEqual(df2.Columns[df2.Columns[i].Name]);
- Assert.True(verify.All());
-
- verify = df1.Columns[df1.Columns[i].Name].ElementwiseGreaterThan(df2.Columns[df2.Columns[i].Name]);
- Assert.False(verify.Any());
-
- verify = df1.Columns[df1.Columns[i].Name].ElementwiseLessThan(df2.Columns[df2.Columns[i].Name]);
- Assert.False(verify.Any());
- }
- }
-
- [Fact]
- public void TestBinaryOperationsWithConversions()
- {
- DataFrame df = DataFrameTests.MakeDataFrameWithTwoColumns(10);
-
- // Add a double to an int column
- DataFrame dfd = df.Add(5.0f);
- var dtype = dfd.Columns[0].DataType;
- Assert.True(dtype == typeof(double));
-
- // Add a decimal to an int column
- DataFrame dfm = df.Add(5.0m);
- dtype = dfm.Columns[0].DataType;
- Assert.True(dtype == typeof(decimal));
-
- // int + bool should throw
- Assert.Throws<NotSupportedException>(() => df.Add(true));
-
- var dataFrameColumn1 = new DoubleDataFrameColumn("Double1", Enumerable.Range(0, 10).Select(x => (double)x));
- df.Columns[0] = dataFrameColumn1;
- // Double + comparison ops should throw
- Assert.Throws<NotSupportedException>(() => df.And(true));
- }
-
- [Fact]
- public void TestBinaryOperationsOnBoolColumn()
- {
- var df = new DataFrame();
- var dataFrameColumn1 = new BooleanDataFrameColumn("Bool1", Enumerable.Range(0, 10).Select(x => true));
- var dataFrameColumn2 = new BooleanDataFrameColumn("Bool2", Enumerable.Range(0, 10).Select(x => true));
- df.Columns.Insert(0, dataFrameColumn1);
- df.Columns.Insert(1, dataFrameColumn2);
-
- // bool + int should throw
- Assert.Throws<NotSupportedException>(() => df.Add(5));
- // Left shift should throw
- Assert.Throws<NotSupportedException>(() => df.LeftShift(5));
-
- IReadOnlyList<bool> listOfBools = new List<bool>() { true, false };
- // boolean and And should work
- var newdf = df.And(true);
- Assert.Equal(true, newdf[4, 0]);
- var newdf1 = df.And(listOfBools);
- Assert.Equal(false, newdf1[4, 1]);
-
- newdf = df.Or(true);
- Assert.Equal(true, newdf[4, 0]);
- newdf1 = df.Or(listOfBools);
- Assert.Equal(true, newdf1[4, 1]);
-
- newdf = df.Xor(true);
- Assert.Equal(false, newdf[4, 0]);
- newdf1 = df.Xor(listOfBools);
- Assert.Equal(true, newdf1[4, 1]);
- }
-
- [Fact]
- public void TestBinaryOperationsOnDateTimeColumn()
- {
- var df = new DataFrame();
- var dataFrameColumn1 = new DateTimeDataFrameColumn("DateTime1", Enumerable.Range(0, 5).Select(x => SampleDateTime.AddDays(x)));
- // Make the second data frame column have one value that is different
- var dataFrameColumn2 = new DateTimeDataFrameColumn("DateTime2", Enumerable.Range(0, 4).Select(x => SampleDateTime.AddDays(x)));
- dataFrameColumn2.Append(SampleDateTime.AddDays(6));
- df.Columns.Insert(0, dataFrameColumn1);
- df.Columns.Insert(1, dataFrameColumn2);
-
- // DateTime + int should throw
- Assert.Throws<NotSupportedException>(() => df.Add(5));
- // Left shift should throw
- Assert.Throws<NotSupportedException>(() => df.LeftShift(5));
- // Right shift should throw
- Assert.Throws<NotSupportedException>(() => df.RightShift(5));
-
- // And should throw
- Assert.Throws<NotSupportedException>(() => df.And(true));
- // Or should throw
- Assert.Throws<NotSupportedException>(() => df.Or(true));
- // Xor should throw
- Assert.Throws<NotSupportedException>(() => df.Xor(true));
-
- var equalsResult = dataFrameColumn1.ElementwiseEquals(dataFrameColumn2);
- Assert.True(equalsResult[0]);
- Assert.False(equalsResult[4]);
-
- var equalsToScalarResult = df["DateTime1"].ElementwiseEquals(SampleDateTime);
- Assert.True(equalsToScalarResult[0]);
- Assert.False(equalsToScalarResult[1]);
-
- var notEqualsResult = dataFrameColumn1.ElementwiseNotEquals(dataFrameColumn2);
- Assert.False(notEqualsResult[0]);
- Assert.True(notEqualsResult[4]);
-
- var notEqualsToScalarResult = df["DateTime1"].ElementwiseNotEquals(SampleDateTime);
- Assert.False(notEqualsToScalarResult[0]);
- Assert.True(notEqualsToScalarResult[1]);
- }
-
- [Fact]
- public void TestBinaryOperationsOnArrowStringColumn()
- {
- var df = new DataFrame();
- var strArrayBuilder = new StringArray.Builder();
- for (int i = 0; i < 10; i++)
- {
- strArrayBuilder.Append(i.ToString());
- }
- StringArray strArray = strArrayBuilder.Build();
-
- ArrowStringDataFrameColumn stringColumn = new ArrowStringDataFrameColumn("String", strArray.ValueBuffer.Memory, strArray.ValueOffsetsBuffer.Memory, strArray.NullBitmapBuffer.Memory, strArray.Length, strArray.NullCount);
- df.Columns.Insert(0, stringColumn);
-
- DataFrameColumn newCol = stringColumn.ElementwiseEquals(4);
- Assert.Equal(true, newCol[4]);
- Assert.Equal(false, newCol[0]);
- Assert.Equal(false, newCol[5]);
-
- newCol = stringColumn.ElementwiseEquals("4");
- Assert.Equal(true, newCol[4]);
- Assert.Equal(false, newCol[0]);
-
- newCol = stringColumn.ElementwiseEquals("foo");
- Assert.False(newCol.All());
- newCol = stringColumn.ElementwiseEquals(null);
- Assert.False(newCol.All());
-
- ArrowStringDataFrameColumn stringColumnCopy = new ArrowStringDataFrameColumn("String", strArray.ValueBuffer.Memory, strArray.ValueOffsetsBuffer.Memory, strArray.NullBitmapBuffer.Memory, strArray.Length, strArray.NullCount);
- newCol = stringColumn.ElementwiseEquals(stringColumnCopy);
- Assert.True(newCol.All());
-
- DataFrameColumn stringColumnCopyAsBaseColumn = stringColumnCopy;
- newCol = stringColumn.ElementwiseEquals(stringColumnCopyAsBaseColumn);
- Assert.True(newCol.All());
-
- newCol = stringColumn.ElementwiseNotEquals(5);
- Assert.Equal(true, newCol[0]);
- Assert.Equal(false, newCol[5]);
-
- newCol = stringColumn.ElementwiseNotEquals("5");
- Assert.Equal(true, newCol[0]);
- Assert.Equal(false, newCol[5]);
-
- newCol = stringColumn.ElementwiseNotEquals("foo");
- Assert.True(newCol.All());
- newCol = stringColumn.ElementwiseNotEquals(null);
- Assert.True(newCol.All());
-
- newCol = stringColumn.ElementwiseNotEquals(stringColumnCopy);
- Assert.False(newCol.All());
-
- newCol = stringColumn.ElementwiseNotEquals(stringColumnCopyAsBaseColumn);
- Assert.False(newCol.All());
- }
-
- [Fact]
- public void TestBinaryOperationsOnStringColumn()
- {
- var df = new DataFrame();
- DataFrameColumn stringColumn = new StringDataFrameColumn("String", Enumerable.Range(0, 10).Select(x => x.ToString()));
- df.Columns.Insert(0, stringColumn);
-
- DataFrameColumn newCol = stringColumn.ElementwiseEquals(5);
- Assert.Equal(true, newCol[5]);
- Assert.Equal(false, newCol[0]);
-
- newCol = (stringColumn as StringDataFrameColumn).ElementwiseEquals("5");
- Assert.Equal(true, newCol[5]);
- Assert.Equal(false, newCol[0]);
-
- DataFrameColumn stringColumnCopy = new StringDataFrameColumn("String", Enumerable.Range(0, 10).Select(x => x.ToString()));
- newCol = stringColumn.ElementwiseEquals(stringColumnCopy);
- Assert.Equal(true, newCol[5]);
- Assert.Equal(true, newCol[0]);
-
- StringDataFrameColumn typedStringColumn = stringColumn as StringDataFrameColumn;
- StringDataFrameColumn typedStringColumnCopy = stringColumnCopy as StringDataFrameColumn;
- newCol = typedStringColumn.ElementwiseEquals(typedStringColumnCopy);
- Assert.True(newCol.All());
-
- newCol = stringColumn.ElementwiseNotEquals(5);
- Assert.Equal(false, newCol[5]);
- Assert.Equal(true, newCol[0]);
-
- newCol = typedStringColumn.ElementwiseNotEquals("5");
- Assert.Equal(false, newCol[5]);
- Assert.Equal(true, newCol[0]);
-
- newCol = stringColumn.ElementwiseNotEquals(stringColumnCopy);
- Assert.Equal(false, newCol[5]);
- Assert.Equal(false, newCol[0]);
-
- newCol = typedStringColumn.ElementwiseNotEquals(typedStringColumnCopy);
- Assert.False(newCol.All());
-
- newCol = typedStringColumn.Add("suffix");
- for (int i = 0; i < newCol.Length; i++)
- {
- Assert.Equal(newCol[i], typedStringColumn[i] + "suffix");
- }
- DataFrameColumn addString = typedStringColumn + "suffix";
- for (int i = 0; i < addString.Length; i++)
- {
- Assert.Equal(addString[i], typedStringColumn[i] + "suffix");
- }
- Assert.True(newCol.ElementwiseEquals(addString).All());
- addString = "prefix" + typedStringColumn;
- for (int i = 0; i < addString.Length; i++)
- {
- Assert.Equal(addString[i], "prefix" + typedStringColumn[i]);
- }
- }
-
- [Fact]
- public void TestBinaryOperatorsWithConversions()
- {
- var df = MakeDataFrameWithNumericColumns(10);
-
- DataFrame tempDf = df + 1;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + (double)1);
- tempDf = df + 1.1;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1);
- tempDf = df + 1.1m;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = df - 1.1;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] - 1.1);
- tempDf = df - 1.1m;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] - 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = df * 1.1;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1);
- tempDf = df * 1.1m;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = df / 1.1;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] / 1.1);
- tempDf = df / 1.1m;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] / 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = df % 1.1;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] % 1.1);
- tempDf = df % 1.1m;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] % 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = 1 + df;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + (double)1);
- tempDf = 1.1 + df;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1);
- tempDf = 1.1m + df;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] + 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = 1.1 - df;
- Assert.Equal(tempDf[0, 0], 1.1 - (byte)df[0, 0]);
- tempDf = 1.1m - df;
- Assert.Equal(tempDf[0, 0], 1.1m - (byte)df[0, 0]);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = 1.1 * df;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1);
- tempDf = 1.1m * df;
- Assert.Equal(tempDf[0, 0], (byte)df[0, 0] * 1.1m);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- // To prevent a divide by zero
- var plusOne = df + 1;
- tempDf = 1.1 / plusOne;
- Assert.Equal(tempDf[0, 0], 1.1 / (double)plusOne[0, 0]);
- var plusDecimal = df + 1.1m;
- tempDf = 1.1m / plusDecimal;
- Assert.Equal(tempDf[0, 0], (1.1m) / (decimal)plusDecimal[0, 0]);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- tempDf = 1.1 % plusOne;
- Assert.Equal(tempDf[0, 0], 1.1 % (double)plusOne[0, 0]);
- tempDf = 1.1m % plusDecimal;
- Assert.Equal(tempDf[0, 0], 1.1m % (decimal)plusDecimal[0, 0]);
- Assert.True(typeof(decimal) == tempDf.Columns["Int"].DataType);
-
- Assert.Equal((byte)0, df[0, 0]);
- }
-
- [Fact]
- public void TestBinaryOperationsOnColumns()
- {
- Int32DataFrameColumn column = new Int32DataFrameColumn("Int", Enumerable.Range(0, 10));
- Assert.ThrowsAny(() => column.Add(5.5, inPlace: true));
- Assert.ThrowsAny(() => column.ReverseAdd(5.5, inPlace: true));
- string str = "A String";
- Assert.ThrowsAny(() => column.Add(str, inPlace: true));
- Assert.ThrowsAny(() => column.ReverseAdd(str, inPlace: true));
- }
-
[Fact]
public void TestColumnReverseOrderState()
{
@@ -893,467 +219,6 @@ public void TestProjectionAndAppend()
Assert.Equal(16, df.Columns["Int3"][2]);
}
- [Fact]
- public void TestComputations()
- {
- DataFrame df = MakeDataFrameWithAllMutableColumnTypes(10);
- df["Int"][0] = -10;
- Assert.Equal(-10, df.Columns["Int"][0]);
-
- DataFrameColumn absColumn = df.Columns["Int"].Abs();
- Assert.Equal(10, absColumn[0]);
- Assert.Equal(-10, df.Columns["Int"][0]);
- df.Columns["Int"].Abs(true);
- Assert.Equal(10, df.Columns["Int"][0]);
-
- Assert.Throws(() => df.Columns["Byte"].All());
- Assert.Throws(() => df.Columns["Byte"].Any());
- Assert.Throws(() => df.Columns["Char"].All());
- Assert.Throws(() => df.Columns["Char"].Any());
- Assert.Throws(() => df.Columns["DateTime"].All());
- Assert.Throws(() => df.Columns["DateTime"].Any());
- Assert.Throws(() => df.Columns["Decimal"].All());
- Assert.Throws(() => df.Columns["Decimal"].Any());
- Assert.Throws(() => df.Columns["Double"].All());
- Assert.Throws(() => df.Columns["Double"].Any());
- Assert.Throws(() => df.Columns["Float"].All());
- Assert.Throws(() => df.Columns["Float"].Any());
- Assert.Throws(() => df.Columns["Int"].All());
- Assert.Throws(() => df.Columns["Int"].Any());
- Assert.Throws(() => df.Columns["Long"].All());
- Assert.Throws(() => df.Columns["Long"].Any());
- Assert.Throws(() => df.Columns["Sbyte"].All());
- Assert.Throws(() => df.Columns["Sbyte"].Any());
- Assert.Throws