Extract comments to separate file

nimrodkor
2025-07-03 10:48:16 +03:00
parent c1d711e4e5
commit e6440b93e2
729 changed files with 52722 additions and 53366 deletions

View File

@@ -0,0 +1,46 @@
[
{
"discussion_id": "511436398",
"pr_number": 3511,
"pr_file": "lib/request.d.ts",
"created_at": "2020-10-24T12:46:35+00:00",
"commented_code": "* @param {function} listener - Callback to run when the request is being validated.\n * @param {boolean} prepend - If set, prepends listener instead of appending.\n */\n on(event: \"validate\", listener: (request: Request<D, E>) => void, prepend?: boolean): Request<D, E>;\n on(event: \"validate\", listener: (request: Request<D, E>, doneCallback?: () => void) => void, prepend?: boolean): Request<D, E>;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "511436398",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3511,
"pr_file": "lib/request.d.ts",
"discussion_id": "511436398",
"commented_code": "@@ -54,23 +54,23 @@ export class Request<D, E> {\n * @param {function} listener - Callback to run when the request is being validated.\n * @param {boolean} prepend - If set, prepends listener instead of appending.\n */\n- on(event: \"validate\", listener: (request: Request<D, E>) => void, prepend?: boolean): Request<D, E>;\n+ on(event: \"validate\", listener: (request: Request<D, E>, doneCallback?: () => void) => void, prepend?: boolean): Request<D, E>;",
"comment_created_at": "2020-10-24T12:46:35+00:00",
"comment_author": "jeromecovington",
"comment_body": "I added `doneCallback?` as an optional second param, trying to be sensitive to not breaking anybody's current code that may not use the param. Although I'm divided on whether or not it should probably actually be a required argument, as I'm not sure anyone's code using `onAsync()` listeners should technically be correct _without_ calling `done()`.",
"pr_file_module": null
}
]
},
{
"discussion_id": "145172099",
"pr_number": 1711,
"pr_file": "lib/services/s3.d.ts",
"created_at": "2017-10-17T15:45:27+00:00",
"commented_code": "getSignedUrl(operation: string, params: any): string;\n\n /**\n * Returns a 'thenable' promise that will be resolved with a pre-signed URL for a given operation name.\n */\n getSignedUrlPromise(operation: string, params: any): Promise<void>;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "145172099",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1711,
"pr_file": "lib/services/s3.d.ts",
"discussion_id": "145172099",
"commented_code": "@@ -13,6 +13,11 @@ export class S3Customizations extends Service {\n getSignedUrl(operation: string, params: any): string;\n \n /**\n+ * Returns a 'thenable' promise that will be resolved with a pre-signed URL for a given operation name.\n+ */\n+ getSignedUrlPromise(operation: string, params: any): Promise<void>;",
"comment_created_at": "2017-10-17T15:45:27+00:00",
"comment_author": "jeskew",
"comment_body": "Shouldn't the return value be `Promise<string>`?",
"pr_file_module": null
}
]
}
]
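
The two threads above converge on the same point: a declaration file should state exactly what the runtime delivers. Here is a minimal sketch of the corrected typings; the class shapes are simplified stand-ins for the real aws-sdk-js declarations, not the full interfaces:

```ts
// Simplified stand-ins for the aws-sdk-js declarations under review; the
// real Request and S3Customizations classes carry many more members.
export declare class Request<D, E> {
  /**
   * doneCallback stays optional so existing listeners that ignore it keep
   * compiling: the trade-off weighed in the first thread above.
   */
  on(
    event: 'validate',
    listener: (request: Request<D, E>, doneCallback?: () => void) => void,
    prepend?: boolean
  ): Request<D, E>;
}

export declare class S3Customizations {
  getSignedUrl(operation: string, params: any): string;
  /**
   * Promise<string>, not Promise<void>: the promise resolves with the
   * pre-signed URL itself, as the second thread points out.
   */
  getSignedUrlPromise(operation: string, params: any): Promise<string>;
}
```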

View File

@@ -35,51 +35,3 @@
```ts
on(event: "validate", listener: (request: Request<D, E>, doneCallback: () => void) => void, prepend?: boolean): Request<D, E>;
```
Accurate type signatures improve API usability, enable better IDE support, and reduce runtime errors by catching incorrect usage patterns at compile time.
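
To make that claim concrete, here is a hypothetical caller written against the corrected signature sketched earlier; under the old `Promise<void>` return type, the assignment below would fail to compile:

```ts
// Hypothetical usage against the corrected typing sketched above.
async function presignGetObject(s3: S3Customizations): Promise<string> {
  // Compiles with getSignedUrlPromise(): Promise<string>; the old
  // Promise<void> return type would reject this assignment.
  const url: string = await s3.getSignedUrlPromise('getObject', {
    Bucket: 'myBucket',
    Key: 'myKey',
  });
  return url;
}
```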

View File

@@ -0,0 +1,46 @@
[
{
"discussion_id": "85628177",
"pr_number": 1189,
"pr_file": "lib/config.d.ts",
"created_at": "2016-10-29T00:12:56+00:00",
"commented_code": "/// <reference types=\"node\" />\n\nimport * as http from 'http';\nimport * as https from 'https';\nimport {AWSError} from './error';\nimport {Credentials} from './credentials';\nexport class Config {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "85628177",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1189,
"pr_file": "lib/config.d.ts",
"discussion_id": "85628177",
"commented_code": "@@ -0,0 +1,268 @@\n+/// <reference types=\"node\" />\n+\n+import * as http from 'http';\n+import * as https from 'https';\n+import {AWSError} from './error';\n+import {Credentials} from './credentials';\n+export class Config {",
"comment_created_at": "2016-10-29T00:12:56+00:00",
"comment_author": "LiuJoyceC",
"comment_body": "The `Config` class also needs to have each service identifier as an optional instance variable. Otherwise the compiler will complain if a user tries to define service-specific parameters and options:\n\n```\nAWS.config.s3 = {params: {Bucket: 'myBucket'}, useDualstack: true};\n```\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "94810141",
"pr_number": 1285,
"pr_file": "lib/config.d.ts",
"created_at": "2017-01-05T17:18:54+00:00",
"commented_code": "* @param {ConfigurationOptions} options - a map of option keys and values.\n * @param {boolean} allowUnknownKeys - Whether unknown keys can be set on the configuration object.\n */\n update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions & {[key: string]: any}, allowUnknownKeys: true): void;\n update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions & CredentialsOptions & {[key: string]: any}, allowUnknownKeys: true): void;\n /**\n * Updates the current configuration object with new options.\n * \n * @param {ConfigurationOptions} options - a map of option keys and values.\n * @param {boolean} allowUnknownKeys - Defaults to false. Whether unknown keys can be set on the configuration object.\n */\n update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions, allowUnknownKeys?: false): void;\n update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions & CredentialsOptions, allowUnknownKeys?: false): void;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "94810141",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1285,
"pr_file": "lib/config.d.ts",
"discussion_id": "94810141",
"commented_code": "@@ -63,14 +63,14 @@ export class Config extends ConfigBase {\n * @param {ConfigurationOptions} options - a map of option keys and values.\n * @param {boolean} allowUnknownKeys - Whether unknown keys can be set on the configuration object.\n */\n- update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions & {[key: string]: any}, allowUnknownKeys: true): void;\n+ update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions & CredentialsOptions & {[key: string]: any}, allowUnknownKeys: true): void;\n /**\n * Updates the current configuration object with new options.\n * \n * @param {ConfigurationOptions} options - a map of option keys and values.\n * @param {boolean} allowUnknownKeys - Defaults to false. Whether unknown keys can be set on the configuration object.\n */\n- update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions, allowUnknownKeys?: false): void;\n+ update(options: ConfigurationOptions & ConfigurationServicePlaceholders & APIVersions & CredentialsOptions, allowUnknownKeys?: false): void;",
"comment_created_at": "2017-01-05T17:18:54+00:00",
"comment_author": "chrisradek",
"comment_body": "Can you add `CredentialsOptions` to the `update` method in `ConfigBase` as well? This should be allowed in service configuration in addition to the global config.",
"pr_file_module": null
}
]
}
]
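
A condensed sketch of the shape these two comments push toward; the option interfaces here are simplified stand-ins for the real declarations in lib/config.d.ts:

```ts
// Simplified stand-ins for the option interfaces in lib/config.d.ts.
interface ConfigurationOptions { region?: string; maxRetries?: number; }
interface CredentialsOptions {
  accessKeyId?: string;
  secretAccessKey?: string;
  sessionToken?: string;
}
interface ServiceConfigurationOptions {
  params?: { [key: string]: any };
  useDualstack?: boolean;
}

declare class ConfigBase {
  // CredentialsOptions joins both overloads, and it lives on ConfigBase so
  // that service-level configuration accepts credentials too (second thread).
  update(
    options: ConfigurationOptions & CredentialsOptions & { [key: string]: any },
    allowUnknownKeys: true
  ): void;
  update(options: ConfigurationOptions & CredentialsOptions, allowUnknownKeys?: false): void;
}

declare class Config extends ConfigBase {
  // Each service identifier becomes an optional instance variable (first
  // thread), so assignments like AWS.config.s3 = {...} type-check.
  s3?: ServiceConfigurationOptions;
  dynamodb?: ServiceConfigurationOptions;
}
```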

View File

@@ -35,51 +35,3 @@
```ts
AWS.config.s3 = {params: {Bucket: 'myBucket'}, useDualstack: true};
```
This approach prevents TypeScript compiler errors when users set valid configurations and maintains consistency between global and service-specific configuration options.
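
For illustration, both statements below compile against the sketch above; without the optional `s3` member and the `CredentialsOptions` intersection, each would be a compile error:

```ts
// Hypothetical usage of the Config sketch above.
declare const config: Config;

config.s3 = { params: { Bucket: 'myBucket' }, useDualstack: true };
config.update({ accessKeyId: 'AKID', secretAccessKey: 'SECRET' });
```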

View File

@@ -0,0 +1,160 @@
[
{
"discussion_id": "508372275",
"pr_number": 3505,
"pr_file": "lib/http/fetch.js",
"created_at": "2020-10-20T10:02:52+00:00",
"commented_code": "var AWS = require('../core');\nvar pipeThrough = AWS.util.pipeThrough;\nvar TransformStream = typeof TransformStream !== 'undefined' ? TransformStream :\n AWS.util.stream && AWS.util.stream.TransformStream ? AWS.util.stream.TransformStream :\n typeof WebStreamsPolyfill !== 'undefined' ? WebStreamsPolyfill.TransformStream :\n undefined;\nrequire('../http');\n\n/**\n * @api private\n */\nAWS.FetchClient = AWS.util.inherit({\n handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {\n var self = this;\n var endpoint = httpRequest.endpoint;\n if (!httpOptions) httpOptions = {};\n\n var emitter = new EventEmitter();\n\n // Construct href\n var href = endpoint.protocol + '//' + endpoint.hostname;\n if (endpoint.port !== 80 && endpoint.port !== 443) {\n href += ':' + endpoint.port;\n }\n href += httpRequest.path;\n\n // Construct headers\n var headers = new Headers();\n AWS.util.each(httpRequest.headers, function (key, value) {\n // See: https://fetch.spec.whatwg.org/#forbidden-header-name\n if (key !== 'Content-Length' && key !== 'Expect' && key !== 'Host') {\n headers.set(key, value);\n }\n });\n\n // Send cookies?\n var credentials = 'omit';\n if (httpOptions.xhrWithCredentials) {\n credentials = 'include';\n }\n\n // Construct request\n // See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n var req = {\n method: httpRequest.method,\n headers: headers,\n credentials: credentials\n };\n if (httpRequest.body) req.body = httpRequest.body;\n var request = new Request(href, req);\n\n // Send request\n fetch(request).then(function(response) {\n if (!response.ok) throw Error(response.statusText);\n return response;\n }).then(function(response) {\n emitter.statusCode = response.status;\n emitter.headers = self.parseHeaders(response.headers);\n emitter.abort = function() { response.body.cancel(); };\n\n httpRequest.responseStream = self.writeBody(emitter, response); // attach responseStream to httpRequest\n httpRequest.stream = emitter; // attach emitter to httpRequest\n emitter.didCallback = false; // Required for httpDone event\n\n emitter.on('error', function() {\n if (emitter.didCallback) return; emitter.didCallback = true;\n errCallback.apply(emitter, arguments);\n });\n\n if (emitter.didCallback) return;\n callback(emitter);\n emitter.emit(\n 'headers',\n emitter.statusCode,\n emitter.headers,\n response.statusText\n );\n }, function(err) {\n if (emitter.didCallback) return; emitter.didCallback = true;\n errCallback(AWS.util.error(new Error('Network Failure'), {\n code: 'NetworkingError'\n }));\n }).catch(function(err) {\n if (emitter.didCallback) return; emitter.didCallback = true;\n errCallback(AWS.util.error(err, {\n code: 'BadReplyError'\n }));\n });\n\n return emitter;\n },\n\n writeBody: function writeBody(emitter, response) {\n var body = response.body;\n var totalBytes = parseInt(response.headers.get('Content-Length'), 10);",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "508372275",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3505,
"pr_file": "lib/http/fetch.js",
"discussion_id": "508372275",
"commented_code": "@@ -0,0 +1,159 @@\n+var AWS = require('../core');\n+var pipeThrough = AWS.util.pipeThrough;\n+var TransformStream = typeof TransformStream !== 'undefined' ? TransformStream :\n+ AWS.util.stream && AWS.util.stream.TransformStream ? AWS.util.stream.TransformStream :\n+ typeof WebStreamsPolyfill !== 'undefined' ? WebStreamsPolyfill.TransformStream :\n+ undefined;\n+require('../http');\n+\n+/**\n+ * @api private\n+ */\n+AWS.FetchClient = AWS.util.inherit({\n+ handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {\n+ var self = this;\n+ var endpoint = httpRequest.endpoint;\n+ if (!httpOptions) httpOptions = {};\n+\n+ var emitter = new EventEmitter();\n+\n+ // Construct href\n+ var href = endpoint.protocol + '//' + endpoint.hostname;\n+ if (endpoint.port !== 80 && endpoint.port !== 443) {\n+ href += ':' + endpoint.port;\n+ }\n+ href += httpRequest.path;\n+\n+ // Construct headers\n+ var headers = new Headers();\n+ AWS.util.each(httpRequest.headers, function (key, value) {\n+ // See: https://fetch.spec.whatwg.org/#forbidden-header-name\n+ if (key !== 'Content-Length' && key !== 'Expect' && key !== 'Host') {\n+ headers.set(key, value);\n+ }\n+ });\n+\n+ // Send cookies?\n+ var credentials = 'omit';\n+ if (httpOptions.xhrWithCredentials) {\n+ credentials = 'include';\n+ }\n+\n+ // Construct request\n+ // See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n+ var req = {\n+ method: httpRequest.method,\n+ headers: headers,\n+ credentials: credentials\n+ };\n+ if (httpRequest.body) req.body = httpRequest.body;\n+ var request = new Request(href, req);\n+\n+ // Send request\n+ fetch(request).then(function(response) {\n+ if (!response.ok) throw Error(response.statusText);\n+ return response;\n+ }).then(function(response) {\n+ emitter.statusCode = response.status;\n+ emitter.headers = self.parseHeaders(response.headers);\n+ emitter.abort = function() { response.body.cancel(); };\n+\n+ httpRequest.responseStream = self.writeBody(emitter, response); // attach responseStream to httpRequest\n+ httpRequest.stream = emitter; // attach emitter to httpRequest\n+ emitter.didCallback = false; // Required for httpDone event\n+\n+ emitter.on('error', function() {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback.apply(emitter, arguments);\n+ });\n+\n+ if (emitter.didCallback) return;\n+ callback(emitter);\n+ emitter.emit(\n+ 'headers',\n+ emitter.statusCode,\n+ emitter.headers,\n+ response.statusText\n+ );\n+ }, function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(new Error('Network Failure'), {\n+ code: 'NetworkingError'\n+ }));\n+ }).catch(function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(err, {\n+ code: 'BadReplyError'\n+ }));\n+ });\n+\n+ return emitter;\n+ },\n+\n+ writeBody: function writeBody(emitter, response) {\n+ var body = response.body;\n+ var totalBytes = parseInt(response.headers.get('Content-Length'), 10);",
"comment_created_at": "2020-10-20T10:02:52+00:00",
"comment_author": "jimmywarting",
"comment_body": "just a heads up: if the response is encoded with gzip, br, deflate or anything then that will be what the content-length will also be (not the actual file size) so when you read the all the chunks then you will only increase the loadedBytes with uncompressed data.\r\n\r\nwhich means loadedBytes might be higher than content-length in some scenarios",
"pr_file_module": null
},
{
"comment_id": "508382288",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3505,
"pr_file": "lib/http/fetch.js",
"discussion_id": "508372275",
"commented_code": "@@ -0,0 +1,159 @@\n+var AWS = require('../core');\n+var pipeThrough = AWS.util.pipeThrough;\n+var TransformStream = typeof TransformStream !== 'undefined' ? TransformStream :\n+ AWS.util.stream && AWS.util.stream.TransformStream ? AWS.util.stream.TransformStream :\n+ typeof WebStreamsPolyfill !== 'undefined' ? WebStreamsPolyfill.TransformStream :\n+ undefined;\n+require('../http');\n+\n+/**\n+ * @api private\n+ */\n+AWS.FetchClient = AWS.util.inherit({\n+ handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {\n+ var self = this;\n+ var endpoint = httpRequest.endpoint;\n+ if (!httpOptions) httpOptions = {};\n+\n+ var emitter = new EventEmitter();\n+\n+ // Construct href\n+ var href = endpoint.protocol + '//' + endpoint.hostname;\n+ if (endpoint.port !== 80 && endpoint.port !== 443) {\n+ href += ':' + endpoint.port;\n+ }\n+ href += httpRequest.path;\n+\n+ // Construct headers\n+ var headers = new Headers();\n+ AWS.util.each(httpRequest.headers, function (key, value) {\n+ // See: https://fetch.spec.whatwg.org/#forbidden-header-name\n+ if (key !== 'Content-Length' && key !== 'Expect' && key !== 'Host') {\n+ headers.set(key, value);\n+ }\n+ });\n+\n+ // Send cookies?\n+ var credentials = 'omit';\n+ if (httpOptions.xhrWithCredentials) {\n+ credentials = 'include';\n+ }\n+\n+ // Construct request\n+ // See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n+ var req = {\n+ method: httpRequest.method,\n+ headers: headers,\n+ credentials: credentials\n+ };\n+ if (httpRequest.body) req.body = httpRequest.body;\n+ var request = new Request(href, req);\n+\n+ // Send request\n+ fetch(request).then(function(response) {\n+ if (!response.ok) throw Error(response.statusText);\n+ return response;\n+ }).then(function(response) {\n+ emitter.statusCode = response.status;\n+ emitter.headers = self.parseHeaders(response.headers);\n+ emitter.abort = function() { response.body.cancel(); };\n+\n+ httpRequest.responseStream = self.writeBody(emitter, response); // attach responseStream to httpRequest\n+ httpRequest.stream = emitter; // attach emitter to httpRequest\n+ emitter.didCallback = false; // Required for httpDone event\n+\n+ emitter.on('error', function() {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback.apply(emitter, arguments);\n+ });\n+\n+ if (emitter.didCallback) return;\n+ callback(emitter);\n+ emitter.emit(\n+ 'headers',\n+ emitter.statusCode,\n+ emitter.headers,\n+ response.statusText\n+ );\n+ }, function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(new Error('Network Failure'), {\n+ code: 'NetworkingError'\n+ }));\n+ }).catch(function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(err, {\n+ code: 'BadReplyError'\n+ }));\n+ });\n+\n+ return emitter;\n+ },\n+\n+ writeBody: function writeBody(emitter, response) {\n+ var body = response.body;\n+ var totalBytes = parseInt(response.headers.get('Content-Length'), 10);",
"comment_created_at": "2020-10-20T10:19:05+00:00",
"comment_author": "heri16",
"comment_body": "This issue is very tricky indeed due to the fetch implementation, and I don't have a solution yet for `Content-Encoding` compression.\r\n\r\nSee discussion here:\r\nhttps://github.com/w3c/ServiceWorker/issues/339#issuecomment-372304884",
"pr_file_module": null
},
{
"comment_id": "508384763",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3505,
"pr_file": "lib/http/fetch.js",
"discussion_id": "508372275",
"commented_code": "@@ -0,0 +1,159 @@\n+var AWS = require('../core');\n+var pipeThrough = AWS.util.pipeThrough;\n+var TransformStream = typeof TransformStream !== 'undefined' ? TransformStream :\n+ AWS.util.stream && AWS.util.stream.TransformStream ? AWS.util.stream.TransformStream :\n+ typeof WebStreamsPolyfill !== 'undefined' ? WebStreamsPolyfill.TransformStream :\n+ undefined;\n+require('../http');\n+\n+/**\n+ * @api private\n+ */\n+AWS.FetchClient = AWS.util.inherit({\n+ handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {\n+ var self = this;\n+ var endpoint = httpRequest.endpoint;\n+ if (!httpOptions) httpOptions = {};\n+\n+ var emitter = new EventEmitter();\n+\n+ // Construct href\n+ var href = endpoint.protocol + '//' + endpoint.hostname;\n+ if (endpoint.port !== 80 && endpoint.port !== 443) {\n+ href += ':' + endpoint.port;\n+ }\n+ href += httpRequest.path;\n+\n+ // Construct headers\n+ var headers = new Headers();\n+ AWS.util.each(httpRequest.headers, function (key, value) {\n+ // See: https://fetch.spec.whatwg.org/#forbidden-header-name\n+ if (key !== 'Content-Length' && key !== 'Expect' && key !== 'Host') {\n+ headers.set(key, value);\n+ }\n+ });\n+\n+ // Send cookies?\n+ var credentials = 'omit';\n+ if (httpOptions.xhrWithCredentials) {\n+ credentials = 'include';\n+ }\n+\n+ // Construct request\n+ // See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n+ var req = {\n+ method: httpRequest.method,\n+ headers: headers,\n+ credentials: credentials\n+ };\n+ if (httpRequest.body) req.body = httpRequest.body;\n+ var request = new Request(href, req);\n+\n+ // Send request\n+ fetch(request).then(function(response) {\n+ if (!response.ok) throw Error(response.statusText);\n+ return response;\n+ }).then(function(response) {\n+ emitter.statusCode = response.status;\n+ emitter.headers = self.parseHeaders(response.headers);\n+ emitter.abort = function() { response.body.cancel(); };\n+\n+ httpRequest.responseStream = self.writeBody(emitter, response); // attach responseStream to httpRequest\n+ httpRequest.stream = emitter; // attach emitter to httpRequest\n+ emitter.didCallback = false; // Required for httpDone event\n+\n+ emitter.on('error', function() {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback.apply(emitter, arguments);\n+ });\n+\n+ if (emitter.didCallback) return;\n+ callback(emitter);\n+ emitter.emit(\n+ 'headers',\n+ emitter.statusCode,\n+ emitter.headers,\n+ response.statusText\n+ );\n+ }, function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(new Error('Network Failure'), {\n+ code: 'NetworkingError'\n+ }));\n+ }).catch(function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(err, {\n+ code: 'BadReplyError'\n+ }));\n+ });\n+\n+ return emitter;\n+ },\n+\n+ writeBody: function writeBody(emitter, response) {\n+ var body = response.body;\n+ var totalBytes = parseInt(response.headers.get('Content-Length'), 10);",
"comment_created_at": "2020-10-20T10:23:04+00:00",
"comment_author": "heri16",
"comment_body": "Any suggestions are welcomed. :smiling_face_with_three_hearts: ",
"pr_file_module": null
},
{
"comment_id": "508413111",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3505,
"pr_file": "lib/http/fetch.js",
"discussion_id": "508372275",
"commented_code": "@@ -0,0 +1,159 @@\n+var AWS = require('../core');\n+var pipeThrough = AWS.util.pipeThrough;\n+var TransformStream = typeof TransformStream !== 'undefined' ? TransformStream :\n+ AWS.util.stream && AWS.util.stream.TransformStream ? AWS.util.stream.TransformStream :\n+ typeof WebStreamsPolyfill !== 'undefined' ? WebStreamsPolyfill.TransformStream :\n+ undefined;\n+require('../http');\n+\n+/**\n+ * @api private\n+ */\n+AWS.FetchClient = AWS.util.inherit({\n+ handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {\n+ var self = this;\n+ var endpoint = httpRequest.endpoint;\n+ if (!httpOptions) httpOptions = {};\n+\n+ var emitter = new EventEmitter();\n+\n+ // Construct href\n+ var href = endpoint.protocol + '//' + endpoint.hostname;\n+ if (endpoint.port !== 80 && endpoint.port !== 443) {\n+ href += ':' + endpoint.port;\n+ }\n+ href += httpRequest.path;\n+\n+ // Construct headers\n+ var headers = new Headers();\n+ AWS.util.each(httpRequest.headers, function (key, value) {\n+ // See: https://fetch.spec.whatwg.org/#forbidden-header-name\n+ if (key !== 'Content-Length' && key !== 'Expect' && key !== 'Host') {\n+ headers.set(key, value);\n+ }\n+ });\n+\n+ // Send cookies?\n+ var credentials = 'omit';\n+ if (httpOptions.xhrWithCredentials) {\n+ credentials = 'include';\n+ }\n+\n+ // Construct request\n+ // See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n+ var req = {\n+ method: httpRequest.method,\n+ headers: headers,\n+ credentials: credentials\n+ };\n+ if (httpRequest.body) req.body = httpRequest.body;\n+ var request = new Request(href, req);\n+\n+ // Send request\n+ fetch(request).then(function(response) {\n+ if (!response.ok) throw Error(response.statusText);\n+ return response;\n+ }).then(function(response) {\n+ emitter.statusCode = response.status;\n+ emitter.headers = self.parseHeaders(response.headers);\n+ emitter.abort = function() { response.body.cancel(); };\n+\n+ httpRequest.responseStream = self.writeBody(emitter, response); // attach responseStream to httpRequest\n+ httpRequest.stream = emitter; // attach emitter to httpRequest\n+ emitter.didCallback = false; // Required for httpDone event\n+\n+ emitter.on('error', function() {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback.apply(emitter, arguments);\n+ });\n+\n+ if (emitter.didCallback) return;\n+ callback(emitter);\n+ emitter.emit(\n+ 'headers',\n+ emitter.statusCode,\n+ emitter.headers,\n+ response.statusText\n+ );\n+ }, function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(new Error('Network Failure'), {\n+ code: 'NetworkingError'\n+ }));\n+ }).catch(function(err) {\n+ if (emitter.didCallback) return; emitter.didCallback = true;\n+ errCallback(AWS.util.error(err, {\n+ code: 'BadReplyError'\n+ }));\n+ });\n+\n+ return emitter;\n+ },\n+\n+ writeBody: function writeBody(emitter, response) {\n+ var body = response.body;\n+ var totalBytes = parseInt(response.headers.get('Content-Length'), 10);",
"comment_created_at": "2020-10-20T11:10:34+00:00",
"comment_author": "jimmywarting",
"comment_body": "There is also this observer issue: https://github.com/whatwg/fetch/issues/607 but nothing have happened on that front for a long time now.\r\n\r\nYou could read the content-encoding as well and if it's null or \"identity\" then the content-length is the equivalent to the actual file size and it can be ok to use the progress observer. it's also possible to send a `accept-encoding: identity` request header to ask for raw bytes. but i might best be done with a HEAD request\r\nI don't know exactly how the aws sdk/api looks like, but there looks to be a size metadata when you fetch the file list that may be accurate.\r\n\r\nI would probably just ditch the hole progress stuff and leave that up to the developer to handle b/c it can be unreliable. or give them the option to choose between XHR and Fetch (at least until https://github.com/whatwg/fetch/issues/607 have been implemented)\r\n\r\nAlso if you use the size option in streamsaver than you don't need to be subscribed to any progress event listener since browser native UI will show one for you. but it needs to be the uncompressed size as well - can't be the same content-length\r\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "61352499",
"pr_number": 945,
"pr_file": "lib/http/node.js",
"created_at": "2016-04-27T23:13:35+00:00",
"commented_code": "writeBody: function writeBody(stream, httpRequest) {\n var body = httpRequest.body;\n\n if (body && WritableStream && ReadableStream) { // progress support\n if (!(body instanceof Stream)) body = AWS.util.buffer.toStream(body);\n body.pipe(this.progressStream(stream, httpRequest));\n }\n var totalBytes = parseInt(httpRequest.headers['Content-Length'], 10);\n var loadedBytes = 0;\n\n if (body instanceof Stream) {\n // for progress support of streaming content\n // tap the data event of the stream in addition to piping\n body.on('data', function(chunk) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "61352499",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 945,
"pr_file": "lib/http/node.js",
"discussion_id": "61352499",
"commented_code": "@@ -76,15 +75,40 @@ AWS.NodeHttpClient = AWS.util.inherit({\n \n writeBody: function writeBody(stream, httpRequest) {\n var body = httpRequest.body;\n-\n- if (body && WritableStream && ReadableStream) { // progress support\n- if (!(body instanceof Stream)) body = AWS.util.buffer.toStream(body);\n- body.pipe(this.progressStream(stream, httpRequest));\n- }\n+ var totalBytes = parseInt(httpRequest.headers['Content-Length'], 10);\n+ var loadedBytes = 0;\n \n if (body instanceof Stream) {\n+ // for progress support of streaming content\n+ // tap the data event of the stream in addition to piping\n+ body.on('data', function(chunk) {",
"comment_created_at": "2016-04-27T23:13:35+00:00",
"comment_author": "chrisradek",
"comment_body": "I think tapping into the `data` event on a stream could cause some unintended side-effects. In node.js 0.10.x, binding a `data` listener on a stream will cause the stream to emit 'data' events as fast as it can, ignoring the back-pressure that's automatically in place when using `pipe`. If the writable stream is slow, then this could cause loss of data.\n\nI don't think this is an issue in versions of node.js >= 0.12.x, and some simple testing seems to confirm that. However, we need to work with node.js 0.10.x as well.\n\nThe current method creates a new `writable` stream that also gets piped into in order to emit the 'sendProgress' events. I know it'll require refactoring your logic but that path seems safer across all our supported versions of node.\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "171932543",
"pr_number": 1956,
"pr_file": "lib/services/s3.js",
"created_at": "2018-03-02T18:56:47+00:00",
"commented_code": "}\n },\n\n /**\n * @api private\n */\n validateMd5TrailingChecksum: function validateMd5TrailingChecksum(resp) {\n var response = resp.httpResponse;\n var buffer = response.body;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "171932543",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/services/s3.js",
"discussion_id": "171932543",
"commented_code": "@@ -479,6 +499,49 @@ AWS.util.update(AWS.S3.prototype, {\n }\n },\n \n+ /**\n+ * @api private\n+ */\n+ validateMd5TrailingChecksum: function validateMd5TrailingChecksum(resp) {\n+ var response = resp.httpResponse;\n+ var buffer = response.body;",
"comment_created_at": "2018-03-02T18:56:47+00:00",
"comment_author": "chrisradek",
"comment_body": "In node.js, the response body is likely to be a stream. It might be easier in node to create a passthrough stream that generates the md5 hash and compares against the checksum. You'll know the expected content-length since it's passed in the response headers. Your solution here should still cover the browser use-case though.",
"pr_file_module": null
},
{
"comment_id": "174218206",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/services/s3.js",
"discussion_id": "171932543",
"commented_code": "@@ -479,6 +499,49 @@ AWS.util.update(AWS.S3.prototype, {\n }\n },\n \n+ /**\n+ * @api private\n+ */\n+ validateMd5TrailingChecksum: function validateMd5TrailingChecksum(resp) {\n+ var response = resp.httpResponse;\n+ var buffer = response.body;",
"comment_created_at": "2018-03-13T17:27:39+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Mark: This part will only be performed on Browser.",
"pr_file_module": null
}
]
},
{
"discussion_id": "173921060",
"pr_number": 1956,
"pr_file": "lib/http/node.js",
"created_at": "2018-03-12T19:34:31+00:00",
"commented_code": "var stream = http.request(options, function (httpResp) {\n if (stream.didCallback) return;\n var headers = httpResp.headers;\n var responseStream = httpResp;\n\n //check content length\n if (headers && headers['content-length'] && httpRequest.method !== 'HEAD') {\n var contentLengthCheckerStream =\n new ContentLengthCheckerStream(\n parseInt(headers['content-length'], 10)\n );\n responseStream.on('error', function(err) {\n contentLengthCheckerStream.emit('error', err);\n });\n responseStream = responseStream.pipe(contentLengthCheckerStream)\n }\n //if response contains checksum in payload, validate it and chop it off the resposne stream.\n if (headers && headers['x-amz-transfer-encoding'] && headers['content-length']) {\n var transferEncoding = headers['x-amz-transfer-encoding'];\n var contentLength = headers['content-length'];\n var integrityCheckerStream = new IntegrityCheckerStream(transferEncoding, contentLength);\n responseStream.on('error', function(err) {\n integrityCheckerStream.emit('error', err);",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "173921060",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/http/node.js",
"discussion_id": "173921060",
"commented_code": "@@ -41,9 +43,40 @@ AWS.NodeHttpClient = AWS.util.inherit({\n \n var stream = http.request(options, function (httpResp) {\n if (stream.didCallback) return;\n+ var headers = httpResp.headers;\n+ var responseStream = httpResp;\n+\n+ //check content length\n+ if (headers && headers['content-length'] && httpRequest.method !== 'HEAD') {\n+ var contentLengthCheckerStream =\n+ new ContentLengthCheckerStream(\n+ parseInt(headers['content-length'], 10)\n+ );\n+ responseStream.on('error', function(err) {\n+ contentLengthCheckerStream.emit('error', err);\n+ });\n+ responseStream = responseStream.pipe(contentLengthCheckerStream)\n+ }\n+ //if response contains checksum in payload, validate it and chop it off the resposne stream.\n+ if (headers && headers['x-amz-transfer-encoding'] && headers['content-length']) {\n+ var transferEncoding = headers['x-amz-transfer-encoding'];\n+ var contentLength = headers['content-length'];\n+ var integrityCheckerStream = new IntegrityCheckerStream(transferEncoding, contentLength);\n+ responseStream.on('error', function(err) {\n+ integrityCheckerStream.emit('error', err);",
"comment_created_at": "2018-03-12T19:34:31+00:00",
"comment_author": "chrisradek",
"comment_body": "If `responseStream` is an `IncomingMessage`, you'll want to call `destroy` on it. As of node 8.0.0, readable streams also have the `destroy` method, so you can check if `destroy` is a function, then call it if it is.",
"pr_file_module": null
}
]
},
{
"discussion_id": "173931162",
"pr_number": 1956,
"pr_file": "lib/request.js",
"created_at": "2018-03-12T20:11:55+00:00",
"commented_code": "resp.error = error;\n resp.error.retryable = false;\n });\n\n var shouldCheckContentLength = false;\n var expectedLen;\n if (req.httpRequest.method !== 'HEAD') {\n expectedLen = parseInt(headers['content-length'], 10);\n }\n if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) {\n shouldCheckContentLength = true;\n var receivedLen = 0;\n }\n\n var checkContentLengthAndEmit = function checkContentLengthAndEmit() {\n if (shouldCheckContentLength && receivedLen !== expectedLen) {\n stream.emit('error', AWS.util.error(\n new Error('Stream content length mismatch. Received ' +\n receivedLen + ' of ' + expectedLen + ' bytes.'),\n { code: 'StreamContentLengthMismatch' }\n ));\n } else if (AWS.HttpClient.streamsApiVersion === 2) {\n stream.end();\n } else {\n stream.emit('end');\n }\n };\n\n var httpStream = resp.httpResponse.createUnbufferedStream();\n\n if (AWS.HttpClient.streamsApiVersion === 2) {\n if (shouldCheckContentLength) {\n var lengthAccumulator = new streams.PassThrough();\n lengthAccumulator._write = function(chunk) {\n if (chunk && chunk.length) {\n receivedLen += chunk.length;\n }\n return streams.PassThrough.prototype._write.apply(this, arguments);\n };\n\n lengthAccumulator.on('end', checkContentLengthAndEmit);\n stream.on('error', function(err) {\n shouldCheckContentLength = false;\n httpStream.unpipe(lengthAccumulator);\n lengthAccumulator.emit('end');\n lengthAccumulator.end();\n });\n httpStream.pipe(lengthAccumulator).pipe(stream, { end: false });\n } else {\n httpStream.pipe(stream);\n }\n } else {\n\n if (shouldCheckContentLength) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "173931162",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/request.js",
"discussion_id": "173931162",
"commented_code": "@@ -595,77 +591,28 @@ AWS.Request = inherit({\n resp.error = error;\n resp.error.retryable = false;\n });\n-\n- var shouldCheckContentLength = false;\n- var expectedLen;\n- if (req.httpRequest.method !== 'HEAD') {\n- expectedLen = parseInt(headers['content-length'], 10);\n- }\n- if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) {\n- shouldCheckContentLength = true;\n- var receivedLen = 0;\n- }\n-\n- var checkContentLengthAndEmit = function checkContentLengthAndEmit() {\n- if (shouldCheckContentLength && receivedLen !== expectedLen) {\n- stream.emit('error', AWS.util.error(\n- new Error('Stream content length mismatch. Received ' +\n- receivedLen + ' of ' + expectedLen + ' bytes.'),\n- { code: 'StreamContentLengthMismatch' }\n- ));\n- } else if (AWS.HttpClient.streamsApiVersion === 2) {\n- stream.end();\n- } else {\n- stream.emit('end');\n- }\n- };\n-\n var httpStream = resp.httpResponse.createUnbufferedStream();\n-\n if (AWS.HttpClient.streamsApiVersion === 2) {\n- if (shouldCheckContentLength) {\n- var lengthAccumulator = new streams.PassThrough();\n- lengthAccumulator._write = function(chunk) {\n- if (chunk && chunk.length) {\n- receivedLen += chunk.length;\n- }\n- return streams.PassThrough.prototype._write.apply(this, arguments);\n- };\n-\n- lengthAccumulator.on('end', checkContentLengthAndEmit);\n- stream.on('error', function(err) {\n- shouldCheckContentLength = false;\n- httpStream.unpipe(lengthAccumulator);\n- lengthAccumulator.emit('end');\n- lengthAccumulator.end();\n- });\n- httpStream.pipe(lengthAccumulator).pipe(stream, { end: false });\n- } else {\n httpStream.pipe(stream);\n- }\n } else {\n-\n- if (shouldCheckContentLength) {",
"comment_created_at": "2018-03-12T20:11:55+00:00",
"comment_author": "chrisradek",
"comment_body": "We'll still need to perform these checks for Node.js 0.8, especially if we start turning it on by default. Unfortunately 0.8.x doesn't include `Transform` in its `stream` package, so your implementation won't work for those cases. You'll likely need to resort to `data` listeners to perform your calculations if `Transform` doesn't exist.",
"pr_file_module": null
}
]
}
]
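
The common thread in these discussions is careful byte accounting on response streams. Below is a minimal, hypothetical TypeScript rendering of a length-checking passthrough in the spirit of the ContentLengthCheckerStream mentioned in the PR #1956 thread, with the compression caveat from the fetch thread noted in the comments; it is an illustration, not the SDK's actual implementation:

```ts
import { Transform, TransformCallback } from 'stream';

// Counts bytes as they pass through and errors at end-of-stream on mismatch.
// Only meaningful when the body is unencoded ('identity'): with gzip, br, or
// deflate, Content-Length describes the compressed bytes, not what you read here.
class ContentLengthChecker extends Transform {
  private received = 0;

  constructor(private readonly expected: number) {
    super();
  }

  _transform(chunk: Buffer, _encoding: BufferEncoding, callback: TransformCallback): void {
    this.received += chunk.length;
    callback(null, chunk);
  }

  _flush(callback: TransformCallback): void {
    if (this.received !== this.expected) {
      // Descriptive error, mirroring the message style quoted from lib/request.js.
      callback(new Error(
        'Stream content length mismatch. Received ' +
        this.received + ' of ' + this.expected + ' bytes.'
      ));
    } else {
      callback();
    }
  }
}
```

Piping the raw response through such a checker before any decompression, and pairing it with a similar MD5-hashing transform for the trailing-checksum case, matches the direction the reviewers suggest; on runtimes whose stream package lacks Transform (Node 0.8), one would fall back to 'data' listeners as noted above.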

View File

@@ -40,165 +40,3 @@
When handling HTTP responses, especially with streaming data, properly verify content length and data integrity:
- Emit appropriate errors with descriptive messages when content length or integrity checks fail
Implementing these practices ensures data is received completely and correctly, preventing subtle bugs in network communication.
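
One teardown detail from the threads above, sketched in isolation: aborting a response stream safely across Node versions. The function name is hypothetical; the feature detection follows the review comment about IncomingMessage and Node 8 readables:

```ts
import { Readable } from 'stream';

// IncomingMessage has destroy(); generic readable streams only gained it in
// Node 8.0.0, so feature-detect before calling, per the review guidance.
function abortResponseStream(responseStream: Readable): void {
  if (typeof responseStream.destroy === 'function') {
    responseStream.destroy();
  }
}
```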
"pr_file_module": null
}
]
},
{
"discussion_id": "61352499",
"pr_number": 945,
"pr_file": "lib/http/node.js",
"created_at": "2016-04-27T23:13:35+00:00",
"commented_code": "writeBody: function writeBody(stream, httpRequest) {\n var body = httpRequest.body;\n\n if (body && WritableStream && ReadableStream) { // progress support\n if (!(body instanceof Stream)) body = AWS.util.buffer.toStream(body);\n body.pipe(this.progressStream(stream, httpRequest));\n }\n var totalBytes = parseInt(httpRequest.headers['Content-Length'], 10);\n var loadedBytes = 0;\n\n if (body instanceof Stream) {\n // for progress support of streaming content\n // tap the data event of the stream in addition to piping\n body.on('data', function(chunk) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "61352499",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 945,
"pr_file": "lib/http/node.js",
"discussion_id": "61352499",
"commented_code": "@@ -76,15 +75,40 @@ AWS.NodeHttpClient = AWS.util.inherit({\n \n writeBody: function writeBody(stream, httpRequest) {\n var body = httpRequest.body;\n-\n- if (body && WritableStream && ReadableStream) { // progress support\n- if (!(body instanceof Stream)) body = AWS.util.buffer.toStream(body);\n- body.pipe(this.progressStream(stream, httpRequest));\n- }\n+ var totalBytes = parseInt(httpRequest.headers['Content-Length'], 10);\n+ var loadedBytes = 0;\n \n if (body instanceof Stream) {\n+ // for progress support of streaming content\n+ // tap the data event of the stream in addition to piping\n+ body.on('data', function(chunk) {",
"comment_created_at": "2016-04-27T23:13:35+00:00",
"comment_author": "chrisradek",
"comment_body": "I think tapping into the `data` event on a stream could cause some unintended side-effects. In node.js 0.10.x, binding a `data` listener on a stream will cause the stream to emit 'data' events as fast as it can, ignoring the back-pressure that's automatically in place when using `pipe`. If the writable stream is slow, then this could cause loss of data.\n\nI don't think this is an issue in versions of node.js >= 0.12.x, and some simple testing seems to confirm that. However, we need to work with node.js 0.10.x as well.\n\nThe current method creates a new `writable` stream that also gets piped into in order to emit the 'sendProgress' events. I know it'll require refactoring your logic but that path seems safer across all our supported versions of node.\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "171932543",
"pr_number": 1956,
"pr_file": "lib/services/s3.js",
"created_at": "2018-03-02T18:56:47+00:00",
"commented_code": "}\n },\n\n /**\n * @api private\n */\n validateMd5TrailingChecksum: function validateMd5TrailingChecksum(resp) {\n var response = resp.httpResponse;\n var buffer = response.body;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "171932543",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/services/s3.js",
"discussion_id": "171932543",
"commented_code": "@@ -479,6 +499,49 @@ AWS.util.update(AWS.S3.prototype, {\n }\n },\n \n+ /**\n+ * @api private\n+ */\n+ validateMd5TrailingChecksum: function validateMd5TrailingChecksum(resp) {\n+ var response = resp.httpResponse;\n+ var buffer = response.body;",
"comment_created_at": "2018-03-02T18:56:47+00:00",
"comment_author": "chrisradek",
"comment_body": "In node.js, the response body is likely to be a stream. It might be easier in node to create a passthrough stream that generates the md5 hash and compares against the checksum. You'll know the expected content-length since it's passed in the response headers. Your solution here should still cover the browser use-case though.",
"pr_file_module": null
},
{
"comment_id": "174218206",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/services/s3.js",
"discussion_id": "171932543",
"commented_code": "@@ -479,6 +499,49 @@ AWS.util.update(AWS.S3.prototype, {\n }\n },\n \n+ /**\n+ * @api private\n+ */\n+ validateMd5TrailingChecksum: function validateMd5TrailingChecksum(resp) {\n+ var response = resp.httpResponse;\n+ var buffer = response.body;",
"comment_created_at": "2018-03-13T17:27:39+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Mark: This part will only be performed on Browser.",
"pr_file_module": null
}
]
},
{
"discussion_id": "173921060",
"pr_number": 1956,
"pr_file": "lib/http/node.js",
"created_at": "2018-03-12T19:34:31+00:00",
"commented_code": "var stream = http.request(options, function (httpResp) {\n if (stream.didCallback) return;\n var headers = httpResp.headers;\n var responseStream = httpResp;\n\n //check content length\n if (headers && headers['content-length'] && httpRequest.method !== 'HEAD') {\n var contentLengthCheckerStream =\n new ContentLengthCheckerStream(\n parseInt(headers['content-length'], 10)\n );\n responseStream.on('error', function(err) {\n contentLengthCheckerStream.emit('error', err);\n });\n responseStream = responseStream.pipe(contentLengthCheckerStream)\n }\n //if response contains checksum in payload, validate it and chop it off the resposne stream.\n if (headers && headers['x-amz-transfer-encoding'] && headers['content-length']) {\n var transferEncoding = headers['x-amz-transfer-encoding'];\n var contentLength = headers['content-length'];\n var integrityCheckerStream = new IntegrityCheckerStream(transferEncoding, contentLength);\n responseStream.on('error', function(err) {\n integrityCheckerStream.emit('error', err);",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "173921060",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/http/node.js",
"discussion_id": "173921060",
"commented_code": "@@ -41,9 +43,40 @@ AWS.NodeHttpClient = AWS.util.inherit({\n \n var stream = http.request(options, function (httpResp) {\n if (stream.didCallback) return;\n+ var headers = httpResp.headers;\n+ var responseStream = httpResp;\n+\n+ //check content length\n+ if (headers && headers['content-length'] && httpRequest.method !== 'HEAD') {\n+ var contentLengthCheckerStream =\n+ new ContentLengthCheckerStream(\n+ parseInt(headers['content-length'], 10)\n+ );\n+ responseStream.on('error', function(err) {\n+ contentLengthCheckerStream.emit('error', err);\n+ });\n+ responseStream = responseStream.pipe(contentLengthCheckerStream)\n+ }\n+ //if response contains checksum in payload, validate it and chop it off the resposne stream.\n+ if (headers && headers['x-amz-transfer-encoding'] && headers['content-length']) {\n+ var transferEncoding = headers['x-amz-transfer-encoding'];\n+ var contentLength = headers['content-length'];\n+ var integrityCheckerStream = new IntegrityCheckerStream(transferEncoding, contentLength);\n+ responseStream.on('error', function(err) {\n+ integrityCheckerStream.emit('error', err);",
"comment_created_at": "2018-03-12T19:34:31+00:00",
"comment_author": "chrisradek",
"comment_body": "If `responseStream` is an `IncomingMessage`, you'll want to call `destroy` on it. As of node 8.0.0, readable streams also have the `destroy` method, so you can check if `destroy` is a function, then call it if it is.",
"pr_file_module": null
}
]
},
{
"discussion_id": "173931162",
"pr_number": 1956,
"pr_file": "lib/request.js",
"created_at": "2018-03-12T20:11:55+00:00",
"commented_code": "resp.error = error;\n resp.error.retryable = false;\n });\n\n var shouldCheckContentLength = false;\n var expectedLen;\n if (req.httpRequest.method !== 'HEAD') {\n expectedLen = parseInt(headers['content-length'], 10);\n }\n if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) {\n shouldCheckContentLength = true;\n var receivedLen = 0;\n }\n\n var checkContentLengthAndEmit = function checkContentLengthAndEmit() {\n if (shouldCheckContentLength && receivedLen !== expectedLen) {\n stream.emit('error', AWS.util.error(\n new Error('Stream content length mismatch. Received ' +\n receivedLen + ' of ' + expectedLen + ' bytes.'),\n { code: 'StreamContentLengthMismatch' }\n ));\n } else if (AWS.HttpClient.streamsApiVersion === 2) {\n stream.end();\n } else {\n stream.emit('end');\n }\n };\n\n var httpStream = resp.httpResponse.createUnbufferedStream();\n\n if (AWS.HttpClient.streamsApiVersion === 2) {\n if (shouldCheckContentLength) {\n var lengthAccumulator = new streams.PassThrough();\n lengthAccumulator._write = function(chunk) {\n if (chunk && chunk.length) {\n receivedLen += chunk.length;\n }\n return streams.PassThrough.prototype._write.apply(this, arguments);\n };\n\n lengthAccumulator.on('end', checkContentLengthAndEmit);\n stream.on('error', function(err) {\n shouldCheckContentLength = false;\n httpStream.unpipe(lengthAccumulator);\n lengthAccumulator.emit('end');\n lengthAccumulator.end();\n });\n httpStream.pipe(lengthAccumulator).pipe(stream, { end: false });\n } else {\n httpStream.pipe(stream);\n }\n } else {\n\n if (shouldCheckContentLength) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "173931162",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "lib/request.js",
"discussion_id": "173931162",
"commented_code": "@@ -595,77 +591,28 @@ AWS.Request = inherit({\n resp.error = error;\n resp.error.retryable = false;\n });\n-\n- var shouldCheckContentLength = false;\n- var expectedLen;\n- if (req.httpRequest.method !== 'HEAD') {\n- expectedLen = parseInt(headers['content-length'], 10);\n- }\n- if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) {\n- shouldCheckContentLength = true;\n- var receivedLen = 0;\n- }\n-\n- var checkContentLengthAndEmit = function checkContentLengthAndEmit() {\n- if (shouldCheckContentLength && receivedLen !== expectedLen) {\n- stream.emit('error', AWS.util.error(\n- new Error('Stream content length mismatch. Received ' +\n- receivedLen + ' of ' + expectedLen + ' bytes.'),\n- { code: 'StreamContentLengthMismatch' }\n- ));\n- } else if (AWS.HttpClient.streamsApiVersion === 2) {\n- stream.end();\n- } else {\n- stream.emit('end');\n- }\n- };\n-\n var httpStream = resp.httpResponse.createUnbufferedStream();\n-\n if (AWS.HttpClient.streamsApiVersion === 2) {\n- if (shouldCheckContentLength) {\n- var lengthAccumulator = new streams.PassThrough();\n- lengthAccumulator._write = function(chunk) {\n- if (chunk && chunk.length) {\n- receivedLen += chunk.length;\n- }\n- return streams.PassThrough.prototype._write.apply(this, arguments);\n- };\n-\n- lengthAccumulator.on('end', checkContentLengthAndEmit);\n- stream.on('error', function(err) {\n- shouldCheckContentLength = false;\n- httpStream.unpipe(lengthAccumulator);\n- lengthAccumulator.emit('end');\n- lengthAccumulator.end();\n- });\n- httpStream.pipe(lengthAccumulator).pipe(stream, { end: false });\n- } else {\n httpStream.pipe(stream);\n- }\n } else {\n-\n- if (shouldCheckContentLength) {",
"comment_created_at": "2018-03-12T20:11:55+00:00",
"comment_author": "chrisradek",
"comment_body": "We'll still need to perform these checks for Node.js 0.8, especially if we start turning it on by default. Unfortunately 0.8.x doesn't include `Transform` in its `stream` package, so your implementation won't work for those cases. You'll likely need to resort to `data` listeners to perform your calculations if `Transform` doesn't exist.",
"pr_file_module": null
}
]
}
]
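
The checksum and content-length threads above converge on one technique: pipe the response through a transform that counts bytes and hashes the payload, then fail the stream if either check misses. A minimal Node.js sketch of that idea, not SDK source; the function name and error messages are illustrative:

```typescript
import { Transform, TransformCallback } from 'stream';
import { createHash } from 'crypto';

// Pass data through unchanged while accumulating a byte count and an MD5
// hash; verify both against the response headers once the stream ends.
function checksumStream(expectedLength: number, expectedMd5Hex: string): Transform {
  const hash = createHash('md5');
  let received = 0;
  return new Transform({
    transform(chunk: Buffer, _encoding, done: TransformCallback) {
      received += chunk.length;
      hash.update(chunk);
      done(null, chunk);
    },
    flush(done: TransformCallback) {
      if (received !== expectedLength) {
        done(new Error('Stream content length mismatch. Received ' +
          received + ' of ' + expectedLength + ' bytes.'));
      } else if (hash.digest('hex') !== expectedMd5Hex) {
        done(new Error('MD5 checksum mismatch.'));
      } else {
        done();
      }
    }
  });
}
```

The caveats raised in the review still apply: `Transform` does not exist in Node.js 0.8, and on error the source stream should be destroyed rather than left flowing.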


@@ -0,0 +1,68 @@
[
{
"discussion_id": "574012172",
"pr_number": 3524,
"pr_file": "lib/config-base.d.ts",
"created_at": "2021-02-10T19:25:57+00:00",
"commented_code": "* Used in node.js environments only.\n */\n connectTimeout?: number;\n /**\n * Custom DNS lookup function.\n * Defaults to dns.lookup.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "574012172",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3524,
"pr_file": "lib/config-base.d.ts",
"discussion_id": "574012172",
"commented_code": "@@ -61,6 +61,11 @@ export interface HTTPOptions {\n * Used in node.js environments only.\n */\n connectTimeout?: number;\n+ /**\n+ * Custom DNS lookup function.\n+ * Defaults to dns.lookup.",
"comment_created_at": "2021-02-10T19:25:57+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Nit: can you specify that they are node.js environments only? And looks like it's only available in newer version: https://github.com/nodejs/node/commit/5bd6f516d82d069ff8710b86108dedc333b2b580\r\n\r\n```suggestion\r\n * Defaults to dns.lookup.\r\n * Used in Node.js (>= v12.x) environment only.\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "229453899",
"pr_number": 2253,
"pr_file": "lib/config.d.ts",
"created_at": "2018-10-30T19:30:50+00:00",
"commented_code": "* by DynamoDB.\n */\n dynamoDbCrc32?: boolean;\n /**\n * whether to request endpoint for operations that allow",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "229453899",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2253,
"pr_file": "lib/config.d.ts",
"discussion_id": "229453899",
"commented_code": "@@ -276,4 +276,9 @@ export abstract class ConfigurationOptions {\n * by DynamoDB.\n */\n dynamoDbCrc32?: boolean;\n+ /**\n+ * whether to request endpoint for operations that allow ",
"comment_created_at": "2018-10-30T19:30:50+00:00",
"comment_author": "chrisradek",
"comment_body": "Grammar: Maybe change to something like\r\n> Whether to enable endpoint discovery for operations that allow optionally using an endpoint returned by the service.\r\n\r\nI couldn't find an example of what other teams were using for their docs.",
"pr_file_module": null
}
]
},
{
"discussion_id": "225988591",
"pr_number": 2283,
"pr_file": "lib/shared-ini/ini-loader.d.ts",
"created_at": "2018-10-17T15:46:15+00:00",
"commented_code": "export interface LoadFileOptions {\n filename?: string,\n isConfig?: boolean,\n}\n\nexport interface IniFileContent {\n [key: string]: {[key: string]: string}\n}\n\nexport class IniLoader{\n \n/** Remove all cached files. Used after config files are updated. */\n clearCachedFiles():void;\n\n/**\n * Load configurations from config/credentials files and cache them \n * for later use. If no file is specified it will try to load default\n * files.\n * @returns {object} object of all profile information in the file\n */\n loadFrom(options: LoadFileOptions): IniFileContent;\n\n/**\n * Read specified file and return parsed config object. This method will always\n * read from disk and won't update cache. This is a lower level function of \n * loadFrom().\n * @param filename [string] valid readable file path containing aws credentials\n * or aws configs\n * @param isConfig [boolean] true if specified file is an aws config file; false\n * if the file is an aws credentials file\n */\n parseFile(filename: string, isConfig: boolean): IniFileContent;\n}\n\nexport function parseFile(filename: string, isConfig: boolean): IniFileContent;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "225988591",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2283,
"pr_file": "lib/shared-ini/ini-loader.d.ts",
"discussion_id": "225988591",
"commented_code": "@@ -0,0 +1,35 @@\n+export interface LoadFileOptions {\n+ filename?: string,\n+ isConfig?: boolean,\n+}\n+\n+export interface IniFileContent {\n+ [key: string]: {[key: string]: string}\n+}\n+\n+export class IniLoader{\n+ \n+/** Remove all cached files. Used after config files are updated. */\n+ clearCachedFiles():void;\n+\n+/**\n+ * Load configurations from config/credentials files and cache them \n+ * for later use. If no file is specified it will try to load default\n+ * files.\n+ * @returns {object} object of all profile information in the file\n+ */\n+ loadFrom(options: LoadFileOptions): IniFileContent;\n+\n+/**\n+ * Read specified file and return parsed config object. This method will always\n+ * read from disk and won't update cache. This is a lower level function of \n+ * loadFrom().\n+ * @param filename [string] valid readable file path containing aws credentials\n+ * or aws configs\n+ * @param isConfig [boolean] true if specified file is an aws config file; false\n+ * if the file is an aws credentials file\n+ */\n+ parseFile(filename: string, isConfig: boolean): IniFileContent;\n+}\n+\n+export function parseFile(filename: string, isConfig: boolean): IniFileContent;",
"comment_created_at": "2018-10-17T15:46:15+00:00",
"comment_author": "chrisradek",
"comment_body": "Is `parseFile` exposed to consumers of the SDK? If so, it should probably have some documentation, otherwise we don't need typings for it.",
"pr_file_module": null
}
]
}
]


@@ -36,73 +36,3 @@ endpointDiscovery?: boolean;
```
Thorough documentation helps other developers understand how to use your code correctly and identify potential limitations or requirements.
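
For instance, a runtime-specific option can carry its environment and version constraints in the doc comment itself. A short sketch in the style of the typings above; the `lookup` property name and its signature are illustrative, not copied from the SDK's actual declaration:

```typescript
export interface HTTPOptions {
  /**
   * A custom DNS lookup function.
   * Defaults to dns.lookup.
   * Used in Node.js (>= v12.x) environments only.
   */
  lookup?: (
    hostname: string,
    options: object,
    callback: (err: Error | null, address: string, family: number) => void
  ) => void;
}
```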


@@ -0,0 +1,134 @@
[
{
"discussion_id": "42285305",
"pr_number": 707,
"pr_file": "lib/config.js",
"created_at": "2015-10-16T20:17:20+00:00",
"commented_code": "convertResponseTypes: true,\n dynamoDbCrc32: true,\n systemClockOffset: 0,\n signatureVersion: null\n signatureVersion: null,\n signatureCache: true",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "42285305",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 707,
"pr_file": "lib/config.js",
"discussion_id": "42285305",
"commented_code": "@@ -390,7 +390,8 @@ AWS.Config = AWS.util.inherit({\n convertResponseTypes: true,\n dynamoDbCrc32: true,\n systemClockOffset: 0,\n- signatureVersion: null\n+ signatureVersion: null,\n+ signatureCache: true",
"comment_created_at": "2015-10-16T20:17:20+00:00",
"comment_author": "chrisradek",
"comment_body": "Would you mind adding some docs around this new config parameter?\nSomething like this at line 98:\n\n```\n * @!attribute signatureCache\n * @return [Boolean] whether the signature to sign requests with (overriding\n * the API configuration) is cached. Only applies to the signature version 'v4'.\n * Defaults to `true`.\n```\n\nand something like this at line 185:\n\n```\n * @option options signatureCache [Boolean] whether the signature to sign\n * requests with (overriding the API configuration) is cached. Only applies\n * to the signature version 'v4'. Defaults to `true`.\n```\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "107561314",
"pr_number": 1425,
"pr_file": "lib/s3/managed_upload.js",
"created_at": "2017-03-22T23:56:11+00:00",
"commented_code": "* failures manually.\n * @option options service [AWS.S3] an optional S3 service object to use for\n * requests. This object might have bound parameters used by the uploader.\n * @option options tags [Array<map>] The tags to apply to the uploaded object.\n * Each tag should have a `Key` and `Value` keys.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "107561314",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1425,
"pr_file": "lib/s3/managed_upload.js",
"discussion_id": "107561314",
"commented_code": "@@ -60,6 +60,8 @@ AWS.S3.ManagedUpload = AWS.util.inherit({\n * failures manually.\n * @option options service [AWS.S3] an optional S3 service object to use for\n * requests. This object might have bound parameters used by the uploader.\n+ * @option options tags [Array<map>] The tags to apply to the uploaded object.\n+ * Each tag should have a `Key` and `Value` keys.",
"comment_created_at": "2017-03-22T23:56:11+00:00",
"comment_author": "chrisradek",
"comment_body": "Minor: Might be worth adding an example that uses tags.",
"pr_file_module": null
}
]
},
{
"discussion_id": "229467137",
"pr_number": 2253,
"pr_file": "lib/discover_endpoint.js",
"created_at": "2018-10-30T20:10:18+00:00",
"commented_code": "var AWS = require('./core');\nvar util = require('./util');\nvar ENDPOINT_OPERATION_MAX_RETRIES = 60;\nvar endpointDiscoveryEnabledEnv = 'AWS_ENABLE_ENDPOINT_DISCOVERY';\n\n/**\n * Generate key to index the endpoints in the cache\n * @return [map<String,String>] object with keys to index endpoints.\n * @api private\n */\nfunction getCacheKey(request) {\n var service = request.service;\n var api = service.api || {};\n var operations = api.operations;\n var identifiers = {};\n if (operations[request.operation] && operations[request.operation].name) {\n identifiers.operation = operations[request.operation].name;\n }\n if (service.config.region) {\n identifiers.region = service.config.region;\n }\n if (api.serviceId) {\n identifiers.serviceId = api.serviceId\n }\n if (service.config.credentials.accessKeyId) {\n identifiers.accessKeyId = service.config.credentials.accessKeyId\n }\n return identifiers;\n}\n\n/**\n * @api private",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "229467137",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2253,
"pr_file": "lib/discover_endpoint.js",
"discussion_id": "229467137",
"commented_code": "@@ -0,0 +1,340 @@\n+var AWS = require('./core');\n+var util = require('./util');\n+var ENDPOINT_OPERATION_MAX_RETRIES = 60;\n+var endpointDiscoveryEnabledEnv = 'AWS_ENABLE_ENDPOINT_DISCOVERY';\n+\n+/**\n+ * Generate key to index the endpoints in the cache\n+ * @return [map<String,String>] object with keys to index endpoints.\n+ * @api private\n+ */\n+function getCacheKey(request) {\n+ var service = request.service;\n+ var api = service.api || {};\n+ var operations = api.operations;\n+ var identifiers = {};\n+ if (operations[request.operation] && operations[request.operation].name) {\n+ identifiers.operation = operations[request.operation].name;\n+ }\n+ if (service.config.region) {\n+ identifiers.region = service.config.region;\n+ }\n+ if (api.serviceId) {\n+ identifiers.serviceId = api.serviceId\n+ }\n+ if (service.config.credentials.accessKeyId) {\n+ identifiers.accessKeyId = service.config.credentials.accessKeyId\n+ }\n+ return identifiers;\n+}\n+\n+/**\n+ * @api private",
"comment_created_at": "2018-10-30T20:10:18+00:00",
"comment_author": "chrisradek",
"comment_body": "Can you add a comment that explains what this function is supposed to do? It looks like you're populating an object with identifiers and customer-provided values, but it took me a while to grok that and the function name isn't clear. ",
"pr_file_module": null
}
]
},
{
"discussion_id": "229470362",
"pr_number": 2253,
"pr_file": "lib/discover_endpoint.js",
"created_at": "2018-10-30T20:19:55+00:00",
"commented_code": "var AWS = require('./core');\nvar util = require('./util');\nvar ENDPOINT_OPERATION_MAX_RETRIES = 60;\nvar endpointDiscoveryEnabledEnv = 'AWS_ENABLE_ENDPOINT_DISCOVERY';\n\n/**\n * Generate key to index the endpoints in the cache\n * @return [map<String,String>] object with keys to index endpoints.\n * @api private\n */\nfunction getCacheKey(request) {\n var service = request.service;\n var api = service.api || {};\n var operations = api.operations;\n var identifiers = {};\n if (operations[request.operation] && operations[request.operation].name) {\n identifiers.operation = operations[request.operation].name;\n }\n if (service.config.region) {\n identifiers.region = service.config.region;\n }\n if (api.serviceId) {\n identifiers.serviceId = api.serviceId\n }\n if (service.config.credentials.accessKeyId) {\n identifiers.accessKeyId = service.config.credentials.accessKeyId\n }\n return identifiers;\n}\n\n/**\n * @api private\n */\nfunction marshallCustomIdentifiersHelper(result, params, shape) {\n if (!shape || params === undefined || params === null) return;\n if (shape.type === 'structure' && shape.required && shape.required.length > 0) {\n util.arrayEach(shape.required, function(name) {\n var memberShape = shape.members[name];\n if (memberShape.endpointDiscoveryId === true) {\n var locationName = memberShape.isLocationName ? memberShape.name : name;\n result[locationName] = String(params[name]);\n } else {\n marshallCustomIdentifiersHelper(result, params[name], memberShape);\n }\n });\n }\n}\n\n/**\n * Get customized cache key according to the 'endpointDiscoveryId' trait.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "229470362",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2253,
"pr_file": "lib/discover_endpoint.js",
"discussion_id": "229470362",
"commented_code": "@@ -0,0 +1,340 @@\n+var AWS = require('./core');\n+var util = require('./util');\n+var ENDPOINT_OPERATION_MAX_RETRIES = 60;\n+var endpointDiscoveryEnabledEnv = 'AWS_ENABLE_ENDPOINT_DISCOVERY';\n+\n+/**\n+ * Generate key to index the endpoints in the cache\n+ * @return [map<String,String>] object with keys to index endpoints.\n+ * @api private\n+ */\n+function getCacheKey(request) {\n+ var service = request.service;\n+ var api = service.api || {};\n+ var operations = api.operations;\n+ var identifiers = {};\n+ if (operations[request.operation] && operations[request.operation].name) {\n+ identifiers.operation = operations[request.operation].name;\n+ }\n+ if (service.config.region) {\n+ identifiers.region = service.config.region;\n+ }\n+ if (api.serviceId) {\n+ identifiers.serviceId = api.serviceId\n+ }\n+ if (service.config.credentials.accessKeyId) {\n+ identifiers.accessKeyId = service.config.credentials.accessKeyId\n+ }\n+ return identifiers;\n+}\n+\n+/**\n+ * @api private\n+ */\n+function marshallCustomIdentifiersHelper(result, params, shape) {\n+ if (!shape || params === undefined || params === null) return;\n+ if (shape.type === 'structure' && shape.required && shape.required.length > 0) {\n+ util.arrayEach(shape.required, function(name) {\n+ var memberShape = shape.members[name];\n+ if (memberShape.endpointDiscoveryId === true) {\n+ var locationName = memberShape.isLocationName ? memberShape.name : name;\n+ result[locationName] = String(params[name]);\n+ } else {\n+ marshallCustomIdentifiersHelper(result, params[name], memberShape);\n+ }\n+ });\n+ }\n+}\n+\n+/**\n+ * Get customized cache key according to the 'endpointDiscoveryId' trait.",
"comment_created_at": "2018-10-30T20:19:55+00:00",
"comment_author": "chrisradek",
"comment_body": "I think this comment is a bit misleading, as is the one for `getCacheKey`. Both imply that you're going to get a single `key` (presumably a string), but you're returning a map. I think something like the following is more clear: \r\n```javascript\r\n/**\r\n * Get custom identifiers for cache key.\r\n * Identifies custom identifiers by checking each shape's `endpointDiscoveryId` trait.\r\n */\r\n```\r\nThis would at least help me, because I kept expecting `cacheKey` later on to be a string you pass to `endpointCache.get`, but it turns out you pass in a map of elements.",
"pr_file_module": null
}
]
},
{
"discussion_id": "223489992",
"pr_number": 2283,
"pr_file": "lib/shared-ini/ini-loader.js",
"created_at": "2018-10-08T20:37:17+00:00",
"commented_code": "var AWS = require('../core');\nvar os = require('os');\nvar path = require('path');\n\nmodule.exports = AWS.util.inherit({",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "223489992",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2283,
"pr_file": "lib/shared-ini/ini-loader.js",
"discussion_id": "223489992",
"commented_code": "@@ -0,0 +1,70 @@\n+var AWS = require('../core');\n+var os = require('os');\n+var path = require('path');\n+\n+module.exports = AWS.util.inherit({",
"comment_created_at": "2018-10-08T20:37:17+00:00",
"comment_author": "chrisradek",
"comment_body": "If you want this to appear in documentation, you also need to attach IniLoader to the AWS namespace:\r\n```javascript\r\nAWS.IniLoader = AWS.util.inherit/* ... */\r\n\r\n// optionally also export it:\r\nmodule.exports = AWS.IniLoader;\r\n```\r\n\r\nYou'll also want to add doc strings to the public methods.",
"pr_file_module": null
}
]
},
{
"discussion_id": "138403541",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"created_at": "2017-09-12T16:40:41+00:00",
"commented_code": "}\n },\n\n /**\n * @!method getSignedUrlPromise()\n * Returns a 'thenable' promise.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "138403541",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"discussion_id": "138403541",
"commented_code": "@@ -782,6 +782,52 @@ AWS.util.update(AWS.S3.prototype, {\n }\n },\n \n+ /**\n+ * @!method getSignedUrlPromise()\n+ * Returns a 'thenable' promise.",
"comment_created_at": "2017-09-12T16:40:41+00:00",
"comment_author": "chrisradek",
"comment_body": "Can you amend this to state a URL will be returned?\r\n\r\nSomething like:\r\n\r\n> Returns a 'thenable' promise that will be resolved with a pre-signed URL.",
"pr_file_module": null
}
]
}
]


@@ -46,139 +46,3 @@ For methods that return promises, clearly document what the promise resolves to:
```
Ensure that if you're making an API public, all necessary documentation is added and the component is properly exported so it appears in generated documentation.
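
A sketch of the pattern suggested in the IniLoader discussion: attach the class to the AWS namespace so it appears in generated documentation, document its public methods, and still export it from the module. The `../core` path and `AWS.util.inherit` come from the review comments; the method body is a placeholder:

```typescript
// Not the actual ini-loader source; a sketch of how to expose it publicly.
const AWS: any = require('../core');

const IniLoader = AWS.util.inherit({
  /**
   * Remove all cached files. Used after config files are updated.
   */
  clearCachedFiles(): void {
    // placeholder body: the real method would drop the parsed-file cache
  }
});

// Attach to the AWS namespace for documentation, and export for consumers.
AWS.IniLoader = IniLoader;
export = IniLoader;
```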


@@ -0,0 +1,46 @@
[
{
"discussion_id": "396624797",
"pr_number": 3171,
"pr_file": ".github/ISSUE_TEMPLATE/---bug-report.md",
"created_at": "2020-03-23T17:25:09+00:00",
"commented_code": "* For browsers, the SDK version number is in the script tag <pre>src=\".../aws-sdk-<b>2.466.0</b>.min.js\"</pre>\n* For Node.js, get SDK version by\n * running command `npm list aws-sdk` from your root directory\n * printing the output of `AWS.VERSION` in your code where `AWS = require(\"aws-sdk\");`\n * printing the output of `console.log(AWS.VERSION)` in your code where `AWS = require(\"aws-sdk\");`\n * if running on Lambda, Lambda runtimes can be found [here](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html)",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "396624797",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3171,
"pr_file": ".github/ISSUE_TEMPLATE/---bug-report.md",
"discussion_id": "396624797",
"commented_code": "@@ -28,7 +28,8 @@ Example: v2.466.0\n * For browsers, the SDK version number is in the script tag <pre>src=\".../aws-sdk-<b>2.466.0</b>.min.js\"</pre>\n * For Node.js, get SDK version by\n * running command `npm list aws-sdk` from your root directory\n- * printing the output of `AWS.VERSION` in your code where `AWS = require(\"aws-sdk\");`\n+ * printing the output of `console.log(AWS.VERSION)` in your code where `AWS = require(\"aws-sdk\");`\n+ * if running on Lambda, Lambda runtimes can be found [here](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html) ",
"comment_created_at": "2020-03-23T17:25:09+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "People can bundle their own AWS to the Lambda. You can ask something like `If running on Lambda and using SDK provided by Lambda runtime, you can find the SDK versions here`",
"pr_file_module": null
}
]
},
{
"discussion_id": "297439672",
"pr_number": 2742,
"pr_file": ".github/ISSUE_TEMPLATE/---known-issues.md",
"created_at": "2019-06-26T00:00:43+00:00",
"commented_code": "---\nname: \"\\U0001F6E0 Know issues\"\nabout: Read me before opening an issue!\ntitle: ''\nassignees: ''\n\n---\n\nThis is a list of know issues and how can you work around them:\n\n1. **Exceptions contains `Unexpected key...` or `... is not a function`**\n\n AWS APIs are updating constantly, you might be calling a new API with an older version is SDK. In\n this case you should try installing the newest `aws-sdk`like: \n\n `npm install aws-sdk@latest`\n\n If you are using the SDK in AWS Lambda, you are likely using old SDK version too. AWS Lambda bundled\n SDK version is shown in [this document](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html).\n You can import the latest version of SDK by following \n [these guidelines](https://docs.aws.amazon.com/lambda/latest/dg/nodejs-create-deployment-pkg.html#nodejs-package-dependencies).\n\n Go ahead and open an issue is you are still seeing this exception.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "297439672",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2742,
"pr_file": ".github/ISSUE_TEMPLATE/---known-issues.md",
"discussion_id": "297439672",
"commented_code": "@@ -0,0 +1,23 @@\n+---\n+name: \"\\U0001F6E0 Know issues\"\n+about: Read me before opening an issue!\n+title: ''\n+assignees: ''\n+\n+---\n+\n+This is a list of know issues and how can you work around them:\n+\n+1. **Exceptions contains `Unexpected key...` or `... is not a function`**\n+\n+ AWS APIs are updating constantly, you might be calling a new API with an older version is SDK. In\n+ this case you should try installing the newest `aws-sdk`like: \n+\n+ `npm install aws-sdk@latest`\n+\n+ If you are using the SDK in AWS Lambda, you are likely using old SDK version too. AWS Lambda bundled\n+ SDK version is shown in [this document](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html).\n+ You can import the latest version of SDK by following \n+ [these guidelines](https://docs.aws.amazon.com/lambda/latest/dg/nodejs-create-deployment-pkg.html#nodejs-package-dependencies).\n+\n+ Go ahead and open an issue is you are still seeing this exception.",
"comment_created_at": "2019-06-26T00:00:43+00:00",
"comment_author": "trivikr",
"comment_body": "```suggestion\r\nIf you still like to open an issue, change [ ] to [x] below:\r\n- [ ] I've confirmed that I'm using latest version of SDK, and issue is still reproducible\r\n```",
"pr_file_module": null
}
]
}
]


@@ -34,51 +34,3 @@ console.log("Using AWS SDK version:", AWS.VERSION);
```
This documentation helps prevent compatibility issues like "Unexpected key" errors and ensures all developers understand the environment configuration requirements.
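
A small sketch of the kind of version report these templates ask for. `AWS.VERSION` appears in the guidance above; treating `AWS_EXECUTION_ENV` as the variable Lambda runtimes set is an assumption about the environment:

```typescript
import AWS = require('aws-sdk');

// Print the details a bug report needs: SDK version, Node.js runtime,
// and whether the code runs on a Lambda-provided runtime.
console.log('AWS SDK version:', AWS.VERSION);
console.log('Node.js version:', process.version);
console.log('Lambda runtime:',
  process.env.AWS_EXECUTION_ENV || 'not on Lambda (or using a custom runtime)');
```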



@@ -0,0 +1,46 @@
[
{
"discussion_id": "35271327",
"pr_number": 667,
"pr_file": "doc-src/templates/api-versions/model_documentor.rb",
"created_at": "2015-07-22T22:20:41+00:00",
"commented_code": "@lines += shapes(api, operation['input']).map {|line| \" \" + line }\n\n ## @example tag\n\n @lines << \"@example Calling the #{method_name(operation_name)} operation\"",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "35271327",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 667,
"pr_file": "doc-src/templates/api-versions/model_documentor.rb",
"discussion_id": "35271327",
"commented_code": "@@ -93,11 +93,19 @@ def initialize(operation_name, operation, api, klass)\n @lines += shapes(api, operation['input']).map {|line| \" \" + line }\n \n ## @example tag\n-\n @lines << \"@example Calling the #{method_name(operation_name)} operation\"",
"comment_created_at": "2015-07-22T22:20:41+00:00",
"comment_author": "lsegal",
"comment_body": "I would suggest moving any hand-written examples above the generated example, since they are likely to be more correct / useful.\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "35271330",
"pr_number": 667,
"pr_file": "doc-src/templates/api-versions/model_documentor.rb",
"created_at": "2015-07-22T22:20:42+00:00",
"commented_code": "@lines += shapes(api, operation['input']).map {|line| \" \" + line }\n\n ## @example tag\n\n @lines << \"@example Calling the #{method_name(operation_name)} operation\"\n @lines << generate_example(api, klass, method_name(operation_name),\n operation['input']).split(\"\\n\").map {|line| \" \" + line }\n @lines << \"\"\n if examples\n examples.each do |example|\n @lines << \"@example Example: #{example['title']}\"",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "35271330",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 667,
"pr_file": "doc-src/templates/api-versions/model_documentor.rb",
"discussion_id": "35271330",
"commented_code": "@@ -93,11 +93,19 @@ def initialize(operation_name, operation, api, klass)\n @lines += shapes(api, operation['input']).map {|line| \" \" + line }\n \n ## @example tag\n-\n @lines << \"@example Calling the #{method_name(operation_name)} operation\"\n @lines << generate_example(api, klass, method_name(operation_name),\n operation['input']).split(\"\\n\").map {|line| \" \" + line }\n- @lines << \"\"\n+ if examples\n+ examples.each do |example|\n+ @lines << \"@example Example: #{example['title']}\"",
"comment_created_at": "2015-07-22T22:20:42+00:00",
"comment_author": "lsegal",
"comment_body": "Example tags should not be prefixed with \"Example:\" text, let's just list the title.\n",
"pr_file_module": null
}
]
}
]


@@ -27,51 +27,3 @@ For instance:
```
This approach improves readability and maintains consistent documentation style.
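To make the convention concrete, here is a hypothetical doc comment (illustrative operation and parameters, not generated output) laid out the way the review asks: hand-written example first, generated calling example second, and each `@example` title given plainly without an "Example:" prefix.

```js
/**
 * Hand-written example listed first, since it is likely to be more
 * correct and useful than the generated one.
 *
 * @example To upload an object to a bucket
 *   s3.putObject({Bucket: 'my-bucket', Key: 'my-key', Body: 'data'}, console.log);
 *
 * @example Calling the putObject operation
 *   s3.putObject(params, function(err, data) { console.log(err, data); });
 */
```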


@@ -0,0 +1,92 @@
[
{
"discussion_id": "1196576262",
"pr_number": 4422,
"pr_file": "lib/region_config_data.json",
"created_at": "2023-05-17T14:03:21+00:00",
"commented_code": "\"globalEndpoint\": true,\n \"signingRegion\": \"cn-north-1\"\n },\n \"us-gov-*/iam\": \"globalGovCloud\",\n \"us-iso-*/iam\": {\n \"endpoint\": \"{service}.us-iso-east-1.c2s.ic.gov\",\n \"globalEndpoint\": true,\n \"signingRegion\": \"us-east-1\"",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "1196576262",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 4422,
"pr_file": "lib/region_config_data.json",
"discussion_id": "1196576262",
"commented_code": "@@ -43,8 +43,13 @@\n \"globalEndpoint\": true,\n \"signingRegion\": \"cn-north-1\"\n },\n- \"us-gov-*/iam\": \"globalGovCloud\",\n+ \"us-iso-*/iam\": {\n+ \"endpoint\": \"{service}.us-iso-east-1.c2s.ic.gov\",\n+ \"globalEndpoint\": true,\n+ \"signingRegion\": \"us-east-1\"",
"comment_created_at": "2023-05-17T14:03:21+00:00",
"comment_author": "trivikr",
"comment_body": "Should this be us-iso-east-1?\r\n```suggestion\r\n \"signingRegion\": \"us-iso-east-1\"\r\n```",
"pr_file_module": null
},
{
"comment_id": "1196584529",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 4422,
"pr_file": "lib/region_config_data.json",
"discussion_id": "1196576262",
"commented_code": "@@ -43,8 +43,13 @@\n \"globalEndpoint\": true,\n \"signingRegion\": \"cn-north-1\"\n },\n- \"us-gov-*/iam\": \"globalGovCloud\",\n+ \"us-iso-*/iam\": {\n+ \"endpoint\": \"{service}.us-iso-east-1.c2s.ic.gov\",\n+ \"globalEndpoint\": true,\n+ \"signingRegion\": \"us-east-1\"",
"comment_created_at": "2023-05-17T14:09:18+00:00",
"comment_author": "trivikr",
"comment_body": "Verified that it should be `us-iso-east-1` from Endpoints 2.0 tests https://github.com/aws/aws-sdk-js-v3/blob/7ed7101dcc4e81038b6c7f581162b959e6b33a04/codegen/sdk-codegen/aws-models/iam.json#L2270-L2291",
"pr_file_module": null
},
{
"comment_id": "1198708906",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 4422,
"pr_file": "lib/region_config_data.json",
"discussion_id": "1196576262",
"commented_code": "@@ -43,8 +43,13 @@\n \"globalEndpoint\": true,\n \"signingRegion\": \"cn-north-1\"\n },\n- \"us-gov-*/iam\": \"globalGovCloud\",\n+ \"us-iso-*/iam\": {\n+ \"endpoint\": \"{service}.us-iso-east-1.c2s.ic.gov\",\n+ \"globalEndpoint\": true,\n+ \"signingRegion\": \"us-east-1\"",
"comment_created_at": "2023-05-19T08:46:01+00:00",
"comment_author": "pinak",
"comment_body": "Yes, thanks for catching that!",
"pr_file_module": null
}
]
},
{
"discussion_id": "1034017482",
"pr_number": 4288,
"pr_file": ".changes/next-release/bugfix-region-config-31f590e0.json",
"created_at": "2022-11-28T20:28:38+00:00",
"commented_code": "{\n \"type\": \"bugfix\",\n \"category\": \"region_config\",\n \"description\": \"avoid mutation in global object signatureVersion\"",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "1034017482",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 4288,
"pr_file": ".changes/next-release/bugfix-region-config-31f590e0.json",
"discussion_id": "1034017482",
"commented_code": "@@ -0,0 +1,5 @@\n+{\n+ \"type\": \"bugfix\",\n+ \"category\": \"region_config\",\n+ \"description\": \"avoid mutation in global object signatureVersion\"",
"comment_created_at": "2022-11-28T20:28:38+00:00",
"comment_author": "trivikr",
"comment_body": "```suggestion\r\n \"description\": \"Set signatureVersion to bearer explcitly when defined in service API\"\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "608843822",
"pr_number": 3700,
"pr_file": ".changes/next-release/bugfix-ManagedUpload-2bd31158.json",
"created_at": "2021-04-07T17:14:07+00:00",
"commented_code": "{\n \"type\": \"bugfix\",\n \"category\": \"ManagedUpload\",\n \"description\": \"fix a bug that credentials refresh to frequently\"",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "608843822",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 3700,
"pr_file": ".changes/next-release/bugfix-ManagedUpload-2bd31158.json",
"discussion_id": "608843822",
"commented_code": "@@ -0,0 +1,5 @@\n+{\n+ \"type\": \"bugfix\",\n+ \"category\": \"ManagedUpload\",\n+ \"description\": \"fix a bug that credentials refresh to frequently\"",
"comment_created_at": "2021-04-07T17:14:07+00:00",
"comment_author": "trivikr",
"comment_body": "The type already has bugfix\r\n```suggestion\r\n \"description\": \"Use resolved credentials if customer supplies configured S3 Client\"\r\n```",
"pr_file_module": null
}
]
}
]


@@ -31,97 +31,3 @@ Example:
```
For credential configurations, always use resolved credentials when available rather than relying on automatic refresh mechanisms that may cause performance issues.
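A minimal sketch of what that looks like with `ManagedUpload` (bucket, key, and body are placeholders): when the caller supplies an already-configured S3 client via the `service` option, the upload reuses that client's resolved credentials instead of triggering a refresh per request.

```js
var AWS = require('aws-sdk');

// Customer-supplied, already-configured client: its credentials are
// resolved once and reused, rather than refreshed for every part upload.
var s3 = new AWS.S3();

var upload = new AWS.S3.ManagedUpload({
  service: s3,
  params: {Bucket: 'my-bucket', Key: 'my-key', Body: 'hello world'}
});

upload.send(function(err, data) {
  console.log(err, data);
});
```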


@@ -0,0 +1,68 @@
[
{
"discussion_id": "61648554",
"pr_number": 976,
"pr_file": "test/signers/v4.spec.coffee",
"created_at": "2016-04-29T21:39:14+00:00",
"commented_code": "describe 'canonicalString', ->\n it 'sorts the search string', ->\n req = new AWS.CloudSearchDomain({endpoint: 'host.domain.com'}).search({query: 'foo', cursor: 'initial', queryOptions: '{}'}).build()\n req = new AWS.CloudSearchDomain({endpoint: 'host.domain.com'})\n .search({query: 'foo', cursor: 'initial', queryOptions: '{}'})\n .removeListener('build', AWS.CloudSearchDomain.prototype.convertGetToPost)",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "61648554",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 976,
"pr_file": "test/signers/v4.spec.coffee",
"discussion_id": "61648554",
"commented_code": "@@ -117,7 +117,10 @@ describe 'AWS.Signers.V4', ->\n \n describe 'canonicalString', ->\n it 'sorts the search string', ->\n- req = new AWS.CloudSearchDomain({endpoint: 'host.domain.com'}).search({query: 'foo', cursor: 'initial', queryOptions: '{}'}).build()\n+ req = new AWS.CloudSearchDomain({endpoint: 'host.domain.com'})\n+ .search({query: 'foo', cursor: 'initial', queryOptions: '{}'})\n+ .removeListener('build', AWS.CloudSearchDomain.prototype.convertGetToPost)",
"comment_created_at": "2016-04-29T21:39:14+00:00",
"comment_author": "LiuJoyceC",
"comment_body": "This test is testing whether the query string is alphabetically sorted by its field names. The listener that I'm removing here previously did not exist, as it was added as part of this PR, so I'm not removing something that used to be there. Since the listener I added in this PR changes the GET request to a POST, there is no longer a query string, and the test cannot check if it's alphabetically sorted. Therefore, I removed the listener so that just for the purpose of this test, the request is still a GET and has a query string. The other two options I have if I don't remove this listener is to switch the test to use a different operation (I could use `suggest` instead of `search`) or to remove this test altogether.\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "92718343",
"pr_number": 1268,
"pr_file": "test/dynamodb/document_client.coffee",
"created_at": "2016-12-15T22:53:42+00:00",
"commented_code": "foo: S: 'bar'\n expect(translateInput(input)).to.eql(params)\n\n it 'translates empty strings', ->",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "92718343",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1268,
"pr_file": "test/dynamodb/document_client.coffee",
"discussion_id": "92718343",
"commented_code": "@@ -52,6 +52,39 @@ describe 'AWS.DynamoDB.DocumentClient', ->\n foo: S: 'bar'\n expect(translateInput(input)).to.eql(params)\n \n+ it 'translates empty strings', ->",
"comment_created_at": "2016-12-15T22:53:42+00:00",
"comment_author": "chrisradek",
"comment_body": "For all these tests where you're making sure we remove empty inputs, can you also add tests to verify that we don't translate empty strings/sets/buffers if `convertEmptyValues` isn't set?",
"pr_file_module": null
}
]
},
{
"discussion_id": "145843268",
"pr_number": 1765,
"pr_file": "features/directconnect/directconnect.feature",
"created_at": "2017-10-19T22:48:11+00:00",
"commented_code": "I want to use AWS Direct Connect\n\n Scenario: Managing connections\n Given I create a Direct Connect connection with name prefix \"aws-sdk-js\"\n Then I should get a Direct Connect connection ID\n And I describe the connection\n Then the bandwidth should match the connection bandwidth\n And I delete the Direct Connect connection\n Scenario: describe connections\n When I describe the connection\n Then I should get response of type \"Array\"",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "145843268",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1765,
"pr_file": "features/directconnect/directconnect.feature",
"discussion_id": "145843268",
"commented_code": "@@ -4,12 +4,9 @@ Feature: AWS Direct Connect\n \n I want to use AWS Direct Connect\n \n- Scenario: Managing connections\n- Given I create a Direct Connect connection with name prefix \"aws-sdk-js\"\n- Then I should get a Direct Connect connection ID\n- And I describe the connection\n- Then the bandwidth should match the connection bandwidth\n- And I delete the Direct Connect connection\n+ Scenario: describe connections\n+ When I describe the connection\n+ Then I should get response of type \"Array\"",
"comment_created_at": "2017-10-19T22:48:11+00:00",
"comment_author": "chrisradek",
"comment_body": "We have a standard way of testing list/describe operations in most of our feature tests that look like this:\r\nhttps://github.com/aws/aws-sdk-js/blob/v2.135.0/features/acm/acm.feature#L7-L10\r\n\r\nIf you follow this patten, you don't have to create your own step definitions, since cucumber will use the ones defined here:\r\nhttps://github.com/aws/aws-sdk-js/blob/v2.135.0/features/extra/hooks.js#L56",
"pr_file_module": null
}
]
}
]


@@ -38,73 +38,3 @@ Scenario: Managing connections
it('translates empty strings when convertEmptyValues is true', -> ...)
it('does not translate empty strings when convertEmptyValues is false', -> ...)
```
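A JavaScript rendering of the paired cases (the repo's actual specs are CoffeeScript; these mocha-style names and commented assertions are illustrative): `convertEmptyValues` is a real `DocumentClient` option, and each positive test gets a negative twin covering the default behavior.

```js
var AWS = require('aws-sdk');

describe('AWS.DynamoDB.DocumentClient', function() {
  it('translates empty strings when convertEmptyValues is true', function() {
    var client = new AWS.DynamoDB.DocumentClient({convertEmptyValues: true});
    // expect '' to be marshalled as {NULL: true}
  });

  it('does not translate empty strings when convertEmptyValues is not set', function() {
    var client = new AWS.DynamoDB.DocumentClient();
    // expect '' to remain {S: ''}
  });
});
```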


@@ -0,0 +1,36 @@
[
{
"discussion_id": "274981081",
"pr_number": 2622,
"pr_file": "lib/model/shape.js",
"created_at": "2019-04-12T16:35:33+00:00",
"commented_code": "function BinaryShape() {\n Shape.apply(this, arguments);\n this.toType = util.base64.decode;\n this.toType = function(value) {\n var buf = util.base64.decode(value);\n if (this.isSensitive && util.isNode() && typeof util.Buffer.alloc === 'function') {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "274981081",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2622,
"pr_file": "lib/model/shape.js",
"discussion_id": "274981081",
"commented_code": "@@ -354,7 +354,16 @@ function IntegerShape() {\n \n function BinaryShape() {\n Shape.apply(this, arguments);\n- this.toType = util.base64.decode;\n+ this.toType = function(value) {\n+ var buf = util.base64.decode(value);\n+ if (this.isSensitive && util.isNode() && typeof util.Buffer.alloc === 'function') {",
"comment_created_at": "2019-04-12T16:35:33+00:00",
"comment_author": "seebees",
"comment_body": "Maybe a comment to explain why. Word smith to your liking :)\r\n\r\n```javascript\r\n /* Node.js can create a Buffer that is not isolated.\r\n * i.e. buf.byteLength !== buf.buffer.byteLength\r\n * This means that the sensitive data is accessible to anyone with access to buf.buffer.\r\n * If this is the node shared Buffer, then other code within this process _could_ find this secret.\r\n * Copy sensitive data to an isolated Buffer and zero the sensitive data.\r\n * While this is safe to do here, copying this code somewhere else may produce unexpected results.\r\n */\r\n```\r\n\r\nAdditionally, why not check `buf.byteLength !== buf.buffer.byteLength`? And then handle the solution in node or the browser?",
"pr_file_module": null
},
{
"comment_id": "275458536",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2622,
"pr_file": "lib/model/shape.js",
"discussion_id": "274981081",
"commented_code": "@@ -354,7 +354,16 @@ function IntegerShape() {\n \n function BinaryShape() {\n Shape.apply(this, arguments);\n- this.toType = util.base64.decode;\n+ this.toType = function(value) {\n+ var buf = util.base64.decode(value);\n+ if (this.isSensitive && util.isNode() && typeof util.Buffer.alloc === 'function') {",
"comment_created_at": "2019-04-15T17:08:02+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Thanks for the information, I will update the comment. \r\n> why not check buf.byteLength !== buf.buffer.byteLength?\r\n\r\nI'm not sure whether `buf.buffer` would be undefined in browser polyfill, it's just safer this way. And it's not clear in Node doc on when the `buf.buffer` is added. In very old Node, it should be `buf.parent`. Checking availability of `alloc` seems clearer and safer because we will use this API to locate the buffer anyway.",
"pr_file_module": null
}
]
}
]


@@ -40,41 +40,3 @@ function handleSensitiveData(base64Value) {
```
Always consider whether the data you handle is sensitive enough to warrant this additional protection. The pattern is particularly important when handling authentication tokens, encryption keys, and other credentials.
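A sketch of the isolation step under discussion, assuming a Node.js version that provides `Buffer.alloc` (the same capability check the patch uses): copy the sensitive bytes into a freshly allocated buffer, then zero the original so the secret does not linger in Node's shared buffer pool.

```js
function isolateSensitiveBuffer(buf) {
  // Very old Node versions lack Buffer.alloc; fall back to the original buffer.
  if (typeof Buffer.alloc !== 'function') return buf;

  // Buffer.alloc returns zero-filled, non-pooled memory, so the copy is
  // isolated: isolated.byteLength === isolated.buffer.byteLength.
  var isolated = Buffer.alloc(buf.length);
  buf.copy(isolated);
  buf.fill(0); // scrub the potentially pooled original
  return isolated;
}
```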


@@ -0,0 +1,80 @@
[
{
"discussion_id": "70883152",
"pr_number": 1054,
"pr_file": "lib/signers/v4.js",
"created_at": "2016-07-14T21:02:02+00:00",
"commented_code": "return AWS.util.crypto.hmac(kCredentials, this.stringToSign(datetime), 'hex');\n }\n\n cachedSecret[this.serviceName] = {\n cachedSecret[cacheIdentifier] = {\n region: this.request.region, date: date,\n key: kCredentials, akid: credentials.accessKeyId\n };",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "70883152",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1054,
"pr_file": "lib/signers/v4.js",
"discussion_id": "70883152",
"commented_code": "@@ -117,13 +118,13 @@ AWS.Signers.V4 = inherit(AWS.Signers.RequestSigner, {\n return AWS.util.crypto.hmac(kCredentials, this.stringToSign(datetime), 'hex');\n }\n \n- cachedSecret[this.serviceName] = {\n+ cachedSecret[cacheIdentifier] = {\n region: this.request.region, date: date,\n key: kCredentials, akid: credentials.accessKeyId\n };",
"comment_created_at": "2016-07-14T21:02:02+00:00",
"comment_author": "LiuJoyceC",
"comment_body": "We should put a maximum size constraint on `cachedSecret`. Before, there was only one cache per service (and only the last signature key for that service is kept), so the cache was necessarily bounded by the number of services. There is no upper bound on the number of service clients generated. Some applications could be generating a new service client for every request. The service client objects get garbage-collected eventually, but the cache doesn't and could grow to millions of objects fairly quickly and hog a lot of memory. Perhaps we could have a `cacheIdentifierQueue` of some sort so that when it's length reaches a certain number, we can get the oldest `cacheIdentifier` and delete it from `cachedSecret`.\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "101653214",
"pr_number": 1350,
"pr_file": "lib/signers/v4_credentials.js",
"created_at": "2017-02-16T23:25:57+00:00",
"commented_code": "var AWS = require('../core');\n\n/**\n * @api private\n */\nvar cachedSecret = {};\n\n/**\n * @api private\n */\nvar cacheQueue = [];\n\n/**\n * @api private\n */\nvar maxCacheEntries = 50;\n\n/**\n * @api private\n */\nvar v4Identifier = 'aws4_request';\n\nAWS.Signers.v4_credentials = {\n /**\n * @api private\n *\n * @param date [String]\n * @param region [String]\n * @param serviceName [String]\n * @returns {string}\n */\n createScope: function createScope(date, region, serviceName) {\n return [\n date.substr(0, 8),\n region,\n serviceName,\n v4Identifier\n ].join('/');\n },\n\n /**\n * @api private\n *\n * @param credentials [Credentials]\n * @param date [String]\n * @param region [String]\n * @param service [String]\n * @param shouldCache [Boolean]\n * @returns {String}\n */\n getSigningKey: function getSigningKey(\n credentials,\n date,\n region,\n service,\n shouldCache\n ) {\n var cacheKey = [credentials.accessKeyId, date, region, service].join('_');",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "101653214",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1350,
"pr_file": "lib/signers/v4_credentials.js",
"discussion_id": "101653214",
"commented_code": "@@ -0,0 +1,95 @@\n+var AWS = require('../core');\n+\n+/**\n+ * @api private\n+ */\n+var cachedSecret = {};\n+\n+/**\n+ * @api private\n+ */\n+var cacheQueue = [];\n+\n+/**\n+ * @api private\n+ */\n+var maxCacheEntries = 50;\n+\n+/**\n+ * @api private\n+ */\n+var v4Identifier = 'aws4_request';\n+\n+AWS.Signers.v4_credentials = {\n+ /**\n+ * @api private\n+ *\n+ * @param date [String]\n+ * @param region [String]\n+ * @param serviceName [String]\n+ * @returns {string}\n+ */\n+ createScope: function createScope(date, region, serviceName) {\n+ return [\n+ date.substr(0, 8),\n+ region,\n+ serviceName,\n+ v4Identifier\n+ ].join('/');\n+ },\n+\n+ /**\n+ * @api private\n+ *\n+ * @param credentials [Credentials]\n+ * @param date [String]\n+ * @param region [String]\n+ * @param service [String]\n+ * @param shouldCache [Boolean]\n+ * @returns {String}\n+ */\n+ getSigningKey: function getSigningKey(\n+ credentials,\n+ date,\n+ region,\n+ service,\n+ shouldCache\n+ ) {\n+ var cacheKey = [credentials.accessKeyId, date, region, service].join('_');",
"comment_created_at": "2017-02-16T23:25:57+00:00",
"comment_author": "chrisradek",
"comment_body": "We had 2 issues reported because this cache key wasn't unique enough:\r\nhttps://github.com/aws/aws-sdk-js/pull/1054\r\n\r\nYou may need to pass the serviceClientId as well.\r\n",
"pr_file_module": null
},
{
"comment_id": "101667655",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1350,
"pr_file": "lib/signers/v4_credentials.js",
"discussion_id": "101653214",
"commented_code": "@@ -0,0 +1,95 @@\n+var AWS = require('../core');\n+\n+/**\n+ * @api private\n+ */\n+var cachedSecret = {};\n+\n+/**\n+ * @api private\n+ */\n+var cacheQueue = [];\n+\n+/**\n+ * @api private\n+ */\n+var maxCacheEntries = 50;\n+\n+/**\n+ * @api private\n+ */\n+var v4Identifier = 'aws4_request';\n+\n+AWS.Signers.v4_credentials = {\n+ /**\n+ * @api private\n+ *\n+ * @param date [String]\n+ * @param region [String]\n+ * @param serviceName [String]\n+ * @returns {string}\n+ */\n+ createScope: function createScope(date, region, serviceName) {\n+ return [\n+ date.substr(0, 8),\n+ region,\n+ serviceName,\n+ v4Identifier\n+ ].join('/');\n+ },\n+\n+ /**\n+ * @api private\n+ *\n+ * @param credentials [Credentials]\n+ * @param date [String]\n+ * @param region [String]\n+ * @param service [String]\n+ * @param shouldCache [Boolean]\n+ * @returns {String}\n+ */\n+ getSigningKey: function getSigningKey(\n+ credentials,\n+ date,\n+ region,\n+ service,\n+ shouldCache\n+ ) {\n+ var cacheKey = [credentials.accessKeyId, date, region, service].join('_');",
"comment_created_at": "2017-02-17T01:16:05+00:00",
"comment_author": "jeskew",
"comment_body": "The serviceClientId is orthogonal to the uniqueness of this cache key, but I see your point. I'll use an HMAC of the AKID and secret key.",
"pr_file_module": null
}
]
},
{
"discussion_id": "229471764",
"pr_number": 2253,
"pr_file": "lib/discover_endpoint.js",
"created_at": "2018-10-30T20:23:49+00:00",
"commented_code": "var AWS = require('./core');\nvar util = require('./util');\nvar ENDPOINT_OPERATION_MAX_RETRIES = 60;\nvar endpointDiscoveryEnabledEnv = 'AWS_ENABLE_ENDPOINT_DISCOVERY';\n\n/**\n * Generate key to index the endpoints in the cache\n * @return [map<String,String>] object with keys to index endpoints.\n * @api private\n */\nfunction getCacheKey(request) {\n var service = request.service;\n var api = service.api || {};\n var operations = api.operations;\n var identifiers = {};\n if (operations[request.operation] && operations[request.operation].name) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "229471764",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2253,
"pr_file": "lib/discover_endpoint.js",
"discussion_id": "229471764",
"commented_code": "@@ -0,0 +1,340 @@\n+var AWS = require('./core');\n+var util = require('./util');\n+var ENDPOINT_OPERATION_MAX_RETRIES = 60;\n+var endpointDiscoveryEnabledEnv = 'AWS_ENABLE_ENDPOINT_DISCOVERY';\n+\n+/**\n+ * Generate key to index the endpoints in the cache\n+ * @return [map<String,String>] object with keys to index endpoints.\n+ * @api private\n+ */\n+function getCacheKey(request) {\n+ var service = request.service;\n+ var api = service.api || {};\n+ var operations = api.operations;\n+ var identifiers = {};\n+ if (operations[request.operation] && operations[request.operation].name) {",
"comment_created_at": "2018-10-30T20:23:49+00:00",
"comment_author": "chrisradek",
"comment_body": "I think operation is only required if custom identifiers are defined for an operation. Is that not the case? Might help to keep the size of your cache down if we omitted operation if it isn't needed.",
"pr_file_module": null
}
]
}
]


@@ -49,85 +49,3 @@ function getCacheKey(request) {
```
This approach prevents memory issues in applications that might generate many cache entries over time, while maintaining the performance benefits of caching.
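A sketch of the FIFO eviction scheme the reviewers converged on, using the names that appear in `v4_credentials.js` (`cachedSecret`, `cacheQueue`, `maxCacheEntries`); the put helper itself is illustrative:

```js
var cachedSecret = {};
var cacheQueue = [];
var maxCacheEntries = 50;

function putSigningKey(cacheKey, signingKey) {
  if (cacheQueue.length >= maxCacheEntries) {
    // Evict the oldest entry so the cache stays bounded even when an
    // application creates a new service client for every request.
    var oldestKey = cacheQueue.shift();
    delete cachedSecret[oldestKey];
  }
  cacheQueue.push(cacheKey);
  cachedSecret[cacheKey] = signingKey;
}
```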


@@ -0,0 +1,90 @@
[
{
"discussion_id": "131797509",
"pr_number": 1662,
"pr_file": "scripts/lib/ts-generator.js",
"created_at": "2017-08-08T00:36:36+00:00",
"commented_code": "code += tabs(tabCount) + 'export type ' + shapeKey + ' = Date;\\n';\n } else if (type === 'boolean') {\n code += tabs(tabCount) + 'export type ' + shapeKey + ' = boolean;\\n';\n } else if (type === 'blob' || type === 'binary') {\n code += tabs(tabCount) + 'export type ' + shapeKey + ' = Buffer|Uint8Array|Blob|string;\\n';\n } else if (type === 'blob' || type === 'binary') { \n code += tabs(tabCount) + 'export type ' + shapeKey + ' = Buffer|Uint8Array|Blob|string'\n + self.addReadableType(shapeKey)",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "131797509",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1662,
"pr_file": "scripts/lib/ts-generator.js",
"discussion_id": "131797509",
"commented_code": "@@ -300,8 +333,10 @@ TSGenerator.prototype.generateTypingsFromShape = function generateTypingsFromSha\n code += tabs(tabCount) + 'export type ' + shapeKey + ' = Date;\\n';\n } else if (type === 'boolean') {\n code += tabs(tabCount) + 'export type ' + shapeKey + ' = boolean;\\n';\n- } else if (type === 'blob' || type === 'binary') {\n- code += tabs(tabCount) + 'export type ' + shapeKey + ' = Buffer|Uint8Array|Blob|string;\\n';\n+ } else if (type === 'blob' || type === 'binary') { \n+ code += tabs(tabCount) + 'export type ' + shapeKey + ' = Buffer|Uint8Array|Blob|string'\n+ + self.addReadableType(shapeKey)",
"comment_created_at": "2017-08-08T00:36:36+00:00",
"comment_author": "jeskew",
"comment_body": "Style nit: continuation lines (lines that are part of the same statement as the preceding line) should be indented.",
"pr_file_module": null
}
]
},
{
"discussion_id": "138405077",
"pr_number": 1716,
"pr_file": "test/service.spec.js",
"created_at": "2017-09-12T16:46:44+00:00",
"commented_code": "});\n });\n });\n return describe('customizeRequests', function() {\n describe('customizeRequests', function() {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "138405077",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1716,
"pr_file": "test/service.spec.js",
"discussion_id": "138405077",
"commented_code": "@@ -718,7 +728,7 @@\n });\n });\n });\n- return describe('customizeRequests', function() {\n+ describe('customizeRequests', function() {",
"comment_created_at": "2017-09-12T16:46:44+00:00",
"comment_author": "jeskew",
"comment_body": "Ditto re: empty lines. One should be between each test suite, too.",
"pr_file_module": null
}
]
},
{
"discussion_id": "147299241",
"pr_number": 1782,
"pr_file": "test/event_listeners.spec.js",
"created_at": "2017-10-27T00:17:07+00:00",
"commented_code": "return expect(data).to.match(match);\n });\n });\n describe('logging sensitive information', function() {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "147299241",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1782,
"pr_file": "test/event_listeners.spec.js",
"discussion_id": "147299241",
"commented_code": "@@ -801,6 +802,156 @@\n return expect(data).to.match(match);\n });\n });\n+ describe('logging sensitive information', function() {",
"comment_created_at": "2017-10-27T00:17:07+00:00",
"comment_author": "jeskew",
"comment_body": "Make sure to keep tests and suites separated by empty lines.",
"pr_file_module": null
}
]
},
{
"discussion_id": "193825440",
"pr_number": 2074,
"pr_file": "lib/event_listeners.js",
"created_at": "2018-06-07T17:16:08+00:00",
"commented_code": "});\n\n httpResp.on('end', function onEnd() {\n if (!stream || !stream.didCallback) {\n if ((AWS.HttpClient.streamsApiVersion !== 2) ||\n // don't concat response chunks when using event streams with streams unless response is unsuccessful\n (!(operation.hasEventOutput && service.successfulResponse(resp)) &&\n (!stream || !stream.didCallback))",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "193825440",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2074,
"pr_file": "lib/event_listeners.js",
"discussion_id": "193825440",
"commented_code": "@@ -264,7 +277,11 @@ AWS.EventListeners = {\n });\n \n httpResp.on('end', function onEnd() {\n- if (!stream || !stream.didCallback) {\n+ if ((AWS.HttpClient.streamsApiVersion !== 2) ||\n+ // don't concat response chunks when using event streams with streams unless response is unsuccessful\n+ (!(operation.hasEventOutput && service.successfulResponse(resp)) &&\n+ (!stream || !stream.didCallback))",
"comment_created_at": "2018-06-07T17:16:08+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "There's no issue here but a little hard to follow. Will using another if statement be more readable? like this:\r\n\r\n```javascript\r\nif (!stream || !stream.didCallback) {\r\n//don't concat response chunks when using event streams unless response is unsuccessful\r\n if ((AWS.HttpClient.streamsApiVersion === 2) && operation.hasEventOutput && service.successfulResponse(resp) ) {\r\n return\r\n }\r\n resp.request.emit('httpDone');\r\n done();\r\n}\r\n```\r\nI think it's more readable(?) It's your call.",
"pr_file_module": null
}
]
}
]


@@ -38,95 +38,3 @@ if (!stream || !stream.didCallback) {
```
Consistent formatting makes code easier to scan, understand, and maintain, reducing the cognitive load for everyone working with the codebase.
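The early-return form suggested in the last review above shows the same idea applied to control flow: one condition per branch scans more easily than a single compound boolean (identifiers come from the surrounding listener in `lib/event_listeners.js`).

```js
httpResp.on('end', function onEnd() {
  if (!stream || !stream.didCallback) {
    // don't concat response chunks when using event streams unless response is unsuccessful
    if (AWS.HttpClient.streamsApiVersion === 2 &&
        operation.hasEventOutput && service.successfulResponse(resp)) {
      return;
    }
    resp.request.emit('httpDone');
    done();
  }
});
```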

View File

@@ -0,0 +1,46 @@
[
{
"discussion_id": "102779549",
"pr_number": 1370,
"pr_file": "ts/dynamodb.ts",
"created_at": "2017-02-23T18:14:29+00:00",
"commented_code": "}\n};",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "102779549",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1370,
"pr_file": "ts/dynamodb.ts",
"discussion_id": "102779549",
"commented_code": "@@ -9,6 +9,18 @@ const params: DynamoDB.DocumentClient.GetItemInput = {\n }\n };\n ",
"comment_created_at": "2017-02-23T18:14:29+00:00",
"comment_author": "chrisradek",
"comment_body": "Can you add tests to make sure we can explicitly set a variable to the `DynamoDB.Converter` type? Also, would be nice to verify we can access `DocumentClient.ConverterOptions`.\r\n\r\nCan be simple, like:\r\n```javascript\r\nconst converter: Converter = DynamoDB.Converter;\r\n// and a test for input with converter options\r\nconst converterOptions: DynamoDB.DocumentClient.ConverterOptions = {convertEmptyValues: true};\r\nDynamoDB.Converter.input('string', converterOptions);\r\n```\r\n\r\nHow hard would it be to also expose ConverterOptions on the Converter namespace? Just feels a little odd having to access it off the DocumentClient namespace instead.",
"pr_file_module": null
}
]
},
{
"discussion_id": "130689743",
"pr_number": 1655,
"pr_file": "lib/credentials/credential_provider_chain.d.ts",
"created_at": "2017-08-01T18:29:43+00:00",
"commented_code": "*/\n resolve(callback:(err: AWSError, credentials: Credentials) => void): CredentialProviderChain;\n /**\n * Return a Promise on resolve() function\n */\n resolvePromise(resolve?:(credentials: Credentials) => void, reject?:(err: AWSError) => void): Promise<any>;",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "130689743",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1655,
"pr_file": "lib/credentials/credential_provider_chain.d.ts",
"discussion_id": "130689743",
"commented_code": "@@ -10,6 +10,10 @@ export class CredentialProviderChain extends Credentials {\n */\n resolve(callback:(err: AWSError, credentials: Credentials) => void): CredentialProviderChain;\n /**\n+ * Return a Promise on resolve() function\n+ */\n+ resolvePromise(resolve?:(credentials: Credentials) => void, reject?:(err: AWSError) => void): Promise<any>;",
"comment_created_at": "2017-08-01T18:29:43+00:00",
"comment_author": "jeskew",
"comment_body": "This isn't quite right. The return value should use `Credentials` instead of `any`, and the method itself doesn't take any callbacks.",
"pr_file_module": null
}
]
}
]

View File

@@ -23,51 +23,3 @@ const converter: any = DynamoDB.Converter;
const options: DynamoDB.DocumentClient.ConverterOptions = { /* ... */ }; // accessed via the DocumentClient namespace even though it logically belongs on Converter
function resolvePromise(): Promise<any> { /* ... */ } // should resolve with Credentials, not any, and takes no callback arguments
```
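
For contrast, here is a corrected version of those declarations following the review comments above, as a minimal sketch against the aws-sdk v2 typings (the `resolvePromise` body is elided with `declare`):

```typescript
import * as AWS from 'aws-sdk';

// Explicitly typed instead of `any`, so the compiler checks usage.
const converter: typeof AWS.DynamoDB.Converter = AWS.DynamoDB.Converter;

// ConverterOptions is exposed on the DocumentClient namespace.
const converterOptions: AWS.DynamoDB.DocumentClient.ConverterOptions = {
  convertEmptyValues: true
};
AWS.DynamoDB.Converter.input('string', converterOptions);

// Per the review: resolves with Credentials and takes no callback arguments.
declare function resolvePromise(): Promise<AWS.Credentials>;
```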

View File

@@ -0,0 +1,106 @@
[
{
"discussion_id": "82888371",
"pr_number": 1079,
"pr_file": "lib/util.js",
"created_at": "2016-10-11T21:03:18+00:00",
"commented_code": "/**\n * @api private\n */\n addPromisesToRequests: function addPromisesToRequests(constructor, PromiseDependency) {\n PromiseDependency = PromiseDependency || null;\n if (!PromiseDependency && typeof Promise !== 'undefined') {\n PromiseDependency = Promise;\n }\n if (typeof PromiseDependency !== 'function') {\n delete constructor.prototype.promise;\n return;\n }\n constructor.prototype.promise = function promise() {\n var self = this;\n return new PromiseDependency(function(resolve, reject) {\n self.on('complete', function(resp) {\n if (resp.error) {\n reject(resp.error);\n } else {\n resolve(resp.data);\n }\n addPromises: function addPromises(constructors, PromiseDependency) {\n if (!AWS) AWS = require('./core');\n if (!AWS.config) require('./config');\n if (PromiseDependency === undefined) PromiseDependency = AWS.config.getPromisesDependency();\n if (typeof PromiseDependency !== 'function') var deletePromise = true;\n if (!Array.isArray(constructors)) constructors = [constructors];\n\n var promisifyMethod = function(methodName) {\n return function promise() {\n var self = this;\n return new PromiseDependency(function(resolve, reject) {\n self[methodName](function(err, data) {\n if (err) {\n reject(err);\n } else {\n resolve(data);\n }\n });\n });\n self.runTo();\n });\n };\n };\n\n for (var ind = 0; ind < constructors.length; ind++) {\n var constructor = constructors[ind];\n switch (constructor.name) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "82888371",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1079,
"pr_file": "lib/util.js",
"discussion_id": "82888371",
"commented_code": "@@ -748,27 +748,74 @@ var util = {\n /**\n * @api private\n */\n- addPromisesToRequests: function addPromisesToRequests(constructor, PromiseDependency) {\n- PromiseDependency = PromiseDependency || null;\n- if (!PromiseDependency && typeof Promise !== 'undefined') {\n- PromiseDependency = Promise;\n- }\n- if (typeof PromiseDependency !== 'function') {\n- delete constructor.prototype.promise;\n- return;\n- }\n- constructor.prototype.promise = function promise() {\n- var self = this;\n- return new PromiseDependency(function(resolve, reject) {\n- self.on('complete', function(resp) {\n- if (resp.error) {\n- reject(resp.error);\n- } else {\n- resolve(resp.data);\n- }\n+ addPromises: function addPromises(constructors, PromiseDependency) {\n+ if (!AWS) AWS = require('./core');\n+ if (!AWS.config) require('./config');\n+ if (PromiseDependency === undefined) PromiseDependency = AWS.config.getPromisesDependency();\n+ if (typeof PromiseDependency !== 'function') var deletePromise = true;\n+ if (!Array.isArray(constructors)) constructors = [constructors];\n+\n+ var promisifyMethod = function(methodName) {\n+ return function promise() {\n+ var self = this;\n+ return new PromiseDependency(function(resolve, reject) {\n+ self[methodName](function(err, data) {\n+ if (err) {\n+ reject(err);\n+ } else {\n+ resolve(data);\n+ }\n+ });\n });\n- self.runTo();\n- });\n+ };\n+ };\n+\n+ for (var ind = 0; ind < constructors.length; ind++) {\n+ var constructor = constructors[ind];\n+ switch (constructor.name) {",
"comment_created_at": "2016-10-11T21:03:18+00:00",
"comment_author": "chrisradek",
"comment_body": "It looks like `constructor.name` may not work in all the browsers we support and could have issues when minifiers are used:\nhttps://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#Browser_compatibility\n\nCan you do an equality check against the constructor instead?\n",
"pr_file_module": null
},
{
"comment_id": "82890974",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1079,
"pr_file": "lib/util.js",
"discussion_id": "82888371",
"commented_code": "@@ -748,27 +748,74 @@ var util = {\n /**\n * @api private\n */\n- addPromisesToRequests: function addPromisesToRequests(constructor, PromiseDependency) {\n- PromiseDependency = PromiseDependency || null;\n- if (!PromiseDependency && typeof Promise !== 'undefined') {\n- PromiseDependency = Promise;\n- }\n- if (typeof PromiseDependency !== 'function') {\n- delete constructor.prototype.promise;\n- return;\n- }\n- constructor.prototype.promise = function promise() {\n- var self = this;\n- return new PromiseDependency(function(resolve, reject) {\n- self.on('complete', function(resp) {\n- if (resp.error) {\n- reject(resp.error);\n- } else {\n- resolve(resp.data);\n- }\n+ addPromises: function addPromises(constructors, PromiseDependency) {\n+ if (!AWS) AWS = require('./core');\n+ if (!AWS.config) require('./config');\n+ if (PromiseDependency === undefined) PromiseDependency = AWS.config.getPromisesDependency();\n+ if (typeof PromiseDependency !== 'function') var deletePromise = true;\n+ if (!Array.isArray(constructors)) constructors = [constructors];\n+\n+ var promisifyMethod = function(methodName) {\n+ return function promise() {\n+ var self = this;\n+ return new PromiseDependency(function(resolve, reject) {\n+ self[methodName](function(err, data) {\n+ if (err) {\n+ reject(err);\n+ } else {\n+ resolve(data);\n+ }\n+ });\n });\n- self.runTo();\n- });\n+ };\n+ };\n+\n+ for (var ind = 0; ind < constructors.length; ind++) {\n+ var constructor = constructors[ind];\n+ switch (constructor.name) {",
"comment_created_at": "2016-10-11T21:16:42+00:00",
"comment_author": "chrisradek",
"comment_body": "Actually, what do you think about giving each class that should return a promise a static method that accepts a Promise constructor, then the class can control how it should promisify itself?\n\nThe pros to that approach would be the logic for adding promises would be controlled by each class, rather than defined in a long if/switch statement within a utility function. If the method to promisify a class was named the same for all classes, you can just check if the constructor has that method then call it, instead of maintaining a list of enums to check a class against. You could still make the `promisifyMethod` a utility method if that reduces code duplication.\n",
"pr_file_module": null
},
{
"comment_id": "83119747",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1079,
"pr_file": "lib/util.js",
"discussion_id": "82888371",
"commented_code": "@@ -748,27 +748,74 @@ var util = {\n /**\n * @api private\n */\n- addPromisesToRequests: function addPromisesToRequests(constructor, PromiseDependency) {\n- PromiseDependency = PromiseDependency || null;\n- if (!PromiseDependency && typeof Promise !== 'undefined') {\n- PromiseDependency = Promise;\n- }\n- if (typeof PromiseDependency !== 'function') {\n- delete constructor.prototype.promise;\n- return;\n- }\n- constructor.prototype.promise = function promise() {\n- var self = this;\n- return new PromiseDependency(function(resolve, reject) {\n- self.on('complete', function(resp) {\n- if (resp.error) {\n- reject(resp.error);\n- } else {\n- resolve(resp.data);\n- }\n+ addPromises: function addPromises(constructors, PromiseDependency) {\n+ if (!AWS) AWS = require('./core');\n+ if (!AWS.config) require('./config');\n+ if (PromiseDependency === undefined) PromiseDependency = AWS.config.getPromisesDependency();\n+ if (typeof PromiseDependency !== 'function') var deletePromise = true;\n+ if (!Array.isArray(constructors)) constructors = [constructors];\n+\n+ var promisifyMethod = function(methodName) {\n+ return function promise() {\n+ var self = this;\n+ return new PromiseDependency(function(resolve, reject) {\n+ self[methodName](function(err, data) {\n+ if (err) {\n+ reject(err);\n+ } else {\n+ resolve(data);\n+ }\n+ });\n });\n- self.runTo();\n- });\n+ };\n+ };\n+\n+ for (var ind = 0; ind < constructors.length; ind++) {\n+ var constructor = constructors[ind];\n+ switch (constructor.name) {",
"comment_created_at": "2016-10-12T23:29:17+00:00",
"comment_author": "LiuJoyceC",
"comment_body": "The reason I originally checked `constructor.name` is because I hadn't originally planned on requiring `core.js`, so I didn't have access to the actual constructors for an equality check before I added that require statement. I may be refactoring the require out anyway (as discussed above). I'll look into adding the static method for each class.\n",
"pr_file_module": null
}
]
},
{
"discussion_id": "138417207",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"created_at": "2017-09-12T17:36:01+00:00",
"commented_code": "return uploader;\n }\n});\n\n\n/**\n * @api private\n */\nAWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) {\n this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency);",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "138417207",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"discussion_id": "138417207",
"commented_code": "@@ -1068,3 +1114,20 @@ AWS.util.update(AWS.S3.prototype, {\n return uploader;\n }\n });\n+\n+\n+/**\n+ * @api private\n+ */\n+AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) {\n+ this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency);",
"comment_created_at": "2017-09-12T17:36:01+00:00",
"comment_author": "chrisradek",
"comment_body": "The `AWS.util.promisifyMethod` function currently only works for functions that accept a callback function as the first parameter. For example, `AWS.Request.send` and `AWS.Credentials.get` both accept just a callback.\r\n\r\nThe `s3.getSignedUrl` method accepts an operation name, params, and a callback, so using this method won't work. You should be able to set `getSignedUrlPromise` to a function that returns a new Promise. This promise can simply wrap the getSignedUrl function.",
"pr_file_module": null
},
{
"comment_id": "138644601",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"discussion_id": "138417207",
"commented_code": "@@ -1068,3 +1114,20 @@ AWS.util.update(AWS.S3.prototype, {\n return uploader;\n }\n });\n+\n+\n+/**\n+ * @api private\n+ */\n+AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) {\n+ this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency);",
"comment_created_at": "2017-09-13T14:55:49+00:00",
"comment_author": "IsaiahJTurner",
"comment_body": "Happy to do that but that solution could lead replicated code down the line. For example, if/when this functionality is added to `AWS.CloudFront.Signer.getSignedUrl`, we'd need to wrap it with the same logic.\r\n\r\nWhat do you think about modifying `AWS.util.promisifyMethod` so that the generated promise function accepts arguments? \r\n\r\nSomething like this:\r\n```js\r\nfunction promisifyMethod(methodName, PromiseDependency) {\r\n return function promise() {\r\n var self = this;\r\n var args = Array.prototype.slice.call(arguments);\r\n return new PromiseDependency(function(resolve, reject) {\r\n args.push(function(err, data) {\r\n if (err) {\r\n reject(err);\r\n } else {\r\n resolve(data);\r\n }\r\n });\r\n self[methodName].apply(null, args);\r\n });\r\n };\r\n}\r\n```\r\nMy solution does lead to one of two downsides:\r\n- Supplying too many arguments will give unexpected behavior like a \"callback is not a function\" error.\r\n- To prevent that with a more informative error like \"Too many arguments supplied\" I'd need to read `self[methodName].length` to get the expected arguments count but this would be incompatible with any functions that support the arguments style `Array.prototype.push([element1[, ...[, elementN]]])` since `Array.prototype.push.length` would only return 1. I'm not sure if any functions in the SDK work like this.\r\n\r\nRegardless of which downside chosen, I still think improving `AWS.util.promisifyMethod` rather than wrapping the function directly is the best option since it is the most extensible. Also, how `AWS.util.promisifyMethod` currently works in contrast with how other `promisify` functions within other JS libraries like Bluebird, es6-promisify, and promisify-node work. ",
"pr_file_module": null
},
{
"comment_id": "138655481",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"discussion_id": "138417207",
"commented_code": "@@ -1068,3 +1114,20 @@ AWS.util.update(AWS.S3.prototype, {\n return uploader;\n }\n });\n+\n+\n+/**\n+ * @api private\n+ */\n+AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) {\n+ this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency);",
"comment_created_at": "2017-09-13T15:30:28+00:00",
"comment_author": "IsaiahJTurner",
"comment_body": "Went ahead and pushed my concept. Let me know what you think, can easily change it back and use your idea.",
"pr_file_module": null
},
{
"comment_id": "317850908",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1711,
"pr_file": "lib/services/s3.js",
"discussion_id": "138417207",
"commented_code": "@@ -1068,3 +1114,20 @@ AWS.util.update(AWS.S3.prototype, {\n return uploader;\n }\n });\n+\n+\n+/**\n+ * @api private\n+ */\n+AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) {\n+ this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency);",
"comment_created_at": "2019-08-27T00:32:33+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Confirmed that the [bluebird](http://bluebirdjs.com/docs/api/promise.promisify.html) also use the last argument as the callback function. The idea of appending the promise-determining callback function to the `arguments` looks good to me.",
"pr_file_module": null
}
]
}
]

View File

@@ -38,111 +38,3 @@ This pattern ensures:
- Support for both simple callback methods and multi-parameter methods
- Proper error handling and promise rejection
- Compatibility with different promise implementations
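
The argument-forwarding `promisifyMethod` proposed in the discussion above, rendered in TypeScript, is shown below. It appends the `(err, data)` callback as the last argument, so it covers `getSignedUrl(operation, params)` as well as callback-only methods; one fix over the quoted snippet is applying the method with `self` rather than `null`, so `this` is preserved on the instance.

```typescript
function promisifyMethod(methodName: string, PromiseDependency: PromiseConstructor) {
  return function promise(this: any, ...args: any[]): Promise<any> {
    const self = this;
    return new PromiseDependency<any>(function (resolve, reject) {
      // The promise-settling callback is appended as the LAST argument,
      // matching how bluebird's promisify treats callbacks.
      args.push(function (err: Error | null, data: any) {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
      self[methodName].apply(self, args);
    });
  };
}

// Hypothetical attachment, mirroring the PR's addPromisesToClass hook:
// AWS.S3.prototype.getSignedUrlPromise = promisifyMethod('getSignedUrl', Promise);
```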

View File

@@ -0,0 +1,172 @@
[
{
"discussion_id": "182547608",
"pr_number": 2014,
"pr_file": "test/browserHashes.spec.js",
"created_at": "2018-04-18T19:46:02+00:00",
"commented_code": "if (truncate) {\n digest = digest.slice(0, truncate);\n }\n expect(digest.toString('hex')).to.equal(expected);\n //in node <= 0.10 digest sometimes returns a Dataview, should be buffer.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "182547608",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2014,
"pr_file": "test/browserHashes.spec.js",
"discussion_id": "182547608",
"commented_code": "@@ -43,7 +43,11 @@ describe('Browser hash implementations', function() {\n if (truncate) {\n digest = digest.slice(0, truncate);\n }\n- expect(digest.toString('hex')).to.equal(expected);\n+ //in node <= 0.10 digest sometimes returns a Dataview, should be buffer.",
"comment_created_at": "2018-04-18T19:46:02+00:00",
"comment_author": "chrisradek",
"comment_body": "Are these tests running in node.js? This should only be running in browser environments, and is using the 3rd party `Buffer` package instead of node.js' `Buffer` package. We can place browser-specific tests in a separate folder and exclude them from being run by mocha in the npm unit script.",
"pr_file_module": null
},
{
"comment_id": "182572206",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2014,
"pr_file": "test/browserHashes.spec.js",
"discussion_id": "182547608",
"commented_code": "@@ -43,7 +43,11 @@ describe('Browser hash implementations', function() {\n if (truncate) {\n digest = digest.slice(0, truncate);\n }\n- expect(digest.toString('hex')).to.equal(expected);\n+ //in node <= 0.10 digest sometimes returns a Dataview, should be buffer.",
"comment_created_at": "2018-04-18T21:16:49+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Ah right! The change mean to fix the test. But actually we can remove it out of node test. I didn't realize this is browser-only although the name already told me",
"pr_file_module": null
}
]
},
{
"discussion_id": "184547814",
"pr_number": 1956,
"pr_file": "test/integration_test/helpers.js",
"created_at": "2018-04-26T22:22:30+00:00",
"commented_code": "(function() {\n global.expect = require('chai').expect;\n module.exports = {\n sharedBucket: 'aws-sdk-js-integration',",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "184547814",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/helpers.js",
"discussion_id": "184547814",
"commented_code": "@@ -0,0 +1,25 @@\n+(function() {\n+ global.expect = require('chai').expect;\n+ module.exports = {\n+ sharedBucket: 'aws-sdk-js-integration',",
"comment_created_at": "2018-04-26T22:22:30+00:00",
"comment_author": "chrisradek",
"comment_body": "Probably want to append a timestamp to this as well to make it somewhat unique",
"pr_file_module": null
},
{
"comment_id": "184557074",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/helpers.js",
"discussion_id": "184547814",
"commented_code": "@@ -0,0 +1,25 @@\n+(function() {\n+ global.expect = require('chai').expect;\n+ module.exports = {\n+ sharedBucket: 'aws-sdk-js-integration',",
"comment_created_at": "2018-04-26T23:19:53+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "This is for sharing the test bucket. Maybe this time occasionally we fail to delete the bucket. But next time we will delete this bucket.",
"pr_file_module": null
}
]
},
{
"discussion_id": "184550574",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"created_at": "2018-04-26T22:38:16+00:00",
"commented_code": "var helpers = require('../helpers');\nvar AWS = helpers.AWS;\nvar bucketName = helpers.sharedBucket;\ndescribe('download integrity', function() {\n var params;\n var s3;\n function putObject(customParams, done) {\n params = {\n Bucket: customParams.Bucket || params.Bucket,\n Key: customParams.Key || params.Key,\n };\n s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n }).then(function() {\n return s3.putObject({\n Bucket: params.Bucket,\n Key: params.Key,\n Body: customParams.Body || params.Body\n }).promise();\n }).then(function() {\n return s3.waitFor('objectExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise();\n }).then(function() { \n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot put object: ' + err);\n exit(1);\n });\n }\n\n function delectBucket(done) {\n s3.listObjectVersions({\n Bucket: params.Bucket\n }).promise().then(function(data) {\n var removeObjectsParams = {\n Bucket: params.Bucket,\n Delete: {\n Quiet: false,\n Objects: []\n }\n }\n for (var version of data.Versions) {\n removeObjectsParams.Delete.Objects.push({\n Key: version.Key,\n VersionId: version.VersionId === 'null' ? null : version.VersionId,\n });\n }\n return removeObjectsParams;\n }).then(function(removeObjectsParams) {\n return s3.deleteObjects(removeObjectsParams).promise();\n }).then(function() {\n return s3.waitFor('objectNotExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise()\n }).then(function() {\n return s3.deleteBucket({Bucket: params.Bucket}).promise();\n }).then(function(data) {\n return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n }).then(function() {\n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot delete bucket: ' + err);\n exit(1);\n })\n }\n\n before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "184550574",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T22:38:16+00:00",
"comment_author": "chrisradek",
"comment_body": "Why don't you create the bucket used by all the tests in the `before` step? We shouldn't have to create a new bucket for every single test, just this suite of tests. \r\n\r\nI also wouldn't mix the `putObject` method with `createBucket`. `putObject` is doing too much, and adds 'global' (across tests) state. For example, you don't directly pass it the bucket or key, instead relying on a closure that every test has access to (and can change). That could lead to tricky edge cases coming up later that are hard to debug.",
"pr_file_module": null
},
{
"comment_id": "184559113",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T23:33:37+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Yes, you can pass in the bucket and key, but right, it will change the global state. But capsulizing them will also guarantee `putObject` always succeed(bucket always there).",
"pr_file_module": null
},
{
"comment_id": "184560586",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T23:44:33+00:00",
"comment_author": "chrisradek",
"comment_body": "But you could also make sure the bucket is there in the `before` hook. Just call `done()` after the `waitFor` method completes. Then you also only need to create it once; I don't think there's a reason we need to create a new bucket for every test since we aren't testing any bucket-specific configurations here. Creating a new bucket with each test also creates more points of failure (rate/resource limits, for example).\r\n",
"pr_file_module": null
},
{
"comment_id": "184561348",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T23:50:42+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "yea. I will move the `putObject` to `before` trait. And use timestamp in bucket name. I was thinking that we can write less code if we are going to add more tests here. But now I agree that this is an overkill and not worthwhile. ",
"pr_file_module": null
}
]
},
{
"discussion_id": "184550641",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"created_at": "2018-04-26T22:38:41+00:00",
"commented_code": "var helpers = require('../helpers');\nvar AWS = helpers.AWS;\nvar bucketName = helpers.sharedBucket;\ndescribe('download integrity', function() {\n var params;\n var s3;\n function putObject(customParams, done) {\n params = {\n Bucket: customParams.Bucket || params.Bucket,\n Key: customParams.Key || params.Key,\n };\n s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n }).then(function() {\n return s3.putObject({\n Bucket: params.Bucket,\n Key: params.Key,\n Body: customParams.Body || params.Body\n }).promise();\n }).then(function() {\n return s3.waitFor('objectExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise();\n }).then(function() { \n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot put object: ' + err);\n exit(1);\n });\n }\n\n function delectBucket(done) {\n s3.listObjectVersions({\n Bucket: params.Bucket\n }).promise().then(function(data) {\n var removeObjectsParams = {\n Bucket: params.Bucket,\n Delete: {\n Quiet: false,\n Objects: []\n }\n }\n for (var version of data.Versions) {\n removeObjectsParams.Delete.Objects.push({\n Key: version.Key,\n VersionId: version.VersionId === 'null' ? null : version.VersionId,\n });\n }\n return removeObjectsParams;\n }).then(function(removeObjectsParams) {\n return s3.deleteObjects(removeObjectsParams).promise();\n }).then(function() {\n return s3.waitFor('objectNotExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise()\n }).then(function() {\n return s3.deleteBucket({Bucket: params.Bucket}).promise();\n }).then(function(data) {\n return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n }).then(function() {\n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot delete bucket: ' + err);\n exit(1);\n })\n }\n\n before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});\n })\n\n beforeEach('setup bucket and object...', function(done) {\n params = {\n Bucket: bucketName,\n Key: 'key',\n };\n putObject({Body: 'this is a test!'}, done);\n })\n\n afterEach('delete bucket...', function(done) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "184550641",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550641",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});\n+ })\n+\n+ beforeEach('setup bucket and object...', function(done) {\n+ params = {\n+ Bucket: bucketName,\n+ Key: 'key',\n+ };\n+ putObject({Body: 'this is a test!'}, done);\n+ })\n+\n+ afterEach('delete bucket...', function(done) {",
"comment_created_at": "2018-04-26T22:38:41+00:00",
"comment_author": "chrisradek",
"comment_body": "If you `createBucket` in the `before` hook, you can `deleteBucket` in the `after` hook!",
"pr_file_module": null
}
]
},
{
"discussion_id": "265341875",
"pr_number": 2559,
"pr_file": "test/credentials.spec.js",
"created_at": "2019-03-13T21:37:20+00:00",
"commented_code": "creds.get();\n return validateCredentials(creds);\n });\n it('loads via credential_process', function(done) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "265341875",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2559,
"pr_file": "test/credentials.spec.js",
"discussion_id": "265341875",
"commented_code": "@@ -438,6 +438,124 @@\n creds.get();\n return validateCredentials(creds);\n });\n+ it('loads via credential_process', function(done) {",
"comment_created_at": "2019-03-13T21:37:20+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Nit: you can put these unit tests to a test suite(like leading with `describe('credential process')`). So than you can clear the spies easily. ",
"pr_file_module": null
}
]
}
]

View File

@@ -52,177 +52,3 @@ describe('S3 object operations', function() {
```
This approach prevents resource exhaustion, speeds up test runs, and reduces flakiness from race conditions and rate limiting. Separating environment-specific tests (browser vs. Node.js) into different folders further improves organization and keeps tests from running in the wrong environment.
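As a concrete sketch of this pattern (the bucket name, key, and assertion are illustrative, not taken from the SDK's actual suite), the bucket is created once in `before` and torn down once in `after`, so each test only touches the objects it writes:

```javascript
var AWS = require('aws-sdk');
var expect = require('chai').expect;

describe('S3 object operations', function() {
  var s3 = new AWS.S3();
  // A timestamp suffix keeps the shared bucket name reasonably unique across runs.
  var bucketName = 'aws-sdk-js-integration-' + Date.now();

  before(function() {
    // Create the bucket once for the whole suite; mocha waits on the returned promise.
    return s3.createBucket({Bucket: bucketName}).promise().then(function() {
      return s3.waitFor('bucketExists', {Bucket: bucketName}).promise();
    });
  });

  after(function() {
    // Empty the bucket, then remove it once after all tests have run.
    return s3.deleteObject({Bucket: bucketName, Key: 'key'}).promise().then(function() {
      return s3.deleteBucket({Bucket: bucketName}).promise();
    }).then(function() {
      return s3.waitFor('bucketNotExists', {Bucket: bucketName}).promise();
    });
  });

  it('round-trips an object', function() {
    return s3.putObject({Bucket: bucketName, Key: 'key', Body: 'test'}).promise().then(function() {
      return s3.getObject({Bucket: bucketName, Key: 'key'}).promise();
    }).then(function(data) {
      expect(data.Body.toString()).to.equal('test');
    });
  });
});
```

Keeping bucket creation out of the per-test hooks limits the points of failure the review calls out: one `createBucket` per suite instead of one per test, and no shared mutable `params` object for tests to trip over.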
[
{
"discussion_id": "182547608",
"pr_number": 2014,
"pr_file": "test/browserHashes.spec.js",
"created_at": "2018-04-18T19:46:02+00:00",
"commented_code": "if (truncate) {\n digest = digest.slice(0, truncate);\n }\n expect(digest.toString('hex')).to.equal(expected);\n //in node <= 0.10 digest sometimes returns a Dataview, should be buffer.",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "182547608",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2014,
"pr_file": "test/browserHashes.spec.js",
"discussion_id": "182547608",
"commented_code": "@@ -43,7 +43,11 @@ describe('Browser hash implementations', function() {\n if (truncate) {\n digest = digest.slice(0, truncate);\n }\n- expect(digest.toString('hex')).to.equal(expected);\n+ //in node <= 0.10 digest sometimes returns a Dataview, should be buffer.",
"comment_created_at": "2018-04-18T19:46:02+00:00",
"comment_author": "chrisradek",
"comment_body": "Are these tests running in node.js? This should only be running in browser environments, and is using the 3rd party `Buffer` package instead of node.js' `Buffer` package. We can place browser-specific tests in a separate folder and exclude them from being run by mocha in the npm unit script.",
"pr_file_module": null
},
{
"comment_id": "182572206",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2014,
"pr_file": "test/browserHashes.spec.js",
"discussion_id": "182547608",
"commented_code": "@@ -43,7 +43,11 @@ describe('Browser hash implementations', function() {\n if (truncate) {\n digest = digest.slice(0, truncate);\n }\n- expect(digest.toString('hex')).to.equal(expected);\n+ //in node <= 0.10 digest sometimes returns a Dataview, should be buffer.",
"comment_created_at": "2018-04-18T21:16:49+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Ah right! The change mean to fix the test. But actually we can remove it out of node test. I didn't realize this is browser-only although the name already told me",
"pr_file_module": null
}
]
},
{
"discussion_id": "184547814",
"pr_number": 1956,
"pr_file": "test/integration_test/helpers.js",
"created_at": "2018-04-26T22:22:30+00:00",
"commented_code": "(function() {\n global.expect = require('chai').expect;\n module.exports = {\n sharedBucket: 'aws-sdk-js-integration',",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "184547814",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/helpers.js",
"discussion_id": "184547814",
"commented_code": "@@ -0,0 +1,25 @@\n+(function() {\n+ global.expect = require('chai').expect;\n+ module.exports = {\n+ sharedBucket: 'aws-sdk-js-integration',",
"comment_created_at": "2018-04-26T22:22:30+00:00",
"comment_author": "chrisradek",
"comment_body": "Probably want to append a timestamp to this as well to make it somewhat unique",
"pr_file_module": null
},
{
"comment_id": "184557074",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/helpers.js",
"discussion_id": "184547814",
"commented_code": "@@ -0,0 +1,25 @@\n+(function() {\n+ global.expect = require('chai').expect;\n+ module.exports = {\n+ sharedBucket: 'aws-sdk-js-integration',",
"comment_created_at": "2018-04-26T23:19:53+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "This is for sharing the test bucket. Maybe this time occasionally we fail to delete the bucket. But next time we will delete this bucket.",
"pr_file_module": null
}
]
},
{
"discussion_id": "184550574",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"created_at": "2018-04-26T22:38:16+00:00",
"commented_code": "var helpers = require('../helpers');\nvar AWS = helpers.AWS;\nvar bucketName = helpers.sharedBucket;\ndescribe('download integrity', function() {\n var params;\n var s3;\n function putObject(customParams, done) {\n params = {\n Bucket: customParams.Bucket || params.Bucket,\n Key: customParams.Key || params.Key,\n };\n s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n }).then(function() {\n return s3.putObject({\n Bucket: params.Bucket,\n Key: params.Key,\n Body: customParams.Body || params.Body\n }).promise();\n }).then(function() {\n return s3.waitFor('objectExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise();\n }).then(function() { \n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot put object: ' + err);\n exit(1);\n });\n }\n\n function delectBucket(done) {\n s3.listObjectVersions({\n Bucket: params.Bucket\n }).promise().then(function(data) {\n var removeObjectsParams = {\n Bucket: params.Bucket,\n Delete: {\n Quiet: false,\n Objects: []\n }\n }\n for (var version of data.Versions) {\n removeObjectsParams.Delete.Objects.push({\n Key: version.Key,\n VersionId: version.VersionId === 'null' ? null : version.VersionId,\n });\n }\n return removeObjectsParams;\n }).then(function(removeObjectsParams) {\n return s3.deleteObjects(removeObjectsParams).promise();\n }).then(function() {\n return s3.waitFor('objectNotExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise()\n }).then(function() {\n return s3.deleteBucket({Bucket: params.Bucket}).promise();\n }).then(function(data) {\n return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n }).then(function() {\n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot delete bucket: ' + err);\n exit(1);\n })\n }\n\n before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "184550574",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T22:38:16+00:00",
"comment_author": "chrisradek",
"comment_body": "Why don't you create the bucket used by all the tests in the `before` step? We shouldn't have to create a new bucket for every single test, just this suite of tests. \r\n\r\nI also wouldn't mix the `putObject` method with `createBucket`. `putObject` is doing too much, and adds 'global' (across tests) state. For example, you don't directly pass it the bucket or key, instead relying on a closure that every test has access to (and can change). That could lead to tricky edge cases coming up later that are hard to debug.",
"pr_file_module": null
},
{
"comment_id": "184559113",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T23:33:37+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Yes, you can pass in the bucket and key, but right, it will change the global state. But capsulizing them will also guarantee `putObject` always succeed(bucket always there).",
"pr_file_module": null
},
{
"comment_id": "184560586",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T23:44:33+00:00",
"comment_author": "chrisradek",
"comment_body": "But you could also make sure the bucket is there in the `before` hook. Just call `done()` after the `waitFor` method completes. Then you also only need to create it once; I don't think there's a reason we need to create a new bucket for every test since we aren't testing any bucket-specific configurations here. Creating a new bucket with each test also creates more points of failure (rate/resource limits, for example).\r\n",
"pr_file_module": null
},
{
"comment_id": "184561348",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550574",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});",
"comment_created_at": "2018-04-26T23:50:42+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "yea. I will move the `putObject` to `before` trait. And use timestamp in bucket name. I was thinking that we can write less code if we are going to add more tests here. But now I agree that this is an overkill and not worthwhile. ",
"pr_file_module": null
}
]
},
{
"discussion_id": "184550641",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"created_at": "2018-04-26T22:38:41+00:00",
"commented_code": "var helpers = require('../helpers');\nvar AWS = helpers.AWS;\nvar bucketName = helpers.sharedBucket;\ndescribe('download integrity', function() {\n var params;\n var s3;\n function putObject(customParams, done) {\n params = {\n Bucket: customParams.Bucket || params.Bucket,\n Key: customParams.Key || params.Key,\n };\n s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n }).then(function() {\n return s3.putObject({\n Bucket: params.Bucket,\n Key: params.Key,\n Body: customParams.Body || params.Body\n }).promise();\n }).then(function() {\n return s3.waitFor('objectExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise();\n }).then(function() { \n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot put object: ' + err);\n exit(1);\n });\n }\n\n function delectBucket(done) {\n s3.listObjectVersions({\n Bucket: params.Bucket\n }).promise().then(function(data) {\n var removeObjectsParams = {\n Bucket: params.Bucket,\n Delete: {\n Quiet: false,\n Objects: []\n }\n }\n for (var version of data.Versions) {\n removeObjectsParams.Delete.Objects.push({\n Key: version.Key,\n VersionId: version.VersionId === 'null' ? null : version.VersionId,\n });\n }\n return removeObjectsParams;\n }).then(function(removeObjectsParams) {\n return s3.deleteObjects(removeObjectsParams).promise();\n }).then(function() {\n return s3.waitFor('objectNotExists', {\n Bucket: params.Bucket,\n Key: params.Key,\n }).promise()\n }).then(function() {\n return s3.deleteBucket({Bucket: params.Bucket}).promise();\n }).then(function(data) {\n return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n }).then(function() {\n if (typeof done === 'function') done();\n }).catch(function(err) {\n throw new Error('Cannot delete bucket: ' + err);\n exit(1);\n })\n }\n\n before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});\n })\n\n beforeEach('setup bucket and object...', function(done) {\n params = {\n Bucket: bucketName,\n Key: 'key',\n };\n putObject({Body: 'this is a test!'}, done);\n })\n\n afterEach('delete bucket...', function(done) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "184550641",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1956,
"pr_file": "test/integration_test/s3/getObject.spec.js",
"discussion_id": "184550641",
"commented_code": "@@ -0,0 +1,209 @@\n+var helpers = require('../helpers');\n+var AWS = helpers.AWS;\n+var bucketName = helpers.sharedBucket;\n+describe('download integrity', function() {\n+ var params;\n+ var s3;\n+ function putObject(customParams, done) {\n+ params = {\n+ Bucket: customParams.Bucket || params.Bucket,\n+ Key: customParams.Key || params.Key,\n+ };\n+ s3.createBucket({Bucket: params.Bucket}).promise().then(function() {\n+ return s3.waitFor('bucketExists', {Bucket: params.Bucket}).promise()\n+ }).then(function() {\n+ return s3.putObject({\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ Body: customParams.Body || params.Body\n+ }).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise();\n+ }).then(function() { \n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot put object: ' + err);\n+ exit(1);\n+ });\n+ }\n+\n+ function delectBucket(done) {\n+ s3.listObjectVersions({\n+ Bucket: params.Bucket\n+ }).promise().then(function(data) {\n+ var removeObjectsParams = {\n+ Bucket: params.Bucket,\n+ Delete: {\n+ Quiet: false,\n+ Objects: []\n+ }\n+ }\n+ for (var version of data.Versions) {\n+ removeObjectsParams.Delete.Objects.push({\n+ Key: version.Key,\n+ VersionId: version.VersionId === 'null' ? null : version.VersionId,\n+ });\n+ }\n+ return removeObjectsParams;\n+ }).then(function(removeObjectsParams) {\n+ return s3.deleteObjects(removeObjectsParams).promise();\n+ }).then(function() {\n+ return s3.waitFor('objectNotExists', {\n+ Bucket: params.Bucket,\n+ Key: params.Key,\n+ }).promise()\n+ }).then(function() {\n+ return s3.deleteBucket({Bucket: params.Bucket}).promise();\n+ }).then(function(data) {\n+ return s3.waitFor('bucketNotExists', {Bucket: params.Bucket}).promise();\n+ }).then(function() {\n+ if (typeof done === 'function') done();\n+ }).catch(function(err) {\n+ throw new Error('Cannot delete bucket: ' + err);\n+ exit(1);\n+ })\n+ }\n+\n+ before('setup s3 client with responseChecksumAlgorithm equals \\'md5\\'', function() {\n+ s3 = new AWS.S3({responseChecksumAlgorithm: 'md5'});\n+ })\n+\n+ beforeEach('setup bucket and object...', function(done) {\n+ params = {\n+ Bucket: bucketName,\n+ Key: 'key',\n+ };\n+ putObject({Body: 'this is a test!'}, done);\n+ })\n+\n+ afterEach('delete bucket...', function(done) {",
"comment_created_at": "2018-04-26T22:38:41+00:00",
"comment_author": "chrisradek",
"comment_body": "If you `createBucket` in the `before` hook, you can `deleteBucket` in the `after` hook!",
"pr_file_module": null
}
]
},
{
"discussion_id": "265341875",
"pr_number": 2559,
"pr_file": "test/credentials.spec.js",
"created_at": "2019-03-13T21:37:20+00:00",
"commented_code": "creds.get();\n return validateCredentials(creds);\n });\n it('loads via credential_process', function(done) {",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "265341875",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 2559,
"pr_file": "test/credentials.spec.js",
"discussion_id": "265341875",
"commented_code": "@@ -438,6 +438,124 @@\n creds.get();\n return validateCredentials(creds);\n });\n+ it('loads via credential_process', function(done) {",
"comment_created_at": "2019-03-13T21:37:20+00:00",
"comment_author": "AllanZhengYP",
"comment_body": "Nit: you can put these unit tests to a test suite(like leading with `describe('credential process')`). So than you can clear the spies easily. ",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,92 @@
[
{
"discussion_id": "107227865",
"pr_number": 1418,
"pr_file": "test/config.spec.coffee",
"created_at": "2017-03-21T17:51:13+00:00",
"commented_code": "expect(configure(maxRetries: 2).maxRetries).to.equal(2)\n\n describe 'retryDelayOptions', ->\n it 'defaults to \"base: 100\"', ->",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "107227865",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1418,
"pr_file": "test/config.spec.coffee",
"discussion_id": "107227865",
"commented_code": "@@ -60,8 +60,6 @@ describe 'AWS.Config', ->\n expect(configure(maxRetries: 2).maxRetries).to.equal(2)\n \n describe 'retryDelayOptions', ->\n- it 'defaults to \"base: 100\"', ->",
"comment_created_at": "2017-03-21T17:51:13+00:00",
"comment_author": "jeskew",
"comment_body": "Optional: you might want to add a test to service.spec.coffee verifying that a normal client still gets a retry base of 100 set.",
"pr_file_module": null
},
{
"comment_id": "107261089",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1418,
"pr_file": "test/config.spec.coffee",
"discussion_id": "107227865",
"commented_code": "@@ -60,8 +60,6 @@ describe 'AWS.Config', ->\n expect(configure(maxRetries: 2).maxRetries).to.equal(2)\n \n describe 'retryDelayOptions', ->\n- it 'defaults to \"base: 100\"', ->",
"comment_created_at": "2017-03-21T20:06:21+00:00",
"comment_author": "chrisradek",
"comment_body": "There is a separate test that implicitly tests this, but happy to add an explicit test.",
"pr_file_module": null
}
]
},
{
"discussion_id": "111287813",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"created_at": "2017-04-13T00:13:45+00:00",
"commented_code": "validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/)\n\n it 'loads credentials from path defined in AWS_SHARED_CREDENTIALS_FILE if AWS_SDK_LOAD_CONFIG is set', ->\n process.env.AWS_SDK_LOAD_CONFIG = '1'\n process.env.AWS_SHARED_CREDENTIALS_FILE = '/path/to/aws/credentials'\n mock = '''\n [default]\n aws_access_key_id = akid\n aws_secret_access_key = secret\n aws_session_token = session\n '''\n helpers.spyOn(AWS.util, 'readFileSync').andReturn(mock)\n\n creds = new AWS.SharedIniFileCredentials()\n creds.get();\n validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]config/)\n expect(AWS.util.readFileSync.calls[1].arguments[0]).to.equal(process.env.AWS_SHARED_CREDENTIALS_FILE)\n\n it 'loads credentials from ~/.aws/config if AWS_SDK_LOAD_CONFIG is set', ->",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "111287813",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"discussion_id": "111287813",
"commented_code": "@@ -209,6 +233,53 @@ if AWS.util.isNode()\n validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/)\n \n+ it 'loads credentials from path defined in AWS_SHARED_CREDENTIALS_FILE if AWS_SDK_LOAD_CONFIG is set', ->\n+ process.env.AWS_SDK_LOAD_CONFIG = '1'\n+ process.env.AWS_SHARED_CREDENTIALS_FILE = '/path/to/aws/credentials'\n+ mock = '''\n+ [default]\n+ aws_access_key_id = akid\n+ aws_secret_access_key = secret\n+ aws_session_token = session\n+ '''\n+ helpers.spyOn(AWS.util, 'readFileSync').andReturn(mock)\n+\n+ creds = new AWS.SharedIniFileCredentials()\n+ creds.get();\n+ validateCredentials(creds)\n+ expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]config/)\n+ expect(AWS.util.readFileSync.calls[1].arguments[0]).to.equal(process.env.AWS_SHARED_CREDENTIALS_FILE)\n+\n+ it 'loads credentials from ~/.aws/config if AWS_SDK_LOAD_CONFIG is set', ->",
"comment_created_at": "2017-04-13T00:13:45+00:00",
"comment_author": "chrisradek",
"comment_body": "Can we add a test to make sure the credentials from `~/.aws/credentials` is used preferentially over the credentials in `~/.aws/config` if the same profile exists in both files?",
"pr_file_module": null
},
{
"comment_id": "111301175",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"discussion_id": "111287813",
"commented_code": "@@ -209,6 +233,53 @@ if AWS.util.isNode()\n validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/)\n \n+ it 'loads credentials from path defined in AWS_SHARED_CREDENTIALS_FILE if AWS_SDK_LOAD_CONFIG is set', ->\n+ process.env.AWS_SDK_LOAD_CONFIG = '1'\n+ process.env.AWS_SHARED_CREDENTIALS_FILE = '/path/to/aws/credentials'\n+ mock = '''\n+ [default]\n+ aws_access_key_id = akid\n+ aws_secret_access_key = secret\n+ aws_session_token = session\n+ '''\n+ helpers.spyOn(AWS.util, 'readFileSync').andReturn(mock)\n+\n+ creds = new AWS.SharedIniFileCredentials()\n+ creds.get();\n+ validateCredentials(creds)\n+ expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]config/)\n+ expect(AWS.util.readFileSync.calls[1].arguments[0]).to.equal(process.env.AWS_SHARED_CREDENTIALS_FILE)\n+\n+ it 'loads credentials from ~/.aws/config if AWS_SDK_LOAD_CONFIG is set', ->",
"comment_created_at": "2017-04-13T02:38:43+00:00",
"comment_author": "jeskew",
"comment_body": "I'll add a test for that.",
"pr_file_module": null
}
]
},
{
"discussion_id": "111434683",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"created_at": "2017-04-13T16:46:30+00:00",
"commented_code": "expect(creds.expireTime).to.eql(new Date(0))\n done()\n\n it 'will assume a role from the credentials file whose source profile is defined in the config file', (done) ->\n process.env.AWS_SDK_LOAD_CONFIG = '1'\n helpers.spyOn(AWS.util, 'readFileSync').andCallFake (path) ->\n if (path.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/))\n '''\n [default]\n aws_access_key_id = akid\n aws_secret_access_key = secret\n role_arn = arn\n source_profile = foo\n '''\n else\n '''\n [profile foo]\n aws_access_key_id = akid2\n aws_secret_access_key = secret2\n '''\n helpers.mockHttpResponse 200, {}, '''",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "111434683",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"discussion_id": "111434683",
"commented_code": "@@ -370,6 +491,84 @@ if AWS.util.isNode()\n expect(creds.expireTime).to.eql(new Date(0))\n done()\n \n+ it 'will assume a role from the credentials file whose source profile is defined in the config file', (done) ->\n+ process.env.AWS_SDK_LOAD_CONFIG = '1'\n+ helpers.spyOn(AWS.util, 'readFileSync').andCallFake (path) ->\n+ if (path.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/))\n+ '''\n+ [default]\n+ aws_access_key_id = akid\n+ aws_secret_access_key = secret\n+ role_arn = arn\n+ source_profile = foo\n+ '''\n+ else\n+ '''\n+ [profile foo]\n+ aws_access_key_id = akid2\n+ aws_secret_access_key = secret2\n+ '''\n+ helpers.mockHttpResponse 200, {}, '''",
"comment_created_at": "2017-04-13T16:46:30+00:00",
"comment_author": "chrisradek",
"comment_body": "I'm not sure this test is actually ensuring that the creds from `profile foo` are used instead of `default`.\r\n\r\nWhat do you think about spying on `AWS.STS` or `AWS.Credentials` to get the accessKeyId that was used as the source?",
"pr_file_module": null
}
]
}
]

View File

@@ -41,97 +41,3 @@ it('loads credentials from preferred source when available in multiple locations
```
Don't just verify that configuration loads successfully; explicitly test that the correct values from the highest-priority source are being used when the same configuration option exists in multiple places.
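A minimal sketch of such an assertion, reusing the `helpers.spyOn(AWS.util, 'readFileSync')` pattern from the tests quoted below (the profile contents, path matcher, and key ids are made up for illustration):

```javascript
var AWS = require('aws-sdk');
var expect = require('chai').expect;
var helpers = require('./helpers'); // the suite's spy helper, as in the quoted tests

it('prefers ~/.aws/credentials over ~/.aws/config for the same profile', function() {
  process.env.AWS_SDK_LOAD_CONFIG = '1';
  helpers.spyOn(AWS.util, 'readFileSync').andCallFake(function(path) {
    // Serve a distinct access key id from each file so the assertion can tell them apart.
    if (path.match(/credentials$/)) {
      return '[default]\naws_access_key_id = from_credentials\naws_secret_access_key = secret';
    }
    return '[default]\naws_access_key_id = from_config\naws_secret_access_key = secret';
  });

  var creds = new AWS.SharedIniFileCredentials();
  creds.get();
  // Asserting on the loaded value proves which source won, not merely that loading succeeded.
  expect(creds.accessKeyId).to.equal('from_credentials');
});
```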
[
{
"discussion_id": "107227865",
"pr_number": 1418,
"pr_file": "test/config.spec.coffee",
"created_at": "2017-03-21T17:51:13+00:00",
"commented_code": "expect(configure(maxRetries: 2).maxRetries).to.equal(2)\n\n describe 'retryDelayOptions', ->\n it 'defaults to \"base: 100\"', ->",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "107227865",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1418,
"pr_file": "test/config.spec.coffee",
"discussion_id": "107227865",
"commented_code": "@@ -60,8 +60,6 @@ describe 'AWS.Config', ->\n expect(configure(maxRetries: 2).maxRetries).to.equal(2)\n \n describe 'retryDelayOptions', ->\n- it 'defaults to \"base: 100\"', ->",
"comment_created_at": "2017-03-21T17:51:13+00:00",
"comment_author": "jeskew",
"comment_body": "Optional: you might want to add a test to service.spec.coffee verifying that a normal client still gets a retry base of 100 set.",
"pr_file_module": null
},
{
"comment_id": "107261089",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1418,
"pr_file": "test/config.spec.coffee",
"discussion_id": "107227865",
"commented_code": "@@ -60,8 +60,6 @@ describe 'AWS.Config', ->\n expect(configure(maxRetries: 2).maxRetries).to.equal(2)\n \n describe 'retryDelayOptions', ->\n- it 'defaults to \"base: 100\"', ->",
"comment_created_at": "2017-03-21T20:06:21+00:00",
"comment_author": "chrisradek",
"comment_body": "There is a separate test that implicitly tests this, but happy to add an explicit test.",
"pr_file_module": null
}
]
},
{
"discussion_id": "111287813",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"created_at": "2017-04-13T00:13:45+00:00",
"commented_code": "validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/)\n\n it 'loads credentials from path defined in AWS_SHARED_CREDENTIALS_FILE if AWS_SDK_LOAD_CONFIG is set', ->\n process.env.AWS_SDK_LOAD_CONFIG = '1'\n process.env.AWS_SHARED_CREDENTIALS_FILE = '/path/to/aws/credentials'\n mock = '''\n [default]\n aws_access_key_id = akid\n aws_secret_access_key = secret\n aws_session_token = session\n '''\n helpers.spyOn(AWS.util, 'readFileSync').andReturn(mock)\n\n creds = new AWS.SharedIniFileCredentials()\n creds.get();\n validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]config/)\n expect(AWS.util.readFileSync.calls[1].arguments[0]).to.equal(process.env.AWS_SHARED_CREDENTIALS_FILE)\n\n it 'loads credentials from ~/.aws/config if AWS_SDK_LOAD_CONFIG is set', ->",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "111287813",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"discussion_id": "111287813",
"commented_code": "@@ -209,6 +233,53 @@ if AWS.util.isNode()\n validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/)\n \n+ it 'loads credentials from path defined in AWS_SHARED_CREDENTIALS_FILE if AWS_SDK_LOAD_CONFIG is set', ->\n+ process.env.AWS_SDK_LOAD_CONFIG = '1'\n+ process.env.AWS_SHARED_CREDENTIALS_FILE = '/path/to/aws/credentials'\n+ mock = '''\n+ [default]\n+ aws_access_key_id = akid\n+ aws_secret_access_key = secret\n+ aws_session_token = session\n+ '''\n+ helpers.spyOn(AWS.util, 'readFileSync').andReturn(mock)\n+\n+ creds = new AWS.SharedIniFileCredentials()\n+ creds.get();\n+ validateCredentials(creds)\n+ expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]config/)\n+ expect(AWS.util.readFileSync.calls[1].arguments[0]).to.equal(process.env.AWS_SHARED_CREDENTIALS_FILE)\n+\n+ it 'loads credentials from ~/.aws/config if AWS_SDK_LOAD_CONFIG is set', ->",
"comment_created_at": "2017-04-13T00:13:45+00:00",
"comment_author": "chrisradek",
"comment_body": "Can we add a test to make sure the credentials from `~/.aws/credentials` is used preferentially over the credentials in `~/.aws/config` if the same profile exists in both files?",
"pr_file_module": null
},
{
"comment_id": "111301175",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"discussion_id": "111287813",
"commented_code": "@@ -209,6 +233,53 @@ if AWS.util.isNode()\n validateCredentials(creds)\n expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/)\n \n+ it 'loads credentials from path defined in AWS_SHARED_CREDENTIALS_FILE if AWS_SDK_LOAD_CONFIG is set', ->\n+ process.env.AWS_SDK_LOAD_CONFIG = '1'\n+ process.env.AWS_SHARED_CREDENTIALS_FILE = '/path/to/aws/credentials'\n+ mock = '''\n+ [default]\n+ aws_access_key_id = akid\n+ aws_secret_access_key = secret\n+ aws_session_token = session\n+ '''\n+ helpers.spyOn(AWS.util, 'readFileSync').andReturn(mock)\n+\n+ creds = new AWS.SharedIniFileCredentials()\n+ creds.get();\n+ validateCredentials(creds)\n+ expect(AWS.util.readFileSync.calls[0].arguments[0]).to.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]config/)\n+ expect(AWS.util.readFileSync.calls[1].arguments[0]).to.equal(process.env.AWS_SHARED_CREDENTIALS_FILE)\n+\n+ it 'loads credentials from ~/.aws/config if AWS_SDK_LOAD_CONFIG is set', ->",
"comment_created_at": "2017-04-13T02:38:43+00:00",
"comment_author": "jeskew",
"comment_body": "I'll add a test for that.",
"pr_file_module": null
}
]
},
{
"discussion_id": "111434683",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"created_at": "2017-04-13T16:46:30+00:00",
"commented_code": "expect(creds.expireTime).to.eql(new Date(0))\n done()\n\n it 'will assume a role from the credentials file whose source profile is defined in the config file', (done) ->\n process.env.AWS_SDK_LOAD_CONFIG = '1'\n helpers.spyOn(AWS.util, 'readFileSync').andCallFake (path) ->\n if (path.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/))\n '''\n [default]\n aws_access_key_id = akid\n aws_secret_access_key = secret\n role_arn = arn\n source_profile = foo\n '''\n else\n '''\n [profile foo]\n aws_access_key_id = akid2\n aws_secret_access_key = secret2\n '''\n helpers.mockHttpResponse 200, {}, '''",
"repo_full_name": "aws/aws-sdk-js",
"discussion_comments": [
{
"comment_id": "111434683",
"repo_full_name": "aws/aws-sdk-js",
"pr_number": 1391,
"pr_file": "test/credentials.spec.coffee",
"discussion_id": "111434683",
"commented_code": "@@ -370,6 +491,84 @@ if AWS.util.isNode()\n expect(creds.expireTime).to.eql(new Date(0))\n done()\n \n+ it 'will assume a role from the credentials file whose source profile is defined in the config file', (done) ->\n+ process.env.AWS_SDK_LOAD_CONFIG = '1'\n+ helpers.spyOn(AWS.util, 'readFileSync').andCallFake (path) ->\n+ if (path.match(/[\\/\\\\]home[\\/\\\\]user[\\/\\\\].aws[\\/\\\\]credentials/))\n+ '''\n+ [default]\n+ aws_access_key_id = akid\n+ aws_secret_access_key = secret\n+ role_arn = arn\n+ source_profile = foo\n+ '''\n+ else\n+ '''\n+ [profile foo]\n+ aws_access_key_id = akid2\n+ aws_secret_access_key = secret2\n+ '''\n+ helpers.mockHttpResponse 200, {}, '''",
"comment_created_at": "2017-04-13T16:46:30+00:00",
"comment_author": "chrisradek",
"comment_body": "I'm not sure this test is actually ensuring that the creds from `profile foo` are used instead of `default`.\r\n\r\nWhat do you think about spying on `AWS.STS` or `AWS.Credentials` to get the accessKeyId that was used as the source?",
"pr_file_module": null
}
]
}
]


View File

@@ -0,0 +1,58 @@
[
{
"discussion_id": "2164733100",
"pr_number": 50834,
"pr_file": "sdk/netapp/Azure.ResourceManager.NetApp/src/Azure.ResourceManager.NetApp.csproj",
"created_at": "2025-06-24T19:20:37+00:00",
"commented_code": "<Project Sdk=\"Microsoft.NET.Sdk\">\n<Project Sdk=\"Microsoft.NET.Sdk\">",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2164733100",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50834,
"pr_file": "sdk/netapp/Azure.ResourceManager.NetApp/src/Azure.ResourceManager.NetApp.csproj",
"discussion_id": "2164733100",
"commented_code": "@@ -1,6 +1,6 @@\n-<Project Sdk=\"Microsoft.NET.Sdk\">\n+<Project Sdk=\"Microsoft.NET.Sdk\">",
"comment_created_at": "2025-06-24T19:20:37+00:00",
"comment_author": "Copilot",
"comment_body": "There is a hidden BOM character at the start of the file. Removing it will prevent encoding-related issues in CI and editors.\n```suggestion\n<Project Sdk=\"Microsoft.NET.Sdk\">\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2151210517",
"pr_number": 49826,
"pr_file": "sdk/managednetworkfabric/Azure.ResourceManager.ManagedNetworkFabric/src/Azure.ResourceManager.ManagedNetworkFabric.csproj",
"created_at": "2025-06-17T02:43:55+00:00",
"commented_code": "<Project Sdk=\"Microsoft.NET.Sdk\">\n<Project Sdk=\"Microsoft.NET.Sdk\">\n <PropertyGroup>\n <Version>1.2.0-beta.1</Version>\n <!--The ApiCompatVersion is managed automatically and should not generally be modified manually.-->\n <ApiCompatVersion>1.1.2</ApiCompatVersion>",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2151210517",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49826,
"pr_file": "sdk/managednetworkfabric/Azure.ResourceManager.ManagedNetworkFabric/src/Azure.ResourceManager.ManagedNetworkFabric.csproj",
"discussion_id": "2151210517",
"commented_code": "@@ -1,10 +1,8 @@\n-<Project Sdk=\"Microsoft.NET.Sdk\">\n+<Project Sdk=\"Microsoft.NET.Sdk\">\n <PropertyGroup>\n <Version>1.2.0-beta.1</Version>\n- <!--The ApiCompatVersion is managed automatically and should not generally be modified manually.-->\n- <ApiCompatVersion>1.1.2</ApiCompatVersion>",
"comment_created_at": "2025-06-17T02:43:55+00:00",
"comment_author": "ArcturusZhang",
"comment_body": "I believe you have met build failures about the apicompat and found out removing this would \"fix\" the error.\r\nBut this is only hiding the issue instead of resolving it.\r\nPlease revert this back and let the pipeline fails. We will find other ways to introduce real fixes to those errors.",
"pr_file_module": null
},
{
"comment_id": "2152337212",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49826,
"pr_file": "sdk/managednetworkfabric/Azure.ResourceManager.ManagedNetworkFabric/src/Azure.ResourceManager.ManagedNetworkFabric.csproj",
"discussion_id": "2151210517",
"commented_code": "@@ -1,10 +1,8 @@\n-<Project Sdk=\"Microsoft.NET.Sdk\">\n+<Project Sdk=\"Microsoft.NET.Sdk\">\n <PropertyGroup>\n <Version>1.2.0-beta.1</Version>\n- <!--The ApiCompatVersion is managed automatically and should not generally be modified manually.-->\n- <ApiCompatVersion>1.1.2</ApiCompatVersion>",
"comment_created_at": "2025-06-17T13:51:09+00:00",
"comment_author": "nafizhaider32",
"comment_body": "done",
"pr_file_module": null
}
]
}
]

View File

@@ -29,63 +29,3 @@ Examples:
```
When CI/CD pipelines fail, investigate the root cause rather than making superficial changes that merely hide the problem. Suppressing the failure signal leaves the real issue unaddressed and undermines the integrity and reliability of your build and deployment processes.
[
{
"discussion_id": "2164733100",
"pr_number": 50834,
"pr_file": "sdk/netapp/Azure.ResourceManager.NetApp/src/Azure.ResourceManager.NetApp.csproj",
"created_at": "2025-06-24T19:20:37+00:00",
"commented_code": "<Project Sdk=\"Microsoft.NET.Sdk\">\n\ufeff<Project Sdk=\"Microsoft.NET.Sdk\">",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2164733100",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50834,
"pr_file": "sdk/netapp/Azure.ResourceManager.NetApp/src/Azure.ResourceManager.NetApp.csproj",
"discussion_id": "2164733100",
"commented_code": "@@ -1,6 +1,6 @@\n-<Project Sdk=\"Microsoft.NET.Sdk\">\n+\ufeff<Project Sdk=\"Microsoft.NET.Sdk\">",
"comment_created_at": "2025-06-24T19:20:37+00:00",
"comment_author": "Copilot",
"comment_body": "There is a hidden BOM character at the start of the file. Removing it will prevent encoding-related issues in CI and editors.\n```suggestion\n<Project Sdk=\"Microsoft.NET.Sdk\">\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2151210517",
"pr_number": 49826,
"pr_file": "sdk/managednetworkfabric/Azure.ResourceManager.ManagedNetworkFabric/src/Azure.ResourceManager.ManagedNetworkFabric.csproj",
"created_at": "2025-06-17T02:43:55+00:00",
"commented_code": "<Project Sdk=\"Microsoft.NET.Sdk\">\n\ufeff<Project Sdk=\"Microsoft.NET.Sdk\">\n <PropertyGroup>\n <Version>1.2.0-beta.1</Version>\n <!--The ApiCompatVersion is managed automatically and should not generally be modified manually.-->\n <ApiCompatVersion>1.1.2</ApiCompatVersion>",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2151210517",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49826,
"pr_file": "sdk/managednetworkfabric/Azure.ResourceManager.ManagedNetworkFabric/src/Azure.ResourceManager.ManagedNetworkFabric.csproj",
"discussion_id": "2151210517",
"commented_code": "@@ -1,10 +1,8 @@\n-<Project Sdk=\"Microsoft.NET.Sdk\">\n+\ufeff<Project Sdk=\"Microsoft.NET.Sdk\">\n <PropertyGroup>\n <Version>1.2.0-beta.1</Version>\n- <!--The ApiCompatVersion is managed automatically and should not generally be modified manually.-->\n- <ApiCompatVersion>1.1.2</ApiCompatVersion>",
"comment_created_at": "2025-06-17T02:43:55+00:00",
"comment_author": "ArcturusZhang",
"comment_body": "I believe you have met build failures about the apicompat and found out removing this would \"fix\" the error.\r\nBut this is only hiding the issue instead of resolving it.\r\nPlease revert this back and let the pipeline fails. We will find other ways to introduce real fixes to those errors.",
"pr_file_module": null
},
{
"comment_id": "2152337212",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49826,
"pr_file": "sdk/managednetworkfabric/Azure.ResourceManager.ManagedNetworkFabric/src/Azure.ResourceManager.ManagedNetworkFabric.csproj",
"discussion_id": "2151210517",
"commented_code": "@@ -1,10 +1,8 @@\n-<Project Sdk=\"Microsoft.NET.Sdk\">\n+\ufeff<Project Sdk=\"Microsoft.NET.Sdk\">\n <PropertyGroup>\n <Version>1.2.0-beta.1</Version>\n- <!--The ApiCompatVersion is managed automatically and should not generally be modified manually.-->\n- <ApiCompatVersion>1.1.2</ApiCompatVersion>",
"comment_created_at": "2025-06-17T13:51:09+00:00",
"comment_author": "nafizhaider32",
"comment_body": "done",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,94 @@
[
{
"discussion_id": "2169802413",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"created_at": "2025-06-26T19:21:52+00:00",
"commented_code": "<PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2169802413",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-26T19:21:52+00:00",
"comment_author": "jsquire",
"comment_body": "@KrzysztofCwalina: Would you please confirm approval of this dependency and, if approved, whether that is specific to AI Agents or generally?",
"pr_file_module": null
},
{
"comment_id": "2170313212",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-26T23:53:17+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "yes. it's approved for this package. ",
"pr_file_module": null
},
{
"comment_id": "2170487188",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-27T01:41:08+00:00",
"comment_author": "jsquire",
"comment_body": "@dmytrostruk: Since the dependency approval is specific to this library, we'll need to create a conditional block for it so that it is not usable across the repository. Please follow the pattern [here](https://github.com/Azure/azure-sdk-for-net/blob/main/eng/Packages.Data.props#L209) and create a block right above the one for `AI.Projects`",
"pr_file_module": null
},
{
"comment_id": "2172731404",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-27T19:32:07+00:00",
"comment_author": "dmytrostruk",
"comment_body": "@jsquire Done.",
"pr_file_module": null
},
{
"comment_id": "2173045145",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-28T00:37:20+00:00",
"comment_author": "jsquire",
"comment_body": "Thanks. Looks good. Once we're able to work through the account configuration, I'll remove my block.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2093720588",
"pr_number": 50097,
"pr_file": "eng/Packages.Data.props",
"created_at": "2025-05-16T21:59:26+00:00",
"commented_code": "<PackageReference Update=\"Microsoft.CodeAnalysis.CSharp.Workspaces\" Version=\"4.8.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n <PackageReference Update=\"Microsoft.Data.SqlClient\" Version=\"5.2.2\" />\n <PackageReference Update=\"Microsoft.Extensions.AI\" Version=\"9.5.0\" /> <!-- Approved for Azure.AI.Inference.Tests -->",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2093720588",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50097,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2093720588",
"commented_code": "@@ -374,7 +379,9 @@\n <PackageReference Update=\"Microsoft.CodeAnalysis.CSharp.Workspaces\" Version=\"4.8.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n <PackageReference Update=\"Microsoft.Data.SqlClient\" Version=\"5.2.2\" />\n+ <PackageReference Update=\"Microsoft.Extensions.AI\" Version=\"9.5.0\" /> <!-- Approved for Azure.AI.Inference.Tests -->",
"comment_created_at": "2025-05-16T21:59:26+00:00",
"comment_author": "jsquire",
"comment_body": "```suggestion\r\n <PackageReference Update=\"Microsoft.Extensions.AI\" Version=\"9.5.0\" /> <!-- 9.x approved for test project use, as there is no 8.x version available. -->\r\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -23,99 +23,3 @@ All AI-related dependencies (Microsoft.Extensions.AI.*, etc.) require explicit a
<PackageReference Update="Microsoft.Extensions.AI.Abstractions" Version="9.6.0"/>
</ItemGroup>
```
[
{
"discussion_id": "2169802413",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"created_at": "2025-06-26T19:21:52+00:00",
"commented_code": "<PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2169802413",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-26T19:21:52+00:00",
"comment_author": "jsquire",
"comment_body": "@KrzysztofCwalina: Would you please confirm approval of this dependency and, if approved, whether that is specific to AI Agents or generally?",
"pr_file_module": null
},
{
"comment_id": "2170313212",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-26T23:53:17+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "yes. it's approved for this package. ",
"pr_file_module": null
},
{
"comment_id": "2170487188",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-27T01:41:08+00:00",
"comment_author": "jsquire",
"comment_body": "@dmytrostruk: Since the dependency approval is specific to this library, we'll need to create a conditional block for it so that it is not usable across the repository. Please follow the pattern [here](https://github.com/Azure/azure-sdk-for-net/blob/main/eng/Packages.Data.props#L209) and create a block right above the one for `AI.Projects`",
"pr_file_module": null
},
{
"comment_id": "2172731404",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-27T19:32:07+00:00",
"comment_author": "dmytrostruk",
"comment_body": "@jsquire Done.",
"pr_file_module": null
},
{
"comment_id": "2173045145",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50898,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2169802413",
"commented_code": "@@ -103,6 +103,7 @@\n <PackageReference Update=\"System.ValueTuple\" Version=\"4.5.0\" />\n <PackageReference Update=\"Microsoft.Bcl.AsyncInterfaces\" Version=\"8.0.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n+\t\t<PackageReference Update=\"Microsoft.Extensions.AI.Abstractions\" Version=\"9.6.0\"/>",
"comment_created_at": "2025-06-28T00:37:20+00:00",
"comment_author": "jsquire",
"comment_body": "Thanks. Looks good. Once we're able to work through the account configuration, I'll remove my block.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2093720588",
"pr_number": 50097,
"pr_file": "eng/Packages.Data.props",
"created_at": "2025-05-16T21:59:26+00:00",
"commented_code": "<PackageReference Update=\"Microsoft.CodeAnalysis.CSharp.Workspaces\" Version=\"4.8.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n <PackageReference Update=\"Microsoft.Data.SqlClient\" Version=\"5.2.2\" />\n <PackageReference Update=\"Microsoft.Extensions.AI\" Version=\"9.5.0\" /> <!-- Approved for Azure.AI.Inference.Tests -->",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2093720588",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50097,
"pr_file": "eng/Packages.Data.props",
"discussion_id": "2093720588",
"commented_code": "@@ -374,7 +379,9 @@\n <PackageReference Update=\"Microsoft.CodeAnalysis.CSharp.Workspaces\" Version=\"4.8.0\" />\n <PackageReference Update=\"Microsoft.CSharp\" Version=\"4.7.0\" />\n <PackageReference Update=\"Microsoft.Data.SqlClient\" Version=\"5.2.2\" />\n+ <PackageReference Update=\"Microsoft.Extensions.AI\" Version=\"9.5.0\" /> <!-- Approved for Azure.AI.Inference.Tests -->",
"comment_created_at": "2025-05-16T21:59:26+00:00",
"comment_author": "jsquire",
"comment_body": "```suggestion\r\n <PackageReference Update=\"Microsoft.Extensions.AI\" Version=\"9.5.0\" /> <!-- 9.x approved for test project use, as there is no 8.x version available. -->\r\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,128 @@
[
{
"discussion_id": "2145400353",
"pr_number": 50610,
"pr_file": "eng/common/pipelines/templates/steps/install-dotnet.yml",
"created_at": "2025-06-13T15:51:09+00:00",
"commented_code": "parameters:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2145400353",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50610,
"pr_file": "eng/common/pipelines/templates/steps/install-dotnet.yml",
"discussion_id": "2145400353",
"commented_code": "@@ -0,0 +1,28 @@\n+parameters:",
"comment_created_at": "2025-06-13T15:51:09+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot remove this and use the existing https://github.com/Azure/azure-sdk-for-net/blob/main/eng/pipelines/templates/steps/install-dotnet.yml",
"pr_file_module": null
},
{
"comment_id": "2145412524",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50610,
"pr_file": "eng/common/pipelines/templates/steps/install-dotnet.yml",
"discussion_id": "2145400353",
"commented_code": "@@ -0,0 +1,28 @@\n+parameters:",
"comment_created_at": "2025-06-13T15:54:53+00:00",
"comment_author": "Copilot",
"comment_body": "Done! Removed the duplicate file and updated both CI files to use the existing template at `/eng/pipelines/templates/steps/install-dotnet.yml`. Changes in commit 346bb80b4.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2136686191",
"pr_number": 50505,
"pr_file": "eng/pipelines/templates/variables/image.yml",
"created_at": "2025-06-09T23:58:08+00:00",
"commented_code": "variables:\n - name: LINUXPOOL\n value: azsdk-pool-mms-ubuntu-2004-general\n value: azsdk-pool\n - name: WINDOWSPOOL\n value: azsdk-pool-mms-win-2022-general\n - name: MACPOOL\n value: Azure Pipelines\n\n - name: LINUXVMIMAGE\n value: azsdk-pool-mms-ubuntu-2004-1espt\n value: ubuntu-24.04\n - name: LINUXNEXTVMIMAGE\n value: ubuntu-22.04\n - name: WINDOWSVMIMAGE",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2136686191",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "eng/pipelines/templates/variables/image.yml",
"discussion_id": "2136686191",
"commented_code": "@@ -2,14 +2,14 @@\n \n variables:\n - name: LINUXPOOL\n- value: azsdk-pool-mms-ubuntu-2004-general\n+ value: azsdk-pool\n - name: WINDOWSPOOL\n value: azsdk-pool-mms-win-2022-general\n - name: MACPOOL\n value: Azure Pipelines\n \n - name: LINUXVMIMAGE\n- value: azsdk-pool-mms-ubuntu-2004-1espt\n+ value: ubuntu-24.04\n - name: LINUXNEXTVMIMAGE\n value: ubuntu-22.04\n - name: WINDOWSVMIMAGE",
"comment_created_at": "2025-06-09T23:58:08+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot update `azsdk-pool-mms-win-2022-1espt` to `windows-2022` everywhere",
"pr_file_module": null
},
{
"comment_id": "2136698127",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "eng/pipelines/templates/variables/image.yml",
"discussion_id": "2136686191",
"commented_code": "@@ -2,14 +2,14 @@\n \n variables:\n - name: LINUXPOOL\n- value: azsdk-pool-mms-ubuntu-2004-general\n+ value: azsdk-pool\n - name: WINDOWSPOOL\n value: azsdk-pool-mms-win-2022-general\n - name: MACPOOL\n value: Azure Pipelines\n \n - name: LINUXVMIMAGE\n- value: azsdk-pool-mms-ubuntu-2004-1espt\n+ value: ubuntu-24.04\n - name: LINUXNEXTVMIMAGE\n value: ubuntu-22.04\n - name: WINDOWSVMIMAGE",
"comment_created_at": "2025-06-10T00:14:51+00:00",
"comment_author": "Copilot",
"comment_body": "Updated all references to `azsdk-pool-mms-win-2022-1espt` to use `windows-2022` everywhere. Updated the WINDOWSVMIMAGE variable in image.yml and the sourceAnalysisPool image in 1es-redirect.yml. (4380fd8)",
"pr_file_module": null
}
]
},
{
"discussion_id": "2136720980",
"pr_number": 50505,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"created_at": "2025-06-10T00:48:28+00:00",
"commented_code": "- eng/CredScanSuppression.json\n\npool:\n name: azsdk-pool-mms-win-2022-general\n name: azsdk-pool\n vmImage: windows-2022",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2136720980",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"discussion_id": "2136720980",
"commented_code": "@@ -10,7 +10,7 @@ pr:\n - eng/CredScanSuppression.json\n \n pool:\n- name: azsdk-pool-mms-win-2022-general\n+ name: azsdk-pool\n vmImage: windows-2022",
"comment_created_at": "2025-06-10T00:48:28+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot anywhere you see a `vmImage:` convert it to a demand using `demand: $(LinuxImageDemand)` and make sure the job or stage is including the variables template `/eng/pipelines/templates/variables/image.yml` so we can use the variables in there. Also update the pool.name to use the correct variable from image.yml",
"pr_file_module": null
},
{
"comment_id": "2136728696",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"discussion_id": "2136720980",
"commented_code": "@@ -10,7 +10,7 @@ pr:\n - eng/CredScanSuppression.json\n \n pool:\n- name: azsdk-pool-mms-win-2022-general\n+ name: azsdk-pool\n vmImage: windows-2022",
"comment_created_at": "2025-06-10T00:58:35+00:00",
"comment_author": "Copilot",
"comment_body": "Converted all vmImage references to demand pattern as requested. Added WindowsImageDemand and MacImageDemand variables to image.yml template, updated pool.name references to use variables from image.yml, and ensured all affected pipeline files include the image variables template. (bcb4f3c)",
"pr_file_module": null
},
{
"comment_id": "2136786256",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"discussion_id": "2136720980",
"commented_code": "@@ -10,7 +10,7 @@ pr:\n - eng/CredScanSuppression.json\n \n pool:\n- name: azsdk-pool-mms-win-2022-general\n+ name: azsdk-pool\n vmImage: windows-2022",
"comment_created_at": "2025-06-10T02:15:25+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot Undo the changes for the MacImageDemand as mac doesn't use demands. ",
"pr_file_module": null
},
{
"comment_id": "2136796277",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"discussion_id": "2136720980",
"commented_code": "@@ -10,7 +10,7 @@ pr:\n - eng/CredScanSuppression.json\n \n pool:\n- name: azsdk-pool-mms-win-2022-general\n+ name: azsdk-pool\n vmImage: windows-2022",
"comment_created_at": "2025-06-10T02:26:59+00:00",
"comment_author": "Copilot",
"comment_body": "Undid the MacImageDemand changes as requested. Removed the MacImageDemand variable and reverted all macOS pipeline configurations back to using vmImage pattern instead of demands. Windows and Linux continue to use the demand pattern. (91b95ee)",
"pr_file_module": null
}
]
}
]

View File

@@ -42,133 +42,3 @@ jobs:
```
Centralizing pool names and image versions in one variables template makes it easy to update image configurations across all pipelines, and it supports platform-specific patterns: Linux and Windows jobs can select images through pool demands while macOS jobs keep the `vmImage` field.

View File

@@ -0,0 +1,156 @@
[
{
"discussion_id": "2174433301",
"pr_number": 50954,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"created_at": "2025-06-30T07:41:56+00:00",
"commented_code": "}\n // TODO: handle parents\n // Handle resource name - the last contextual parameter\n else if (parameter.Name.Equals(ContextualParameters.Last(), StringComparison.InvariantCultureIgnoreCase))\n else if (parameter.Name.Equals(ContextualParameters.LastOrDefault(), StringComparison.InvariantCultureIgnoreCase))",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2174433301",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50954,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"discussion_id": "2174433301",
"commented_code": "@@ -357,7 +357,7 @@ public ValueExpression[] PopulateArguments(\n }\n // TODO: handle parents\n // Handle resource name - the last contextual parameter\n- else if (parameter.Name.Equals(ContextualParameters.Last(), StringComparison.InvariantCultureIgnoreCase))\n+ else if (parameter.Name.Equals(ContextualParameters.LastOrDefault(), StringComparison.InvariantCultureIgnoreCase))",
"comment_created_at": "2025-06-30T07:41:56+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider storing the result of `ContextualParameters.LastOrDefault()` in a local variable and checking for null before calling `Equals` to improve readability and make the null comparison explicit.\n```suggestion\n var lastContextualParameter = ContextualParameters.LastOrDefault();\n if (lastContextualParameter != null && parameter.Name.Equals(lastContextualParameter, StringComparison.InvariantCultureIgnoreCase))\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2167764022",
"pr_number": 50890,
"pr_file": "sdk/tables/Azure.Data.Tables/src/TableErrorCode.cs",
"created_at": "2025-06-25T22:41:33+00:00",
"commented_code": "public static bool operator ==(TableErrorCode left, TableErrorCode right) => left.Equals(right);\n /// <summary> Determines if two <see cref=\"TableErrorCode\"/> values are not the same. </summary>\n public static bool operator !=(TableErrorCode left, TableErrorCode right) => !left.Equals(right);\n\n /// <summary> Determines if a <see cref=\"TableErrorCode\"/> and a string are equal. </summary>\n public static bool operator ==(TableErrorCode code, string value) => value != null && code.Equals(value);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2167764022",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50890,
"pr_file": "sdk/tables/Azure.Data.Tables/src/TableErrorCode.cs",
"discussion_id": "2167764022",
"commented_code": "@@ -215,6 +215,19 @@ public TableErrorCode(string value)\n public static bool operator ==(TableErrorCode left, TableErrorCode right) => left.Equals(right);\n /// <summary> Determines if two <see cref=\"TableErrorCode\"/> values are not the same. </summary>\n public static bool operator !=(TableErrorCode left, TableErrorCode right) => !left.Equals(right);\n+\n+ /// <summary> Determines if a <see cref=\"TableErrorCode\"/> and a string are equal. </summary>\n+ public static bool operator ==(TableErrorCode code, string value) => value != null && code.Equals(value);",
"comment_created_at": "2025-06-25T22:41:33+00:00",
"comment_author": "Copilot",
"comment_body": "If TableErrorCode can ever be null, consider adding a null check on the 'code' parameter before calling code.Equals(value) to avoid a potential NullReferenceException.\n```suggestion\n public static bool operator ==(TableErrorCode code, string value) => code != null && value != null && code.Equals(value);\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156832113",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/tests/Unit/JsonAsserts.cs",
"created_at": "2025-06-19T11:54:22+00:00",
"commented_code": "Assert.AreEqual(expected, text);\n }\n\n public static void AssertConverterSerialization(string expected, object model, JsonSerializerOptions options = default)\n public static void AssertConverterSerialization<T>(string expected, T model, ModelReaderWriterOptions? options = null)\n {\n using var memoryStream = new MemoryStream();\n\n using (var writer = new Utf8JsonWriter(memoryStream))\n {\n JsonSerializer.Serialize(writer, model, options);\n var jsonModel = model as IJsonModel<T>;\n jsonModel?.Write(writer, options ?? new ModelReaderWriterOptions(\"W\"));",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156832113",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/tests/Unit/JsonAsserts.cs",
"discussion_id": "2156832113",
"commented_code": "@@ -22,13 +25,14 @@ public static void AssertSerialization(string expected, IUtf8JsonSerializable se\n Assert.AreEqual(expected, text);\n }\n \n- public static void AssertConverterSerialization(string expected, object model, JsonSerializerOptions options = default)\n+ public static void AssertConverterSerialization<T>(string expected, T model, ModelReaderWriterOptions? options = null)\n {\n using var memoryStream = new MemoryStream();\n \n using (var writer = new Utf8JsonWriter(memoryStream))\n {\n- JsonSerializer.Serialize(writer, model, options);\n+ var jsonModel = model as IJsonModel<T>;\n+ jsonModel?.Write(writer, options ?? new ModelReaderWriterOptions(\"W\"));",
"comment_created_at": "2025-06-19T11:54:22+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] If the cast to IJsonModel<T> fails (resulting in jsonModel being null), the subsequent call to Write may silently be skipped; consider adding an assertion or fallback to handle unexpected types.\n```suggestion\n if (jsonModel == null)\n {\n throw new InvalidOperationException($\"The provided model is not of type {typeof(IJsonModel<T>)}.\");\n }\n jsonModel.Write(writer, options ?? new ModelReaderWriterOptions(\"W\"));\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141812407",
"pr_number": 50568,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"created_at": "2025-06-12T06:25:42+00:00",
"commented_code": "if (convenienceMethod != null)\n {\n var resource = convenienceMethod.Signature.Parameters\n .Single(p => p.Type.Equals(ResourceData.Type));\n .Single(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n arguments.Add(resource);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141812407",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50568,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"discussion_id": "2141812407",
"commented_code": "@@ -335,7 +330,7 @@ public ValueExpression[] PopulateArguments(\n if (convenienceMethod != null)\n {\n var resource = convenienceMethod.Signature.Parameters\n- .Single(p => p.Type.Equals(ResourceData.Type));\n+ .Single(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n arguments.Add(resource);",
"comment_created_at": "2025-06-12T06:25:42+00:00",
"comment_author": "Copilot",
"comment_body": "Using `Single(...)` will throw if multiple parameters match or none match. Consider using `FirstOrDefault(...)` and handling a null result explicitly.\n```suggestion\n .FirstOrDefault(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n if (resource != null)\n {\n arguments.Add(resource);\n }\n else\n {\n // Handle the case where no matching parameter is found\n throw new InvalidOperationException(\"No matching parameter found for ResourceData.Type or RequestContent.\");\n }\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141812419",
"pr_number": 50568,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/OperationMethodProviders/ResourceOperationMethodProvider.cs",
"created_at": "2025-06-12T06:25:42+00:00",
"commented_code": "private TryExpression BuildTryExpression()\n {\n var cancellationTokenParameter = _convenienceMethod.Signature.Parameters.Single(p => p.Type.Equals(typeof(CancellationToken)));\n\n var cancellationTokenParameter = _convenienceMethod.Signature.Parameters.FirstOrDefault(p => p.Type.Equals(typeof(CancellationToken)));\n var requestMethod = _resourceClientProvider.GetClientProvider().GetRequestMethodByOperation(_serviceMethod.Operation);\n\n var tryStatements = new List<MethodBodyStatement>\n var tryStatements = new List<MethodBodyStatement>();\n VariableExpression contextVariable;\n\n // If the cancellation token parameter is not provided, we create a default one otherwise we use the provided one.\n // This is to ensure that the RequestContext is always created with a cancellation token.\n if (cancellationTokenParameter != null)\n {\n ResourceMethodSnippets.CreateRequestContext(cancellationTokenParameter, out var contextVariable)\n };\n tryStatements.Add(ResourceMethodSnippets.CreateRequestContext(cancellationTokenParameter, out contextVariable));\n }\n else\n {\n contextVariable = _convenienceMethod.Signature.Parameters.Single(p => p.Type.Equals(typeof(RequestContext))).AsExpression();",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141812419",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50568,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/OperationMethodProviders/ResourceOperationMethodProvider.cs",
"discussion_id": "2141812419",
"commented_code": "@@ -87,14 +87,22 @@ protected virtual MethodSignature CreateSignature()\n \n private TryExpression BuildTryExpression()\n {\n- var cancellationTokenParameter = _convenienceMethod.Signature.Parameters.Single(p => p.Type.Equals(typeof(CancellationToken)));\n-\n+ var cancellationTokenParameter = _convenienceMethod.Signature.Parameters.FirstOrDefault(p => p.Type.Equals(typeof(CancellationToken)));\n var requestMethod = _resourceClientProvider.GetClientProvider().GetRequestMethodByOperation(_serviceMethod.Operation);\n \n- var tryStatements = new List<MethodBodyStatement>\n+ var tryStatements = new List<MethodBodyStatement>();\n+ VariableExpression contextVariable;\n+\n+ // If the cancellation token parameter is not provided, we create a default one otherwise we use the provided one.\n+ // This is to ensure that the RequestContext is always created with a cancellation token.\n+ if (cancellationTokenParameter != null)\n {\n- ResourceMethodSnippets.CreateRequestContext(cancellationTokenParameter, out var contextVariable)\n- };\n+ tryStatements.Add(ResourceMethodSnippets.CreateRequestContext(cancellationTokenParameter, out contextVariable));\n+ }\n+ else\n+ {\n+ contextVariable = _convenienceMethod.Signature.Parameters.Single(p => p.Type.Equals(typeof(RequestContext))).AsExpression();",
"comment_created_at": "2025-06-12T06:25:42+00:00",
"comment_author": "Copilot",
"comment_body": "This `Single(...)` will throw if the `RequestContext` parameter is missing. Consider using `FirstOrDefault(...)` with a fallback or validating existence before calling `Single`.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124906481",
"pr_number": 50391,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/src/Custom/UploadFileRequest.Serialization.cs",
"created_at": "2025-06-03T20:52:54+00:00",
"commented_code": "internal virtual MultipartFormDataRequestContent ToMultipartRequestContent()\n {\n MultipartFormDataRequestContent content = new();\n content.Add(Data, \"file\", Filename);\n ContentDispositionHeaderValue header = new(\"form-data\") { Name = \"file\"};\n var _dataStream = new StreamContent(Data);\n if (System.Linq.Enumerable.Any(Filename, c => c > 127))\n {\n header.FileNameStar = Filename;\n }\n else\n {\n header.FileName = Filename;\n }\n _dataStream.Headers.ContentDisposition = header;\n (content.HttpContent as System.Net.Http.MultipartFormDataContent).Add(_dataStream, \"file\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124906481",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50391,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/src/Custom/UploadFileRequest.Serialization.cs",
"discussion_id": "2124906481",
"commented_code": "@@ -19,7 +21,18 @@ internal partial class UploadFileRequest : IUtf8JsonSerializable\n internal virtual MultipartFormDataRequestContent ToMultipartRequestContent()\n {\n MultipartFormDataRequestContent content = new();\n- content.Add(Data, \"file\", Filename);\n+ ContentDispositionHeaderValue header = new(\"form-data\") { Name = \"file\"};\n+ var _dataStream = new StreamContent(Data);\n+ if (System.Linq.Enumerable.Any(Filename, c => c > 127))\n+ {\n+ header.FileNameStar = Filename;\n+ }\n+ else\n+ {\n+ header.FileName = Filename;\n+ }\n+ _dataStream.Headers.ContentDisposition = header;\n+ (content.HttpContent as System.Net.Http.MultipartFormDataContent).Add(_dataStream, \"file\");",
"comment_created_at": "2025-06-03T20:52:54+00:00",
"comment_author": "Copilot",
"comment_body": "Using an 'as' cast here can yield null and lead to a NullReferenceException. Consider using a direct cast or checking for null before calling Add(), or refactor to expose the underlying MultipartFormDataContent safely.\n```suggestion\n ((System.Net.Http.MultipartFormDataContent)content.HttpContent).Add(_dataStream, \"file\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2108313204",
"pr_number": 50274,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Extensions/ClientProviderExtensions.cs",
"created_at": "2025-05-27T06:40:48+00:00",
"commented_code": "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nusing Microsoft.TypeSpec.Generator.ClientModel.Providers;\nusing Microsoft.TypeSpec.Generator.Input;\nusing Microsoft.TypeSpec.Generator.Providers;\n\nnamespace Azure.Generator.Management.Extensions\n{\n internal static class ClientProviderExtensions\n {\n public static MethodProvider GetConvenienceMethodByOperation(this ClientProvider clientProvider, InputOperation operation, bool isAsync)\n {\n var methods = clientProvider.GetMethodCollectionByOperation(operation);\n return isAsync ? methods[^1] : methods[^2];",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2108313204",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50274,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Extensions/ClientProviderExtensions.cs",
"discussion_id": "2108313204",
"commented_code": "@@ -0,0 +1,18 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+using Microsoft.TypeSpec.Generator.ClientModel.Providers;\n+using Microsoft.TypeSpec.Generator.Input;\n+using Microsoft.TypeSpec.Generator.Providers;\n+\n+namespace Azure.Generator.Management.Extensions\n+{\n+ internal static class ClientProviderExtensions\n+ {\n+ public static MethodProvider GetConvenienceMethodByOperation(this ClientProvider clientProvider, InputOperation operation, bool isAsync)\n+ {\n+ var methods = clientProvider.GetMethodCollectionByOperation(operation);\n+ return isAsync ? methods[^1] : methods[^2];",
"comment_created_at": "2025-05-27T06:40:48+00:00",
"comment_author": "Copilot",
"comment_body": "Consider adding a guard check to ensure the 'methods' collection has at least two elements before indexing; this will prevent potential runtime errors if the collection contains fewer items than expected.",
"pr_file_module": null
}
]
}
]

View File

@@ -59,161 +59,3 @@ else
{
    // Handle the case where no matching parameter exists
}
```
Using `FirstOrDefault` with an explicit null check keeps the failure mode visible: you can throw an exception with a descriptive message instead of the opaque `InvalidOperationException` that `Single()` or `Last()` raises on an empty or ambiguous match.
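For completeness, here is a minimal sketch of the pattern as a helper method; the `Parameter` record and the `FindRequiredParameter` name are hypothetical stand-ins for illustration, not types from the generator:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical stand-in for a method-signature parameter.
public sealed record Parameter(string Name, Type Type);

public static class SignatureHelpers
{
    // Looks up the single expected parameter of the given type. FirstOrDefault
    // never throws on an empty sequence, so the missing-parameter case can be
    // reported with a descriptive message rather than surfacing as an opaque
    // InvalidOperationException from Single().
    public static Parameter FindRequiredParameter(
        IReadOnlyList<Parameter> parameters, Type type)
    {
        var match = parameters.FirstOrDefault(p => p.Type == type);
        if (match is null)
        {
            throw new InvalidOperationException(
                $"No parameter of type '{type}' was found in the method signature.");
        }
        return match;
    }
}
```

Callers that can tolerate a missing parameter can branch on the null result instead of throwing, as in the else block above.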

View File

@@ -0,0 +1,24 @@
[
{
"discussion_id": "2152576930",
"pr_number": 50644,
"pr_file": "sdk/core/System.ClientModel/tests/Auth/AuthenticationTokenProviderTests.cs",
"created_at": "2025-06-17T15:31:33+00:00",
"commented_code": "options.Transport = new MockPipelineTransport(\"foo\",\n m =>\n {\n // Assert that the request has no authentication headers\n Assert.IsFalse(m.Request.Headers.TryGetValue(\"Authorization\", out _), \"Request should not have an Authorization header.\");\n m.TryGetProperty(typeof(GetTokenOptions), out var flowsObj);\n if (\n flowsObj == null ||\n (flowsObj is Dictionary<string, object>[] flowsArr && flowsArr.Length == 0)\n )\n {\n // Only assert no Authorization header if operation does not override the service level flows.\n Assert.IsFalse(m.Request.Headers.TryGetValue(\"Authorization\", out _), \"Request should not have an Authorization header.\");\n }",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2152576930",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50644,
"pr_file": "sdk/core/System.ClientModel/tests/Auth/AuthenticationTokenProviderTests.cs",
"discussion_id": "2152576930",
"commented_code": "@@ -123,17 +194,27 @@ public NoAuthClient(Uri uri, AuthenticationTokenProvider credential)\n options.Transport = new MockPipelineTransport(\"foo\",\n m =>\n {\n- // Assert that the request has no authentication headers\n- Assert.IsFalse(m.Request.Headers.TryGetValue(\"Authorization\", out _), \"Request should not have an Authorization header.\");\n+ m.TryGetProperty(typeof(GetTokenOptions), out var flowsObj);\n+ if (\n+ flowsObj == null ||\n+ (flowsObj is Dictionary<string, object>[] flowsArr && flowsArr.Length == 0)\n+ )\n+ {\n+ // Only assert no Authorization header if operation does not override the service level flows.\n+ Assert.IsFalse(m.Request.Headers.TryGetValue(\"Authorization\", out _), \"Request should not have an Authorization header.\");\n+ }",
"comment_created_at": "2025-06-17T15:31:33+00:00",
"comment_author": "christothes",
"comment_body": "we need an else block here to assert that the Authorization header is present and populated.",
"pr_file_module": null
}
]
}
]

View File

@@ -33,29 +33,3 @@ else
```
This pattern exercises your authentication mechanism in both directions: requests that should be anonymous carry no credentials, and requests that should be authenticated carry a populated Authorization header. Verifying both branches is critical for maintaining security boundaries.
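As a hedged sketch, the two-branch version of the transport callback from the discussion could look like this; it assumes the same `MockPipelineTransport`, `GetTokenOptions`, and NUnit-style `Assert` as the test above, so it is a fragment of that test rather than a complete, standalone one:

```csharp
options.Transport = new MockPipelineTransport("foo", m =>
{
    m.TryGetProperty(typeof(GetTokenOptions), out var flowsObj);
    bool noFlowsConfigured = flowsObj == null ||
        (flowsObj is Dictionary<string, object>[] flowsArr && flowsArr.Length == 0);

    if (noFlowsConfigured)
    {
        // Negative case: the operation does not override the service-level
        // flows, so no Authorization header should be attached.
        Assert.IsFalse(m.Request.Headers.TryGetValue("Authorization", out _),
            "Request should not have an Authorization header.");
    }
    else
    {
        // Positive case: flows are in effect, so the header must be present
        // and populated.
        Assert.IsTrue(m.Request.Headers.TryGetValue("Authorization", out var authHeader),
            "Request should have an Authorization header.");
        Assert.IsFalse(string.IsNullOrEmpty(authHeader),
            "Authorization header should be populated.");
    }
});
```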

View File

@@ -0,0 +1,94 @@
[
{
"discussion_id": "2159314989",
"pr_number": 50751,
"pr_file": "sdk/resources/Azure.ResourceManager.Resources.Bicep/assets.json",
"created_at": "2025-06-20T16:08:34+00:00",
"commented_code": "{\n \"AssetsRepo\": \"Azure/azure-sdk-assets\",\n \"AssetsRepoPrefixPath\": \"net\",\n \"TagPrefix\": \"net/bicep/Azure.ResourceManager.Resources.Bicep\",\n \"Tag\": \"\"",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2159314989",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50751,
"pr_file": "sdk/resources/Azure.ResourceManager.Resources.Bicep/assets.json",
"discussion_id": "2159314989",
"commented_code": "@@ -0,0 +1,6 @@\n+{\n+ \"AssetsRepo\": \"Azure/azure-sdk-assets\",\n+ \"AssetsRepoPrefixPath\": \"net\",\n+ \"TagPrefix\": \"net/bicep/Azure.ResourceManager.Resources.Bicep\",\n+ \"Tag\": \"\"",
"comment_created_at": "2025-06-20T16:08:34+00:00",
"comment_author": "Copilot",
"comment_body": "Populate the `Tag` field with the appropriate version or tag (e.g., `1.0.0-beta.1`) so that asset publishing pipelines can correctly identify and publish the Bicep package.\n```suggestion\n \"Tag\": \"1.0.0\"\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2136717637",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"created_at": "2025-06-10T00:43:19+00:00",
"commented_code": "{\n \"Agent\": {\n \"msi_image\": {\n \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2136717637",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-10T00:43:19+00:00",
"comment_author": "weshaggard",
"comment_body": "@christothes do you know if this custom pool is still needed? What is special about it?",
"pr_file_module": null
},
{
"comment_id": "2138048426",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-10T14:24:22+00:00",
"comment_author": "christothes",
"comment_body": "It's used for Managed Identity live tests. @benbp set it up for me a while back so that it had a user-assigned managed identity associated with it. ",
"pr_file_module": null
},
{
"comment_id": "2138051652",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-10T14:25:48+00:00",
"comment_author": "weshaggard",
"comment_body": "@christothes do you know if it is needed any longer given we use the container image? Or is this a separate set of tests? We would like to eliminate one-off agent pools if we can.",
"pr_file_module": null
},
{
"comment_id": "2139138199",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-11T03:51:54+00:00",
"comment_author": "weshaggard",
"comment_body": "Digging in this configuration hasn't even attempted to run for almost a year because of the [filtering](https://github.com/Azure/azure-sdk-for-net/blob/main/eng/common/pipelines/templates/jobs/generate-job-matrix.yml#L70) we did as part of the 1ES PT work. I'm not going to block my PR on the identity tests as they are also failing for other reasons currently. We should circle back and decide if there is enough value in these to maintain this extra pool or not. ",
"pr_file_module": null
},
{
"comment_id": "2140307880",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-11T14:13:09+00:00",
"comment_author": "christothes",
"comment_body": "created https://github.com/Azure/azure-sdk-for-net/issues/50546 to track.",
"pr_file_module": null
}
]
}
]

View File

@@ -25,99 +25,3 @@ Ensure all CI/CD configuration files have their required fields properly populat
```
When specifying environment resources such as VM images or test pools, reference valid, current resources, and periodically re-evaluate whether custom resources are still needed so the team is not maintaining unnecessary infrastructure.
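
As a concrete illustration, here is a minimal sketch of a `platform-matrix.json` entry assembled from the fragment quoted below; the keys and values come from that discussion, while the overall file shape is assumed:

```json
{
  "Agent": {
    "msi_image": {
      "OSVmImage": "env:LINUXVMIMAGE",
      "Pool": "azsdk-pool-mms-ubuntu-2204-identitymsi"
    }
  }
}
```

Here `env:LINUXVMIMAGE` replaces the retired `env:LINUXNEXTVMIMAGE`, and the custom pool name survives only because the Managed Identity live tests depend on its user-assigned identity, which is exactly the kind of dependency worth re-checking periodically.
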
[
{
"discussion_id": "2159314989",
"pr_number": 50751,
"pr_file": "sdk/resources/Azure.ResourceManager.Resources.Bicep/assets.json",
"created_at": "2025-06-20T16:08:34+00:00",
"commented_code": "{\n \"AssetsRepo\": \"Azure/azure-sdk-assets\",\n \"AssetsRepoPrefixPath\": \"net\",\n \"TagPrefix\": \"net/bicep/Azure.ResourceManager.Resources.Bicep\",\n \"Tag\": \"\"",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2159314989",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50751,
"pr_file": "sdk/resources/Azure.ResourceManager.Resources.Bicep/assets.json",
"discussion_id": "2159314989",
"commented_code": "@@ -0,0 +1,6 @@\n+{\n+ \"AssetsRepo\": \"Azure/azure-sdk-assets\",\n+ \"AssetsRepoPrefixPath\": \"net\",\n+ \"TagPrefix\": \"net/bicep/Azure.ResourceManager.Resources.Bicep\",\n+ \"Tag\": \"\"",
"comment_created_at": "2025-06-20T16:08:34+00:00",
"comment_author": "Copilot",
"comment_body": "Populate the `Tag` field with the appropriate version or tag (e.g., `1.0.0-beta.1`) so that asset publishing pipelines can correctly identify and publish the Bicep package.\n```suggestion\n \"Tag\": \"1.0.0\"\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2136717637",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"created_at": "2025-06-10T00:43:19+00:00",
"commented_code": "{\n \"Agent\": {\n \"msi_image\": {\n \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2136717637",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-10T00:43:19+00:00",
"comment_author": "weshaggard",
"comment_body": "@christothes do you know if this custom pool is still needed? What is special about it?",
"pr_file_module": null
},
{
"comment_id": "2138048426",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-10T14:24:22+00:00",
"comment_author": "christothes",
"comment_body": "It's used for Managed Identity live tests. @benbp set it up for me a while back so that it had a user-assigned managed identity associated with it. ",
"pr_file_module": null
},
{
"comment_id": "2138051652",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-10T14:25:48+00:00",
"comment_author": "weshaggard",
"comment_body": "@christothes do you know if it is needed any longer given we use the container image? Or is this a separate set of tests? We would like to eliminate one-off agent pools if we can.",
"pr_file_module": null
},
{
"comment_id": "2139138199",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-11T03:51:54+00:00",
"comment_author": "weshaggard",
"comment_body": "Digging in this configuration hasn't even attempted to run for almost a year because of the [filtering](https://github.com/Azure/azure-sdk-for-net/blob/main/eng/common/pipelines/templates/jobs/generate-job-matrix.yml#L70) we did as part of the 1ES PT work. I'm not going to block my PR on the identity tests as they are also failing for other reasons currently. We should circle back and decide if there is enough value in these to maintain this extra pool or not. ",
"pr_file_module": null
},
{
"comment_id": "2140307880",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50505,
"pr_file": "sdk/identity/platform-matrix.json",
"discussion_id": "2136717637",
"commented_code": "@@ -6,7 +6,7 @@\n {\n \"Agent\": {\n \"msi_image\": {\n- \"OSVmImage\": \"env:LINUXNEXTVMIMAGE\",\n+ \"OSVmImage\": \"env:LINUXVMIMAGE\",\n \"Pool\": \"azsdk-pool-mms-ubuntu-2204-identitymsi\",",
"comment_created_at": "2025-06-11T14:13:09+00:00",
"comment_author": "christothes",
"comment_body": "created https://github.com/Azure/azure-sdk-for-net/issues/50546 to track.",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,68 @@
[
{
"discussion_id": "2164984072",
"pr_number": 50844,
"pr_file": "eng/common/mcp/azure-sdk-mcp.ps1",
"created_at": "2025-06-24T21:52:22+00:00",
"commented_code": "}\n $vscodeConfig.servers = $orderedServers\n Write-Host \"Updating vscode mcp config at $vscodeConfigPath\"\n $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfig -Force\n $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfigPath -Force",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2164984072",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50844,
"pr_file": "eng/common/mcp/azure-sdk-mcp.ps1",
"discussion_id": "2164984072",
"commented_code": "@@ -51,7 +51,7 @@ if ($UpdateVsCodeConfig) {\n }\n $vscodeConfig.servers = $orderedServers\n Write-Host \"Updating vscode mcp config at $vscodeConfigPath\"\n- $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfig -Force\n+ $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfigPath -Force",
"comment_created_at": "2025-06-24T21:52:22+00:00",
"comment_author": "Copilot",
"comment_body": "Fixed the variable used in the Set-Content command to correctly reference $vscodeConfigPath instead of $vscodeConfig, ensuring the updated VSCode configuration is written to the proper file path.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2143555839",
"pr_number": 50576,
"pr_file": "eng/Directory.Build.Common.targets",
"created_at": "2025-06-12T19:58:02+00:00",
"commented_code": "<PackageReference Remove=\"System.Text.Json\" />\n <PackageReference Remove=\"System.Threading.Channels\" />\n <PackageReference Remove=\"System.Threading.Tasks.Extensions\" />\n <PackageReferences Remove=\"System.Memory\" />",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2143555839",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50576,
"pr_file": "eng/Directory.Build.Common.targets",
"discussion_id": "2143555839",
"commented_code": "@@ -111,6 +111,7 @@\n <PackageReference Remove=\"System.Text.Json\" />\n <PackageReference Remove=\"System.Threading.Channels\" />\n <PackageReference Remove=\"System.Threading.Tasks.Extensions\" />\n+ <PackageReferences Remove=\"System.Memory\" />",
"comment_created_at": "2025-06-12T19:58:02+00:00",
"comment_author": "Copilot",
"comment_body": "The added element uses 'PackageReferences' (plural) instead of the consistent 'PackageReference' (singular) used in the surrounding lines, which could prevent the package from being removed as intended. Please update the tag to '<PackageReference Remove=\"System.Memory\" />'.\n```suggestion\n <PackageReference Remove=\"System.Memory\" />\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141327303",
"pr_number": 50565,
"pr_file": "eng/common/scripts/Invoke-GitHubAPI.ps1",
"created_at": "2025-06-12T00:59:21+00:00",
"commented_code": "[ValidateNotNullOrEmpty()]\n [Parameter(Mandatory = $true)]\n $CommitHash,\n $State=\"open\"\n $State=\"open\",\n $AuthToken\n )\n $uri = \"https://api.github.com/search/issues?q=sha:$CommitHash+state:$State\"\n\n return Invoke-RestMethod `\n -Method GET `\n -Uri $uri `\n -MaximumRetryCount 3\n $params = @{\n Method = 'GET'\n Uri = $uri\n MaximumRetryCount = 3\n }\n if ($AuthToken) {\n $params.Headers = Get-GitHubApiHeaders -token $AuthToken\n }\n return Invoke-RestMethod @params",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141327303",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50565,
"pr_file": "eng/common/scripts/Invoke-GitHubAPI.ps1",
"discussion_id": "2141327303",
"commented_code": "@@ -560,12 +560,17 @@ function Search-GitHubIssues {\n [ValidateNotNullOrEmpty()]\n [Parameter(Mandatory = $true)]\n $CommitHash,\n- $State=\"open\"\n+ $State=\"open\",\n+ $AuthToken\n )\n $uri = \"https://api.github.com/search/issues?q=sha:$CommitHash+state:$State\"\n-\n- return Invoke-RestMethod `\n- -Method GET `\n- -Uri $uri `\n- -MaximumRetryCount 3\n+ $params = @{\n+ Method = 'GET'\n+ Uri = $uri\n+ MaximumRetryCount = 3\n+ }\n+ if ($AuthToken) {\n+ $params.Headers = Get-GitHubApiHeaders -token $AuthToken\n+ }\n+ return Invoke-RestMethod @params",
"comment_created_at": "2025-06-12T00:59:21+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider renaming the '$params' variable to something more descriptive (e.g. $invokeParams) to avoid potential confusion with any built-in parameter variables and to improve code clarity.\n```suggestion\n $invokeParams = @{\n Method = 'GET'\n Uri = $uri\n MaximumRetryCount = 3\n }\n if ($AuthToken) {\n $invokeParams.Headers = Get-GitHubApiHeaders -token $AuthToken\n }\n return Invoke-RestMethod @invokeParams\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -52,73 +52,3 @@ Example of consistent naming in configuration:
```
Descriptive and consistent naming reduces cognitive load for readers, prevents errors from misused variables, and makes the codebase more maintainable.
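
A minimal C# sketch of the same principle; the helper, token, and variable names are hypothetical and not taken from any reviewed file:

```csharp
using System;
using System.Collections.Generic;

class NamingExample
{
    // Hypothetical helper used only to give the variables below something to hold.
    static Dictionary<string, string> BuildHeaders(string authToken) =>
        new() { ["Authorization"] = $"token {authToken}" };

    static void Main()
    {
        // A generic name like "data" or "params" forces readers to trace every
        // usage to learn what the value is for, and is easy to confuse with
        // built-in or nearby variables.
        // var data = BuildHeaders("example-token");

        // A descriptive name states the value's role at the declaration site.
        var requestHeaders = BuildHeaders("example-token");
        Console.WriteLine(requestHeaders["Authorization"]);
    }
}
```
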
[
{
"discussion_id": "2164984072",
"pr_number": 50844,
"pr_file": "eng/common/mcp/azure-sdk-mcp.ps1",
"created_at": "2025-06-24T21:52:22+00:00",
"commented_code": "}\n $vscodeConfig.servers = $orderedServers\n Write-Host \"Updating vscode mcp config at $vscodeConfigPath\"\n $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfig -Force\n $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfigPath -Force",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2164984072",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50844,
"pr_file": "eng/common/mcp/azure-sdk-mcp.ps1",
"discussion_id": "2164984072",
"commented_code": "@@ -51,7 +51,7 @@ if ($UpdateVsCodeConfig) {\n }\n $vscodeConfig.servers = $orderedServers\n Write-Host \"Updating vscode mcp config at $vscodeConfigPath\"\n- $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfig -Force\n+ $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfigPath -Force",
"comment_created_at": "2025-06-24T21:52:22+00:00",
"comment_author": "Copilot",
"comment_body": "Fixed the variable used in the Set-Content command to correctly reference $vscodeConfigPath instead of $vscodeConfig, ensuring the updated VSCode configuration is written to the proper file path.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2143555839",
"pr_number": 50576,
"pr_file": "eng/Directory.Build.Common.targets",
"created_at": "2025-06-12T19:58:02+00:00",
"commented_code": "<PackageReference Remove=\"System.Text.Json\" />\n <PackageReference Remove=\"System.Threading.Channels\" />\n <PackageReference Remove=\"System.Threading.Tasks.Extensions\" />\n <PackageReferences Remove=\"System.Memory\" />",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2143555839",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50576,
"pr_file": "eng/Directory.Build.Common.targets",
"discussion_id": "2143555839",
"commented_code": "@@ -111,6 +111,7 @@\n <PackageReference Remove=\"System.Text.Json\" />\n <PackageReference Remove=\"System.Threading.Channels\" />\n <PackageReference Remove=\"System.Threading.Tasks.Extensions\" />\n+ <PackageReferences Remove=\"System.Memory\" />",
"comment_created_at": "2025-06-12T19:58:02+00:00",
"comment_author": "Copilot",
"comment_body": "The added element uses 'PackageReferences' (plural) instead of the consistent 'PackageReference' (singular) used in the surrounding lines, which could prevent the package from being removed as intended. Please update the tag to '<PackageReference Remove=\"System.Memory\" />'.\n```suggestion\n <PackageReference Remove=\"System.Memory\" />\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141327303",
"pr_number": 50565,
"pr_file": "eng/common/scripts/Invoke-GitHubAPI.ps1",
"created_at": "2025-06-12T00:59:21+00:00",
"commented_code": "[ValidateNotNullOrEmpty()]\n [Parameter(Mandatory = $true)]\n $CommitHash,\n $State=\"open\"\n $State=\"open\",\n $AuthToken\n )\n $uri = \"https://api.github.com/search/issues?q=sha:$CommitHash+state:$State\"\n\n return Invoke-RestMethod `\n -Method GET `\n -Uri $uri `\n -MaximumRetryCount 3\n $params = @{\n Method = 'GET'\n Uri = $uri\n MaximumRetryCount = 3\n }\n if ($AuthToken) {\n $params.Headers = Get-GitHubApiHeaders -token $AuthToken\n }\n return Invoke-RestMethod @params",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141327303",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50565,
"pr_file": "eng/common/scripts/Invoke-GitHubAPI.ps1",
"discussion_id": "2141327303",
"commented_code": "@@ -560,12 +560,17 @@ function Search-GitHubIssues {\n [ValidateNotNullOrEmpty()]\n [Parameter(Mandatory = $true)]\n $CommitHash,\n- $State=\"open\"\n+ $State=\"open\",\n+ $AuthToken\n )\n $uri = \"https://api.github.com/search/issues?q=sha:$CommitHash+state:$State\"\n-\n- return Invoke-RestMethod `\n- -Method GET `\n- -Uri $uri `\n- -MaximumRetryCount 3\n+ $params = @{\n+ Method = 'GET'\n+ Uri = $uri\n+ MaximumRetryCount = 3\n+ }\n+ if ($AuthToken) {\n+ $params.Headers = Get-GitHubApiHeaders -token $AuthToken\n+ }\n+ return Invoke-RestMethod @params",
"comment_created_at": "2025-06-12T00:59:21+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider renaming the '$params' variable to something more descriptive (e.g. $invokeParams) to avoid potential confusion with any built-in parameter variables and to improve code clarity.\n```suggestion\n $invokeParams = @{\n Method = 'GET'\n Uri = $uri\n MaximumRetryCount = 3\n }\n if ($AuthToken) {\n $invokeParams.Headers = Get-GitHubApiHeaders -token $AuthToken\n }\n return Invoke-RestMethod @invokeParams\n```",
"pr_file_module": null
}
]
}
]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,82 @@
[
{
"discussion_id": "2172907663",
"pr_number": 50942,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/CHANGELOG.md",
"created_at": "2025-06-27T21:43:46+00:00",
"commented_code": "# Release History\n\n## 1.1.0-beta.3 (Unreleased)\n## 1.1.0-beta.3 (2025-06-27)\n\n### Features Added\n\n- Tracing for Agents. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#tracing).\n- Convenience constructor for BingCustomSearchToolParameters\n\n### Breaking Changes\n\n### Bugs Fixed\n\n### Other Changes\n- Automatically function toolcalls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2172907663",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50942,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/CHANGELOG.md",
"discussion_id": "2172907663",
"commented_code": "@@ -1,17 +1,12 @@\n # Release History\n \n-## 1.1.0-beta.3 (Unreleased)\n+## 1.1.0-beta.3 (2025-06-27)\n \n ### Features Added\n \n - Tracing for Agents. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#tracing).\n - Convenience constructor for BingCustomSearchToolParameters\n-\n-### Breaking Changes\n-\n-### Bugs Fixed\n-\n-### Other Changes\n+- Automatically function toolcalls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"comment_created_at": "2025-06-27T21:43:46+00:00",
"comment_author": "jhakulin",
"comment_body": "Support for automatic execution of function tool calls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"pr_file_module": null
},
{
"comment_id": "2172910565",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50942,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/CHANGELOG.md",
"discussion_id": "2172907663",
"commented_code": "@@ -1,17 +1,12 @@\n # Release History\n \n-## 1.1.0-beta.3 (Unreleased)\n+## 1.1.0-beta.3 (2025-06-27)\n \n ### Features Added\n \n - Tracing for Agents. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#tracing).\n - Convenience constructor for BingCustomSearchToolParameters\n-\n-### Breaking Changes\n-\n-### Bugs Fixed\n-\n-### Other Changes\n+- Automatically function toolcalls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"comment_created_at": "2025-06-27T21:47:43+00:00",
"comment_author": "nick863",
"comment_body": "https://github.com/Azure/azure-sdk-for-net/pull/50945",
"pr_file_module": null
}
]
},
{
"discussion_id": "2110933580",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"created_at": "2025-05-28T05:15:18+00:00",
"commented_code": "### Breaking Changes\n* Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2110933580",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"discussion_id": "2110933580",
"commented_code": "@@ -19,7 +19,7 @@\n \n ### Breaking Changes\n * Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n-`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n+`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"comment_created_at": "2025-05-28T05:15:18+00:00",
"comment_author": "jhakulin",
"comment_body": "Little rephrase suggestion, could you change to notes like below?\r\n\r\nSupport for project connection string and hub-based projects has been discontinued. We recommend creating a new Azure AI Foundry resource utilizing project endpoint. If this is not possible, please pin the version of `Azure.AI.Projects` to version `1.0.0-beta.8` or earlier.\r\n\r\n",
"pr_file_module": null
},
{
"comment_id": "2110936385",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"discussion_id": "2110933580",
"commented_code": "@@ -19,7 +19,7 @@\n \n ### Breaking Changes\n * Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n-`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n+`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"comment_created_at": "2025-05-28T05:16:47+00:00",
"comment_author": "jhakulin",
"comment_body": "Updated text",
"pr_file_module": null
},
{
"comment_id": "2112481519",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"discussion_id": "2110933580",
"commented_code": "@@ -19,7 +19,7 @@\n \n ### Breaking Changes\n * Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n-`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n+`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"comment_created_at": "2025-05-28T18:10:20+00:00",
"comment_author": "nick863",
"comment_body": "Fixed",
"pr_file_module": null
}
]
}
]

View File

@@ -28,87 +28,3 @@ Example of good documentation:
```
For AI systems whose behavior may not be immediately intuitive (such as automatic function execution or new endpoint requirements), clear documentation helps users understand system capabilities and limitations and reduces integration challenges.
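
A short, hypothetical C# sketch of the documentation the reviewers asked for: the doc comment surfaces the non-obvious behavior (automatic execution of function tool calls) before anyone trips over it at runtime. The type and member names are illustrative and do not come from the actual SDK:

```csharp
public class AgentRunner
{
    /// <summary>Runs the agent until the thread completes.</summary>
    /// <remarks>
    /// Non-obvious behavior, stated up front: when a function tool is registered
    /// with a delegate, the client invokes the delegate and submits its output
    /// back to the service automatically, instead of returning control to the
    /// caller between tool calls.
    /// </remarks>
    public void RunUntilComplete()
    {
        // Implementation elided; the documented contract above is the point.
    }
}
```
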
[
{
"discussion_id": "2172907663",
"pr_number": 50942,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/CHANGELOG.md",
"created_at": "2025-06-27T21:43:46+00:00",
"commented_code": "# Release History\n\n## 1.1.0-beta.3 (Unreleased)\n## 1.1.0-beta.3 (2025-06-27)\n\n### Features Added\n\n- Tracing for Agents. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#tracing).\n- Convenience constructor for BingCustomSearchToolParameters\n\n### Breaking Changes\n\n### Bugs Fixed\n\n### Other Changes\n- Automatically function toolcalls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2172907663",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50942,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/CHANGELOG.md",
"discussion_id": "2172907663",
"commented_code": "@@ -1,17 +1,12 @@\n # Release History\n \n-## 1.1.0-beta.3 (Unreleased)\n+## 1.1.0-beta.3 (2025-06-27)\n \n ### Features Added\n \n - Tracing for Agents. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#tracing).\n - Convenience constructor for BingCustomSearchToolParameters\n-\n-### Breaking Changes\n-\n-### Bugs Fixed\n-\n-### Other Changes\n+- Automatically function toolcalls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"comment_created_at": "2025-06-27T21:43:46+00:00",
"comment_author": "jhakulin",
"comment_body": "Support for automatic execution of function tool calls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"pr_file_module": null
},
{
"comment_id": "2172910565",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50942,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/CHANGELOG.md",
"discussion_id": "2172907663",
"commented_code": "@@ -1,17 +1,12 @@\n # Release History\n \n-## 1.1.0-beta.3 (Unreleased)\n+## 1.1.0-beta.3 (2025-06-27)\n \n ### Features Added\n \n - Tracing for Agents. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#tracing).\n - Convenience constructor for BingCustomSearchToolParameters\n-\n-### Breaking Changes\n-\n-### Bugs Fixed\n-\n-### Other Changes\n+- Automatically function toolcalls. More information [here](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Agents.Persistent/README.md#function-call-executed-automatically).",
"comment_created_at": "2025-06-27T21:47:43+00:00",
"comment_author": "nick863",
"comment_body": "https://github.com/Azure/azure-sdk-for-net/pull/50945",
"pr_file_module": null
}
]
},
{
"discussion_id": "2110933580",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"created_at": "2025-05-28T05:15:18+00:00",
"commented_code": "### Breaking Changes\n* Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2110933580",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"discussion_id": "2110933580",
"commented_code": "@@ -19,7 +19,7 @@\n \n ### Breaking Changes\n * Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n-`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n+`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"comment_created_at": "2025-05-28T05:15:18+00:00",
"comment_author": "jhakulin",
"comment_body": "Little rephrase suggestion, could you change to notes like below?\r\n\r\nSupport for project connection string and hub-based projects has been discontinued. We recommend creating a new Azure AI Foundry resource utilizing project endpoint. If this is not possible, please pin the version of `Azure.AI.Projects` to version `1.0.0-beta.8` or earlier.\r\n\r\n",
"pr_file_module": null
},
{
"comment_id": "2110936385",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"discussion_id": "2110933580",
"commented_code": "@@ -19,7 +19,7 @@\n \n ### Breaking Changes\n * Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n-`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n+`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"comment_created_at": "2025-05-28T05:16:47+00:00",
"comment_author": "jhakulin",
"comment_body": "Updated text",
"pr_file_module": null
},
{
"comment_id": "2112481519",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50283,
"pr_file": "sdk/ai/Azure.AI.Projects/CHANGELOG.md",
"discussion_id": "2110933580",
"commented_code": "@@ -19,7 +19,7 @@\n \n ### Breaking Changes\n * Azure AI Foundry Project endpoint is now required to construct the `AIProjectClient`. It has the form\n-`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. \n+`https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>`. Find it in your AI Foundry Project Overview page. The support of connection strings as the project endpoints and hub-based projects was dropped. Please create a new Azure AI Foundry resource, which uses endpoints (recommended), or pin the version of `Azure.AI.Projects` to `1.0.0-beta.8` if it is not possible.",
"comment_created_at": "2025-05-28T18:10:20+00:00",
"comment_author": "nick863",
"comment_body": "Fixed",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,146 @@
[
{
"discussion_id": "2165164048",
"pr_number": 50850,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/src/Providers/CollectionResultDefinition.cs",
"created_at": "2025-06-25T00:03:13+00:00",
"commented_code": "// Extract next page\n doWhileStatement.Add(nextPageVariable.Assign(BuildGetNextPage(responseWithTypeVariable, responseVariable)).Terminate());\n\n // Break if the next page is null",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2165164048",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50850,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/src/Providers/CollectionResultDefinition.cs",
"discussion_id": "2165164048",
"commented_code": "@@ -213,6 +216,9 @@ private MethodBodyStatement[] BuildAsPagesMethodBody()\n // Extract next page\n doWhileStatement.Add(nextPageVariable.Assign(BuildGetNextPage(responseWithTypeVariable, responseVariable)).Terminate());\n \n+ // Break if the next page is null",
"comment_created_at": "2025-06-25T00:03:13+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider adding a code comment for clarity and remove redundant parentheses to improve readability.\n```suggestion\n // Break if the next page is null\n // Break the loop if the next page variable is null, indicating no more pages to process.\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2160611331",
"pr_number": 50777,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/test/Visitors/NamespaceVisitorTests.cs",
"created_at": "2025-06-23T02:26:18+00:00",
"commented_code": "Assert.AreEqual(\"Samples\", updatedModel!.Type.Namespace);\n }\n\n [Test]\n public void DoesNotChangeNamespaceOfCustomizedModel()\n {\n MockHelpers.LoadMockPlugin(configurationJson: \"{ \\\"package-name\\\": \\\"TestLibrary\\\", \\\"model-namespace\\\": true }\");\n var visitor = new TestNamespaceVisitor();\n var inputType = InputFactory.Model(\"TestModel\", \"Samples\");\n var model = new ModelProvider(inputType);\n\n // simulate a customized model\n model.GetType().BaseType!.GetField(\n \"_customCodeView\",\n System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)?\n .SetValue(model, new Lazy<TypeProvider>(() => new TestTypeProvider()));",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2160611331",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50777,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/test/Visitors/NamespaceVisitorTests.cs",
"discussion_id": "2160611331",
"commented_code": "@@ -38,6 +39,25 @@ public void DoesNotUseModelsNamespaceIfConfigSetToFalse()\n Assert.AreEqual(\"Samples\", updatedModel!.Type.Namespace);\n }\n \n+ [Test]\n+ public void DoesNotChangeNamespaceOfCustomizedModel()\n+ {\n+ MockHelpers.LoadMockPlugin(configurationJson: \"{ \\\"package-name\\\": \\\"TestLibrary\\\", \\\"model-namespace\\\": true }\");\n+ var visitor = new TestNamespaceVisitor();\n+ var inputType = InputFactory.Model(\"TestModel\", \"Samples\");\n+ var model = new ModelProvider(inputType);\n+\n+ // simulate a customized model\n+ model.GetType().BaseType!.GetField(\n+ \"_customCodeView\",\n+ System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)?\n+ .SetValue(model, new Lazy<TypeProvider>(() => new TestTypeProvider()));",
"comment_created_at": "2025-06-23T02:26:18+00:00",
"comment_author": "Copilot",
"comment_body": "Consider extracting the reflection logic used to set _customCodeView into a helper method or documenting its purpose to improve maintainability and reduce the risk of breakage if the field name changes.\n```suggestion\n SetCustomCodeView(model, new Lazy<TypeProvider>(() => new TestTypeProvider()));\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156832104",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/src/Common/Custom/Models/ManagedServiceIdentity.Serialization.cs",
"created_at": "2025-06-19T11:54:22+00:00",
"commented_code": "[JsonConverter(typeof(ManagedServiceIdentityConverter))]\n public partial class ManagedServiceIdentity : IJsonModel<ManagedServiceIdentity>\n {\n internal void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options, JsonSerializerOptions jOptions = default)\n private const string SystemAssignedUserAssignedV3Value = \"SystemAssigned,UserAssigned\";",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156832104",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/src/Common/Custom/Models/ManagedServiceIdentity.Serialization.cs",
"discussion_id": "2156832104",
"commented_code": "@@ -18,22 +18,51 @@ namespace Azure.ResourceManager.Models\n [JsonConverter(typeof(ManagedServiceIdentityConverter))]\n public partial class ManagedServiceIdentity : IJsonModel<ManagedServiceIdentity>\n {\n- internal void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options, JsonSerializerOptions jOptions = default)\n+ private const string SystemAssignedUserAssignedV3Value = \"SystemAssigned,UserAssigned\";\n+",
"comment_created_at": "2025-06-19T11:54:22+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider adding a comment explaining the rationale behind checking for the '|v3' suffix in options.Format to improve future maintainability and clarity.\n```suggestion\n\n // This method checks if the format string in options.Format ends with the \"|v3\" suffix.\n // The \"|v3\" suffix indicates that the ManagedServiceIdentity format is version 3.\n // If the suffix is present, it is removed, and the base format is returned via the 'format' parameter.\n // This allows the method to handle version-specific logic while preserving the base format.\n```",
"pr_file_module": null
},
{
"comment_id": "2157828757",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/src/Common/Custom/Models/ManagedServiceIdentity.Serialization.cs",
"discussion_id": "2156832104",
"commented_code": "@@ -18,22 +18,51 @@ namespace Azure.ResourceManager.Models\n [JsonConverter(typeof(ManagedServiceIdentityConverter))]\n public partial class ManagedServiceIdentity : IJsonModel<ManagedServiceIdentity>\n {\n- internal void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options, JsonSerializerOptions jOptions = default)\n+ private const string SystemAssignedUserAssignedV3Value = \"SystemAssigned,UserAssigned\";\n+",
"comment_created_at": "2025-06-20T00:42:53+00:00",
"comment_author": "live1206",
"comment_body": "added comment after rewording based on this.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2130331367",
"pr_number": 50456,
"pr_file": "sdk/storage/Azure.Storage.Blobs/src/PageBlobClient.cs",
"created_at": "2025-06-05T20:53:25+00:00",
"commented_code": "response = await PageBlobRestClient.CreateAsync(\n contentLength: 0,\n blobContentLength: size,",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2130331367",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50456,
"pr_file": "sdk/storage/Azure.Storage.Blobs/src/PageBlobClient.cs",
"discussion_id": "2130331367",
"commented_code": "@@ -999,7 +1018,7 @@ private async Task<Response<BlobContentInfo>> CreateInternal(\n response = await PageBlobRestClient.CreateAsync(\n contentLength: 0,\n blobContentLength: size,",
"comment_created_at": "2025-06-05T20:53:25+00:00",
"comment_author": "Copilot",
"comment_body": "Consider adding an inline comment clarifying the implicit conversion of PremiumPageBlobAccessTier and the intended handling of default or null values. This will help future maintainers understand how the premium tier parameter is propagated to the REST API call.\n```suggestion\n blobContentLength: size,\n // The premiumPageBlobAccessTier parameter specifies the access tier for the page blob.\n // If null, the REST API will apply the default tier or handle it gracefully.\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124976966",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/src/Primitives/ServiceBusRetryPolicy.cs",
"created_at": "2025-06-03T21:40:01+00:00",
"commented_code": "bool logTimeoutRetriesAsVerbose = false)\n {\n var failedAttemptCount = 0;\n var tryTimeout = CalculateTryTimeout(0);\n\n TimeSpan tryTimeout = CalculateTryTimeout(0);\n if (IsServerBusy && tryTimeout < ServerBusyBaseSleepTime)\n {\n // We are in a server busy state before we start processing.\n // Since ServerBusyBaseSleepTime > remaining time for the operation, we don't wait for the entire Sleep time.\n await Task.Delay(tryTimeout, cancellationToken).ConfigureAwait(false);\n throw new ServiceBusException(\n ServerBusyExceptionMessage,\n ServiceBusFailureReason.ServiceBusy);\n while (IsServerBusy && !cancellationToken.IsCancellationRequested)",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124976966",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/src/Primitives/ServiceBusRetryPolicy.cs",
"discussion_id": "2124976966",
"commented_code": "@@ -130,17 +133,42 @@ internal async ValueTask<TResult> RunOperation<T1, TResult>(\n bool logTimeoutRetriesAsVerbose = false)\n {\n var failedAttemptCount = 0;\n+ var tryTimeout = CalculateTryTimeout(0);\n \n- TimeSpan tryTimeout = CalculateTryTimeout(0);\n if (IsServerBusy && tryTimeout < ServerBusyBaseSleepTime)\n {\n- // We are in a server busy state before we start processing.\n- // Since ServerBusyBaseSleepTime > remaining time for the operation, we don't wait for the entire Sleep time.\n- await Task.Delay(tryTimeout, cancellationToken).ConfigureAwait(false);\n- throw new ServiceBusException(\n- ServerBusyExceptionMessage,\n- ServiceBusFailureReason.ServiceBusy);\n+ while (IsServerBusy && !cancellationToken.IsCancellationRequested)",
"comment_created_at": "2025-06-03T21:40:01+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Add an inline comment within the while loop explaining the rationale behind recalculating the try timeout and the exit condition. This can help future maintainers understand how the retry logic adapts to a persisting server busy state.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2103555145",
"pr_number": 50225,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/src/DataMovementSharesExtensions.cs",
"created_at": "2025-05-22T23:21:04+00:00",
"commented_code": "try\n {\n ShareProperties properties = await parentShareClient.GetPropertiesAsync(cancellationToken).ConfigureAwait(false);\n ShareProtocols expectedProtocol = options?.ShareProtocol ?? ShareProtocols.Smb;\n ShareProtocols actualProtocol = properties.Protocols ?? ShareProtocols.Smb;\n ShareProtocol expectedProtocol = options?.ShareProtocol ?? ShareProtocol.Smb;\n ShareProtocols effectiveProtocol = properties.Protocols ?? ShareProtocols.Smb;",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2103555145",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50225,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/src/DataMovementSharesExtensions.cs",
"discussion_id": "2103555145",
"commented_code": "@@ -728,8 +728,9 @@ public static async Task ValidateProtocolAsync(\n try\n {\n ShareProperties properties = await parentShareClient.GetPropertiesAsync(cancellationToken).ConfigureAwait(false);\n- ShareProtocols expectedProtocol = options?.ShareProtocol ?? ShareProtocols.Smb;\n- ShareProtocols actualProtocol = properties.Protocols ?? ShareProtocols.Smb;\n+ ShareProtocol expectedProtocol = options?.ShareProtocol ?? ShareProtocol.Smb;\n+ ShareProtocols effectiveProtocol = properties.Protocols ?? ShareProtocols.Smb;",
"comment_created_at": "2025-05-22T23:21:04+00:00",
"comment_author": "Copilot",
"comment_body": "Consider adding a clarifying comment explaining the conversion from ShareProtocols to ShareProtocol to improve readability, as this conversion may not be immediately clear to future maintainers.\n```suggestion\n ShareProtocols effectiveProtocol = properties.Protocols ?? ShareProtocols.Smb;\n // Convert ShareProtocols (multi-valued) to ShareProtocol (single-valued).\n // If effectiveProtocol is ShareProtocols.Smb, map to ShareProtocol.Smb.\n // Otherwise, default to ShareProtocol.Nfs.\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -46,151 +46,3 @@ Add clear, concise comments that explain the "why" behind complex logic, non-obv
```
Comments should benefit future maintainers by providing context that isn't immediately obvious from the code itself. When complex code can't be simplified, comprehensive documentation becomes essential for long-term maintainability.
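
A C# sketch of such a "why" comment, loosely modeled on the server-busy retry loop quoted in this file's discussions; the state flag, sleep interval, and method body are simplified stand-ins for the real policy:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

class RetryExample
{
    // Hypothetical state and interval, for illustration only.
    static volatile bool IsServerBusy = true;
    static readonly TimeSpan ServerBusySleepSlice = TimeSpan.FromSeconds(1);

    static async Task WaitWhileBusyAsync(CancellationToken cancellationToken)
    {
        // Why we loop instead of failing fast: a "server busy" signal covers a
        // bounded window, so sleeping in short slices and re-checking lets the
        // operation resume as soon as the window ends, while each slice still
        // honors cancellation.
        while (IsServerBusy && !cancellationToken.IsCancellationRequested)
        {
            await Task.Delay(ServerBusySleepSlice, cancellationToken).ConfigureAwait(false);
        }
    }
}
```
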
[
{
"discussion_id": "2165164048",
"pr_number": 50850,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/src/Providers/CollectionResultDefinition.cs",
"created_at": "2025-06-25T00:03:13+00:00",
"commented_code": "// Extract next page\n doWhileStatement.Add(nextPageVariable.Assign(BuildGetNextPage(responseWithTypeVariable, responseVariable)).Terminate());\n\n // Break if the next page is null",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2165164048",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50850,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/src/Providers/CollectionResultDefinition.cs",
"discussion_id": "2165164048",
"commented_code": "@@ -213,6 +216,9 @@ private MethodBodyStatement[] BuildAsPagesMethodBody()\n // Extract next page\n doWhileStatement.Add(nextPageVariable.Assign(BuildGetNextPage(responseWithTypeVariable, responseVariable)).Terminate());\n \n+ // Break if the next page is null",
"comment_created_at": "2025-06-25T00:03:13+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider adding a code comment for clarity and remove redundant parentheses to improve readability.\n```suggestion\n // Break if the next page is null\n // Break the loop if the next page variable is null, indicating no more pages to process.\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2160611331",
"pr_number": 50777,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/test/Visitors/NamespaceVisitorTests.cs",
"created_at": "2025-06-23T02:26:18+00:00",
"commented_code": "Assert.AreEqual(\"Samples\", updatedModel!.Type.Namespace);\n }\n\n [Test]\n public void DoesNotChangeNamespaceOfCustomizedModel()\n {\n MockHelpers.LoadMockPlugin(configurationJson: \"{ \\\"package-name\\\": \\\"TestLibrary\\\", \\\"model-namespace\\\": true }\");\n var visitor = new TestNamespaceVisitor();\n var inputType = InputFactory.Model(\"TestModel\", \"Samples\");\n var model = new ModelProvider(inputType);\n\n // simulate a customized model\n model.GetType().BaseType!.GetField(\n \"_customCodeView\",\n System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)?\n .SetValue(model, new Lazy<TypeProvider>(() => new TestTypeProvider()));",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2160611331",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50777,
"pr_file": "eng/packages/http-client-csharp/generator/Azure.Generator/test/Visitors/NamespaceVisitorTests.cs",
"discussion_id": "2160611331",
"commented_code": "@@ -38,6 +39,25 @@ public void DoesNotUseModelsNamespaceIfConfigSetToFalse()\n Assert.AreEqual(\"Samples\", updatedModel!.Type.Namespace);\n }\n \n+ [Test]\n+ public void DoesNotChangeNamespaceOfCustomizedModel()\n+ {\n+ MockHelpers.LoadMockPlugin(configurationJson: \"{ \\\"package-name\\\": \\\"TestLibrary\\\", \\\"model-namespace\\\": true }\");\n+ var visitor = new TestNamespaceVisitor();\n+ var inputType = InputFactory.Model(\"TestModel\", \"Samples\");\n+ var model = new ModelProvider(inputType);\n+\n+ // simulate a customized model\n+ model.GetType().BaseType!.GetField(\n+ \"_customCodeView\",\n+ System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)?\n+ .SetValue(model, new Lazy<TypeProvider>(() => new TestTypeProvider()));",
"comment_created_at": "2025-06-23T02:26:18+00:00",
"comment_author": "Copilot",
"comment_body": "Consider extracting the reflection logic used to set _customCodeView into a helper method or documenting its purpose to improve maintainability and reduce the risk of breakage if the field name changes.\n```suggestion\n SetCustomCodeView(model, new Lazy<TypeProvider>(() => new TestTypeProvider()));\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156832104",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/src/Common/Custom/Models/ManagedServiceIdentity.Serialization.cs",
"created_at": "2025-06-19T11:54:22+00:00",
"commented_code": "[JsonConverter(typeof(ManagedServiceIdentityConverter))]\n public partial class ManagedServiceIdentity : IJsonModel<ManagedServiceIdentity>\n {\n internal void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options, JsonSerializerOptions jOptions = default)\n private const string SystemAssignedUserAssignedV3Value = \"SystemAssigned,UserAssigned\";",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156832104",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/src/Common/Custom/Models/ManagedServiceIdentity.Serialization.cs",
"discussion_id": "2156832104",
"commented_code": "@@ -18,22 +18,51 @@ namespace Azure.ResourceManager.Models\n [JsonConverter(typeof(ManagedServiceIdentityConverter))]\n public partial class ManagedServiceIdentity : IJsonModel<ManagedServiceIdentity>\n {\n- internal void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options, JsonSerializerOptions jOptions = default)\n+ private const string SystemAssignedUserAssignedV3Value = \"SystemAssigned,UserAssigned\";\n+",
"comment_created_at": "2025-06-19T11:54:22+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider adding a comment explaining the rationale behind checking for the '|v3' suffix in options.Format to improve future maintainability and clarity.\n```suggestion\n\n // This method checks if the format string in options.Format ends with the \"|v3\" suffix.\n // The \"|v3\" suffix indicates that the ManagedServiceIdentity format is version 3.\n // If the suffix is present, it is removed, and the base format is returned via the 'format' parameter.\n // This allows the method to handle version-specific logic while preserving the base format.\n```",
"pr_file_module": null
},
{
"comment_id": "2157828757",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/src/Common/Custom/Models/ManagedServiceIdentity.Serialization.cs",
"discussion_id": "2156832104",
"commented_code": "@@ -18,22 +18,51 @@ namespace Azure.ResourceManager.Models\n [JsonConverter(typeof(ManagedServiceIdentityConverter))]\n public partial class ManagedServiceIdentity : IJsonModel<ManagedServiceIdentity>\n {\n- internal void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options, JsonSerializerOptions jOptions = default)\n+ private const string SystemAssignedUserAssignedV3Value = \"SystemAssigned,UserAssigned\";\n+",
"comment_created_at": "2025-06-20T00:42:53+00:00",
"comment_author": "live1206",
"comment_body": "added comment after rewording based on this.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2130331367",
"pr_number": 50456,
"pr_file": "sdk/storage/Azure.Storage.Blobs/src/PageBlobClient.cs",
"created_at": "2025-06-05T20:53:25+00:00",
"commented_code": "response = await PageBlobRestClient.CreateAsync(\n contentLength: 0,\n blobContentLength: size,",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2130331367",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50456,
"pr_file": "sdk/storage/Azure.Storage.Blobs/src/PageBlobClient.cs",
"discussion_id": "2130331367",
"commented_code": "@@ -999,7 +1018,7 @@ private async Task<Response<BlobContentInfo>> CreateInternal(\n response = await PageBlobRestClient.CreateAsync(\n contentLength: 0,\n blobContentLength: size,",
"comment_created_at": "2025-06-05T20:53:25+00:00",
"comment_author": "Copilot",
"comment_body": "Consider adding an inline comment clarifying the implicit conversion of PremiumPageBlobAccessTier and the intended handling of default or null values. This will help future maintainers understand how the premium tier parameter is propagated to the REST API call.\n```suggestion\n blobContentLength: size,\n // The premiumPageBlobAccessTier parameter specifies the access tier for the page blob.\n // If null, the REST API will apply the default tier or handle it gracefully.\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124976966",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/src/Primitives/ServiceBusRetryPolicy.cs",
"created_at": "2025-06-03T21:40:01+00:00",
"commented_code": "bool logTimeoutRetriesAsVerbose = false)\n {\n var failedAttemptCount = 0;\n var tryTimeout = CalculateTryTimeout(0);\n\n TimeSpan tryTimeout = CalculateTryTimeout(0);\n if (IsServerBusy && tryTimeout < ServerBusyBaseSleepTime)\n {\n // We are in a server busy state before we start processing.\n // Since ServerBusyBaseSleepTime > remaining time for the operation, we don't wait for the entire Sleep time.\n await Task.Delay(tryTimeout, cancellationToken).ConfigureAwait(false);\n throw new ServiceBusException(\n ServerBusyExceptionMessage,\n ServiceBusFailureReason.ServiceBusy);\n while (IsServerBusy && !cancellationToken.IsCancellationRequested)",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124976966",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/src/Primitives/ServiceBusRetryPolicy.cs",
"discussion_id": "2124976966",
"commented_code": "@@ -130,17 +133,42 @@ internal async ValueTask<TResult> RunOperation<T1, TResult>(\n bool logTimeoutRetriesAsVerbose = false)\n {\n var failedAttemptCount = 0;\n+ var tryTimeout = CalculateTryTimeout(0);\n \n- TimeSpan tryTimeout = CalculateTryTimeout(0);\n if (IsServerBusy && tryTimeout < ServerBusyBaseSleepTime)\n {\n- // We are in a server busy state before we start processing.\n- // Since ServerBusyBaseSleepTime > remaining time for the operation, we don't wait for the entire Sleep time.\n- await Task.Delay(tryTimeout, cancellationToken).ConfigureAwait(false);\n- throw new ServiceBusException(\n- ServerBusyExceptionMessage,\n- ServiceBusFailureReason.ServiceBusy);\n+ while (IsServerBusy && !cancellationToken.IsCancellationRequested)",
"comment_created_at": "2025-06-03T21:40:01+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Add an inline comment within the while loop explaining the rationale behind recalculating the try timeout and the exit condition. This can help future maintainers understand how the retry logic adapts to a persisting server busy state.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2103555145",
"pr_number": 50225,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/src/DataMovementSharesExtensions.cs",
"created_at": "2025-05-22T23:21:04+00:00",
"commented_code": "try\n {\n ShareProperties properties = await parentShareClient.GetPropertiesAsync(cancellationToken).ConfigureAwait(false);\n ShareProtocols expectedProtocol = options?.ShareProtocol ?? ShareProtocols.Smb;\n ShareProtocols actualProtocol = properties.Protocols ?? ShareProtocols.Smb;\n ShareProtocol expectedProtocol = options?.ShareProtocol ?? ShareProtocol.Smb;\n ShareProtocols effectiveProtocol = properties.Protocols ?? ShareProtocols.Smb;",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2103555145",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50225,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/src/DataMovementSharesExtensions.cs",
"discussion_id": "2103555145",
"commented_code": "@@ -728,8 +728,9 @@ public static async Task ValidateProtocolAsync(\n try\n {\n ShareProperties properties = await parentShareClient.GetPropertiesAsync(cancellationToken).ConfigureAwait(false);\n- ShareProtocols expectedProtocol = options?.ShareProtocol ?? ShareProtocols.Smb;\n- ShareProtocols actualProtocol = properties.Protocols ?? ShareProtocols.Smb;\n+ ShareProtocol expectedProtocol = options?.ShareProtocol ?? ShareProtocol.Smb;\n+ ShareProtocols effectiveProtocol = properties.Protocols ?? ShareProtocols.Smb;",
"comment_created_at": "2025-05-22T23:21:04+00:00",
"comment_author": "Copilot",
"comment_body": "Consider adding a clarifying comment explaining the conversion from ShareProtocols to ShareProtocol to improve readability, as this conversion may not be immediately clear to future maintainers.\n```suggestion\n ShareProtocols effectiveProtocol = properties.Protocols ?? ShareProtocols.Smb;\n // Convert ShareProtocols (multi-valued) to ShareProtocol (single-valued).\n // If effectiveProtocol is ShareProtocols.Smb, map to ShareProtocol.Smb.\n // Otherwise, default to ShareProtocol.Nfs.\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,46 @@
[
{
"discussion_id": "2143887259",
"pr_number": 50587,
"pr_file": "eng/common/pipelines/templates/steps/detect-api-changes.yml",
"created_at": "2025-06-13T00:09:46+00:00",
"commented_code": "Condition: true\n\nsteps:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2143887259",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50587,
"pr_file": "eng/common/pipelines/templates/steps/detect-api-changes.yml",
"discussion_id": "2143887259",
"commented_code": "@@ -5,24 +5,25 @@ parameters:\n Condition: true\n \n steps:",
"comment_created_at": "2025-06-13T00:09:46+00:00",
"comment_author": "Copilot",
"comment_body": "Consider adding an inline comment here to clarify that this conditional wrapper restricts the following steps to pull request builds only.\n```suggestion\nsteps:\n # Restrict the following steps to pull request builds only\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2106854958",
"pr_number": 50267,
"pr_file": "sdk/mongodbatlas/Azure.ResourceManager.MongoDBAtlas/tsp-location.yaml",
"created_at": "2025-05-26T08:44:05+00:00",
"commented_code": "directory: specification/liftrmongodb/MongoDB.Atlas.Management\ncommit: 6a4f32353ce0eb59d33fd785a512cd487b81814f\nrepo: Azure/azure-rest-api-specs\nadditionalDirectories:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2106854958",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50267,
"pr_file": "sdk/mongodbatlas/Azure.ResourceManager.MongoDBAtlas/tsp-location.yaml",
"discussion_id": "2106854958",
"commented_code": "@@ -0,0 +1,4 @@\n+directory: specification/liftrmongodb/MongoDB.Atlas.Management\n+commit: 6a4f32353ce0eb59d33fd785a512cd487b81814f\n+repo: Azure/azure-rest-api-specs\n+additionalDirectories: ",
"comment_created_at": "2025-05-26T08:44:05+00:00",
"comment_author": "Copilot",
"comment_body": "The 'additionalDirectories' property is left empty; please either remove it if not needed or add a comment/documentation to clarify its intended usage.",
"pr_file_module": null
}
]
}
]

View File

@@ -32,51 +32,3 @@ additionalDirectories:
```
Clear documentation reduces the cognitive load for reviewers and future maintainers, helping them understand the code's purpose without needing to infer it from implementation details.
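
A minimal sketch of the kind of clarifying comment asked for above, applied to the ShareProtocols-to-ShareProtocol conversion from the earlier storage discussion; the enum definitions are simplified stand-ins, not the real Azure.Storage types.
```
// Simplified stand-ins for the Azure.Storage enums referenced in the discussion.
public enum ShareProtocol { Smb, Nfs }          // single-valued
[System.Flags]
public enum ShareProtocols { Smb = 1, Nfs = 2 } // multi-valued flags

public static class ProtocolConversion
{
    public static ShareProtocol ToSingleProtocol(ShareProtocols effectiveProtocol)
    {
        // Convert ShareProtocols (multi-valued, from service properties) to
        // ShareProtocol (single-valued): an SMB flag maps to ShareProtocol.Smb,
        // anything else defaults to ShareProtocol.Nfs.
        return effectiveProtocol.HasFlag(ShareProtocols.Smb)
            ? ShareProtocol.Smb
            : ShareProtocol.Nfs;
    }
}
```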

View File

@@ -0,0 +1,46 @@
[
{
"discussion_id": "2178653926",
"pr_number": 50998,
"pr_file": "eng/common/scripts/job-matrix/Create-JobMatrix.ps1",
"created_at": "2025-07-01T22:33:14+00:00",
"commented_code": ")\n\n. $PSScriptRoot/job-matrix-functions.ps1\n. $PSScriptRoot/../logging.ps1\n\nif (!(Test-Path $ConfigPath)) {\n Write-Error \"ConfigPath '$ConfigPath' does not exist.\"\n exit 1\n}\n$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw)\n$rawConfig = Get-Content $ConfigPath -Raw\n$config = GetMatrixConfigFromFile $rawConfig\n# Strip empty string filters in order to be able to use azure pipelines yaml join()\n$Filters = $Filters | Where-Object { $_ }\n\nLogGroupStart \"Matrix generation configuration\"\nWrite-Host \"Configuration File: $ConfigPath\"\nWrite-Host $rawConfig\nWrite-Host \"SelectionType: $Selection\"\nWrite-Host \"DisplayNameFilter: $DisplayNameFilter\"\nWrite-Host \"Filters: $Filters\"\nWrite-Host \"Replace: $Replace\"\nWrite-Host \"NonSparseParameters: $NonSparseParameters\"",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2178653926",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50998,
"pr_file": "eng/common/scripts/job-matrix/Create-JobMatrix.ps1",
"discussion_id": "2178653926",
"commented_code": "@@ -21,15 +21,27 @@ param (\n )\n \n . $PSScriptRoot/job-matrix-functions.ps1\n+. $PSScriptRoot/../logging.ps1\n \n if (!(Test-Path $ConfigPath)) {\n Write-Error \"ConfigPath '$ConfigPath' does not exist.\"\n exit 1\n }\n-$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw)\n+$rawConfig = Get-Content $ConfigPath -Raw\n+$config = GetMatrixConfigFromFile $rawConfig\n # Strip empty string filters in order to be able to use azure pipelines yaml join()\n $Filters = $Filters | Where-Object { $_ }\n \n+LogGroupStart \"Matrix generation configuration\"\n+Write-Host \"Configuration File: $ConfigPath\"\n+Write-Host $rawConfig\n+Write-Host \"SelectionType: $Selection\"\n+Write-Host \"DisplayNameFilter: $DisplayNameFilter\"\n+Write-Host \"Filters: $Filters\"\n+Write-Host \"Replace: $Replace\"\n+Write-Host \"NonSparseParameters: $NonSparseParameters\"",
"comment_created_at": "2025-07-01T22:33:14+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] The sequence of Write-Host statements for logging could be refactored into a loop or helper function to reduce duplication and improve maintainability.\n```suggestion\n$logEntries = @{\n \"Configuration File\" = $ConfigPath\n \"Raw Configuration\" = $rawConfig\n \"SelectionType\" = $Selection\n \"DisplayNameFilter\" = $DisplayNameFilter\n \"Filters\" = $Filters\n \"Replace\" = $Replace\n \"NonSparseParameters\" = $NonSparseParameters\n}\nforeach ($key in $logEntries.Keys) {\n Write-Host \"$key: $($logEntries[$key])\"\n}\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2176461585",
"pr_number": 50970,
"pr_file": "eng/scripts/automation/GenerateAndBuildLib.ps1",
"created_at": "2025-07-01T05:30:04+00:00",
"commented_code": "if ($yml[\"parameters\"] -And $yml[\"parameters\"][\"service-dir\"]) {\n $service = $yml[\"parameters\"][\"service-dir\"][\"default\"];\n }\n if ($yml[\"options\"] -And $yml[\"options\"][\"@azure-tools/typespec-csharp\"] -And $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]) {\n $packageDir = $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]\n if ($yml[\"options\"] -And $yml[\"options\"][\"@azure-tools/typespec-csharp\"]) {\n if ($yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]) {\n $packageDir = $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]\n }\n if ($yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"service-dir\"]) {\n $service = $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"service-dir\"]",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2176461585",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50970,
"pr_file": "eng/scripts/automation/GenerateAndBuildLib.ps1",
"discussion_id": "2176461585",
"commented_code": "@@ -917,8 +917,13 @@ function GetSDKProjectFolder()\n if ($yml[\"parameters\"] -And $yml[\"parameters\"][\"service-dir\"]) {\n $service = $yml[\"parameters\"][\"service-dir\"][\"default\"];\n }\n- if ($yml[\"options\"] -And $yml[\"options\"][\"@azure-tools/typespec-csharp\"] -And $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]) {\n- $packageDir = $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]\n+ if ($yml[\"options\"] -And $yml[\"options\"][\"@azure-tools/typespec-csharp\"]) {\n+ if ($yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]) {\n+ $packageDir = $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"package-dir\"]\n+ }\n+ if ($yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"service-dir\"]) {\n+ $service = $yml[\"options\"][\"@azure-tools/typespec-csharp\"][\"service-dir\"]",
"comment_created_at": "2025-07-01T05:30:04+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Consider assigning `$yml[\"options\"][\"@azure-tools/typespec-csharp\"]` to a local variable (e.g. `$csharpOpts`) to avoid repeated indexing and improve readability.\n```suggestion\n $csharpOpts = $yml[\"options\"][\"@azure-tools/typespec-csharp\"]\n if ($csharpOpts[\"package-dir\"]) {\n $packageDir = $csharpOpts[\"package-dir\"]\n }\n if ($csharpOpts[\"service-dir\"]) {\n $service = $csharpOpts[\"service-dir\"]\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -54,51 +54,3 @@ if ($csharpOpts["service-dir"]) {
$service = $csharpOpts["service-dir"]
}
```
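
The same advice, hoisting a repeated nested lookup into a local, carries over directly to C#. A hedged sketch with hypothetical types; nothing here is real SDK API.
```
using System.Collections.Generic;

public static class OptionsExample
{
    public static (string PackageDir, string ServiceDir) ReadDirs(
        Dictionary<string, Dictionary<string, string>> options)
    {
        string packageDir = null, serviceDir = null;

        // Index the nested dictionary once instead of repeating
        // options["@azure-tools/typespec-csharp"][...] for every key.
        if (options.TryGetValue("@azure-tools/typespec-csharp", out var csharpOpts))
        {
            csharpOpts.TryGetValue("package-dir", out packageDir);
            csharpOpts.TryGetValue("service-dir", out serviceDir);
        }
        return (packageDir, serviceDir);
    }
}
```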

View File

@@ -0,0 +1,102 @@
[
{
"discussion_id": "2162710383",
"pr_number": 50802,
"pr_file": "eng/pipelines/docindex.yml",
"created_at": "2025-06-24T00:00:40+00:00",
"commented_code": "- job: UpdateDocsMsBuildConfig\n pool:\n name: $(LINUXPOOL)\n demands: $(LinuxImageDemand)\n demands: ImageOverride -equals $(LINUXVMIMAGE)",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2162710383",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50802,
"pr_file": "eng/pipelines/docindex.yml",
"discussion_id": "2162710383",
"commented_code": "@@ -34,7 +34,7 @@ jobs:\n - job: UpdateDocsMsBuildConfig\n pool:\n name: $(LINUXPOOL)\n- demands: $(LinuxImageDemand)\n+ demands: ImageOverride -equals $(LINUXVMIMAGE)",
"comment_created_at": "2025-06-24T00:00:40+00:00",
"comment_author": "Copilot",
"comment_body": "Consider centralizing the inline image demand expression in a shared variable to improve consistency and ease future maintenance.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2162710385",
"pr_number": 50802,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"created_at": "2025-06-24T00:00:41+00:00",
"commented_code": "pool:\n name: $(WINDOWSPOOL)\n demands: $(WindowsImageDemand)\n demands: ImageOverride -equals $(WINDOWSVMIMAGE)\n\nvariables:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2162710385",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50802,
"pr_file": "eng/pipelines/aggregate-reports.yml",
"discussion_id": "2162710385",
"commented_code": "@@ -11,7 +11,7 @@ pr:\n \n pool:\n name: $(WINDOWSPOOL)\n- demands: $(WindowsImageDemand)\n+ demands: ImageOverride -equals $(WINDOWSVMIMAGE)\n \n variables:",
"comment_created_at": "2025-06-24T00:00:41+00:00",
"comment_author": "Copilot",
"comment_body": "Consider centralizing the inline image demand expression in a shared variable to improve consistency and ease future maintenance.\n```suggestion\n demands: $(IMAGE_DEMAND)\n\nvariables:\n - name: IMAGE_DEMAND\n value: ImageOverride -equals $(WINDOWSVMIMAGE)\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2113092739",
"pr_number": 50316,
"pr_file": "eng/common/pipelines/templates/steps/detect-api-changes.yml",
"created_at": "2025-05-29T03:12:50+00:00",
"commented_code": "steps:\n - pwsh: |\n $apiChangeDetectRequestUrl = \"https://apiview.dev/PullRequest/DetectApiChanges\"\n $apiChangeDetectRequestUrl = \"https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges\"",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2113092739",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50316,
"pr_file": "eng/common/pipelines/templates/steps/detect-api-changes.yml",
"discussion_id": "2113092739",
"commented_code": "@@ -6,7 +6,7 @@ parameters:\n \n steps:\n - pwsh: |\n- $apiChangeDetectRequestUrl = \"https://apiview.dev/PullRequest/DetectApiChanges\"\n+ $apiChangeDetectRequestUrl = \"https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges\"",
"comment_created_at": "2025-05-29T03:12:50+00:00",
"comment_author": "Copilot",
"comment_body": "It may be beneficial to extract this hardcoded URL into a pipeline variable group or template parameter to ensure consistency and simplify future updates.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2103307241",
"pr_number": 50221,
"pr_file": "eng/pipelines/docindex.yml",
"created_at": "2025-05-22T19:58:29+00:00",
"commented_code": "- job: UpdateDocsMsBuildConfig\n pool:\n name: $(LINUXPOOL)\n name: azsdk-pool",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2103307241",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50221,
"pr_file": "eng/pipelines/docindex.yml",
"discussion_id": "2103307241",
"commented_code": "@@ -33,7 +33,8 @@ jobs:\n \n - job: UpdateDocsMsBuildConfig\n pool:\n- name: $(LINUXPOOL)\n+ name: azsdk-pool",
"comment_created_at": "2025-05-22T19:58:29+00:00",
"comment_author": "weshaggard",
"comment_body": "Why not use the variables? It might also be interesting to add the demand to the image.yml variables. ",
"pr_file_module": null
},
{
"comment_id": "2103738606",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50221,
"pr_file": "eng/pipelines/docindex.yml",
"discussion_id": "2103307241",
"commented_code": "@@ -33,7 +33,8 @@ jobs:\n \n - job: UpdateDocsMsBuildConfig\n pool:\n- name: $(LINUXPOOL)\n+ name: azsdk-pool",
"comment_created_at": "2025-05-23T03:52:28+00:00",
"comment_author": "danieljurek",
"comment_body": "Tested and the demand can be placed in a variable and it can reference variables (when the values are defined before job run time). ",
"pr_file_module": null
}
]
}
]

View File

@@ -31,107 +31,3 @@ variables:
```
For API endpoints, pipeline definitions, and other configuration values that may change over time, always use variables rather than hardcoding values directly in scripts or pipeline definitions.
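
In application code the same principle usually means resolving such values from configuration or the environment instead of embedding them in source. A minimal sketch, assuming a hypothetical `APICHANGE_DETECT_URL` environment variable; the fallback URL is the one from the diff above.
```
using System;

public static class EndpointConfig
{
    // Resolve the endpoint from an environment variable so that rotating the URL
    // is a configuration change, not a code change; fall back to a default only
    // as a last resort.
    public static Uri GetApiChangeDetectUrl()
    {
        string configured = Environment.GetEnvironmentVariable("APICHANGE_DETECT_URL");
        return new Uri(string.IsNullOrWhiteSpace(configured)
            ? "https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges"
            : configured);
    }
}
```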

View File

@@ -0,0 +1,24 @@
[
{
"discussion_id": "2109943039",
"pr_number": 50282,
"pr_file": "eng/common/pipelines/templates/jobs/npm-publish.yml",
"created_at": "2025-05-27T18:55:49+00:00",
"commented_code": "parameters:\n Tag: 'latest'\n ArtifactName: 'packages'\n DependsOn: []\n Environment: 'package-publish'\n Registry: 'https://registry.npmjs.org/'\n\njobs:\n- deployment: PublishPackage_${{ parameters.ArtifactName }}\n displayName: 'Publish ${{ parameters.ArtifactName }} to ${{ parameters.Registry }}' \n environment: ${{ parameters.Environment }}\n dependsOn: ${{ parameters.DependsOn }}\n variables:\n - name: ArtifactPath\n value: $(Pipeline.Workspace)/${{ parameters.ArtifactName }}\n\n templateContext:\n type: releaseJob\n isProduction: ${{ eq(parameters.Registry, 'https://registry.npmjs.org/') }}\n inputs:\n - input: pipelineArtifact\n artifactName: ${{ parameters.ArtifactName }}\n itemPattern: '**/*.tgz'\n targetPath: $(ArtifactPath)\n\n pool:\n name: azsdk-pool\n image: ubuntu-24.04\n os: linux\n \n strategy:\n runOnce:\n deploy:\n steps:\n - pwsh: |\n foreach ($package in (dir $(ArtifactPath) *.tgz -Recurse)) {\n Write-Host \"Publishing $package to ${{ parameters.Registry }} with tag ${{ parameters.Tag }}\"\n }\n displayName: 'Display packages to be published'\n\n - ${{ if eq(parameters.Registry, 'https://registry.npmjs.org/') }}:\n - task: EsrpRelease@9\n displayName: 'Publish ${{ parameters.ArtifactName }} via ESRP'\n inputs:\n ConnectedServiceName: 'Azure SDK PME Managed Identity'\n ClientId: '5f81938c-2544-4f1f-9251-dd9de5b8a81b'",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2109943039",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50282,
"pr_file": "eng/common/pipelines/templates/jobs/npm-publish.yml",
"discussion_id": "2109943039",
"commented_code": "@@ -0,0 +1,73 @@\n+parameters:\n+ Tag: 'latest'\n+ ArtifactName: 'packages'\n+ DependsOn: []\n+ Environment: 'package-publish'\n+ Registry: 'https://registry.npmjs.org/'\n+\n+jobs:\n+- deployment: PublishPackage_${{ parameters.ArtifactName }}\n+ displayName: 'Publish ${{ parameters.ArtifactName }} to ${{ parameters.Registry }}' \n+ environment: ${{ parameters.Environment }}\n+ dependsOn: ${{ parameters.DependsOn }}\n+ variables:\n+ - name: ArtifactPath\n+ value: $(Pipeline.Workspace)/${{ parameters.ArtifactName }}\n+\n+ templateContext:\n+ type: releaseJob\n+ isProduction: ${{ eq(parameters.Registry, 'https://registry.npmjs.org/') }}\n+ inputs:\n+ - input: pipelineArtifact\n+ artifactName: ${{ parameters.ArtifactName }}\n+ itemPattern: '**/*.tgz'\n+ targetPath: $(ArtifactPath)\n+\n+ pool:\n+ name: azsdk-pool\n+ image: ubuntu-24.04\n+ os: linux\n+ \n+ strategy:\n+ runOnce:\n+ deploy:\n+ steps:\n+ - pwsh: |\n+ foreach ($package in (dir $(ArtifactPath) *.tgz -Recurse)) {\n+ Write-Host \"Publishing $package to ${{ parameters.Registry }} with tag ${{ parameters.Tag }}\"\n+ }\n+ displayName: 'Display packages to be published'\n+\n+ - ${{ if eq(parameters.Registry, 'https://registry.npmjs.org/') }}:\n+ - task: EsrpRelease@9\n+ displayName: 'Publish ${{ parameters.ArtifactName }} via ESRP'\n+ inputs:\n+ ConnectedServiceName: 'Azure SDK PME Managed Identity'\n+ ClientId: '5f81938c-2544-4f1f-9251-dd9de5b8a81b'",
"comment_created_at": "2025-05-27T18:55:49+00:00",
"comment_author": "Copilot",
"comment_body": "Ensure that client identifiers and similar sensitive configuration values are managed securely, for example via secret variable groups, rather than being hard-coded in the pipeline file.\n```suggestion\n ClientId: $(ClientId)\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -26,29 +26,3 @@ task: EsrpRelease@9
inputs:
ClientId: $(ClientId)
```
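
Applied in code, the same rule means resolving identifiers like this from the environment (populated at runtime by a secret variable group) rather than committing the literal. A minimal sketch; the variable name `AZURESDK_ESRP_CLIENT_ID` is invented for illustration.
```
using System;

public static class ReleaseSecrets
{
    public static string GetEsrpClientId()
    {
        // Pull the client id from the environment rather than hard-coding it in
        // source; fail loudly when the secret has not been configured.
        string clientId = Environment.GetEnvironmentVariable("AZURESDK_ESRP_CLIENT_ID");
        if (string.IsNullOrEmpty(clientId))
        {
            throw new InvalidOperationException(
                "AZURESDK_ESRP_CLIENT_ID is not set; configure it via a secret variable group.");
        }
        return clientId;
    }
}
```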

View File

@@ -0,0 +1,118 @@
[
{
"discussion_id": "2139151065",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"created_at": "2025-06-11T04:08:56+00:00",
"commented_code": "# Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"\n \n Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion /p:OutputProjectInfoListFilePath=\"\"$outputFilePath\"\" -tl:off\"\n\n $msbuildOutput = dotnet msbuild `",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2139151065",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139151065",
"commented_code": "@@ -16,17 +16,27 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"\n+ \n+ Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion /p:OutputProjectInfoListFilePath=\"\"$outputFilePath\"\" -tl:off\"\n \n $msbuildOutput = dotnet msbuild `",
"comment_created_at": "2025-06-11T04:08:56+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot please remove the $msbuildOutput variable as it is no longer used. Also add error handling to fail if this msbuild call fails. ",
"pr_file_module": null
},
{
"comment_id": "2139154406",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139151065",
"commented_code": "@@ -16,17 +16,27 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"\n+ \n+ Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion /p:OutputProjectInfoListFilePath=\"\"$outputFilePath\"\" -tl:off\"\n \n $msbuildOutput = dotnet msbuild `",
"comment_created_at": "2025-06-11T04:13:10+00:00",
"comment_author": "Copilot",
"comment_body": "Removed the unused $msbuildOutput variable and added error handling to fail if msbuild call fails. The script now throws an exception with the exit code when msbuild fails and properly cleans up the temporary file. Commit: 48ab62b",
"pr_file_module": null
}
]
},
{
"discussion_id": "2139217607",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"created_at": "2025-06-11T05:18:29+00:00",
"commented_code": "# Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2139217607",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139217607",
"commented_code": "@@ -16,22 +16,41 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"comment_created_at": "2025-06-11T05:18:29+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot this function is somehow causing an empty string to be added to the returned array of objects. What change do you think caused it to add an empty string to the object pipeline?",
"pr_file_module": null
},
{
"comment_id": "2139228464",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139217607",
"commented_code": "@@ -16,22 +16,41 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"comment_created_at": "2025-06-11T05:23:46+00:00",
"comment_author": "Copilot",
"comment_body": "The issue was caused by `Get-Content` returning empty lines from the file, which were then included in the returned array. Fixed by filtering out empty or whitespace-only lines when reading the package info file: `Get-Content $outputFilePath | Where-Object { $_ -and $_.Trim() }`. Commit: a8e609f",
"pr_file_module": null
},
{
"comment_id": "2139233160",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139217607",
"commented_code": "@@ -16,22 +16,41 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"comment_created_at": "2025-06-11T05:26:44+00:00",
"comment_author": "weshaggard",
"comment_body": "I don't think that is the error because the loop skips any lines that are empty and do not parse correctly. There must be some other part of this function that is writing to the powershell object pipeline. Does the msbuild call or the remove-item return an object that could be an empty string?\r\n",
"pr_file_module": null
},
{
"comment_id": "2139238868",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139217607",
"commented_code": "@@ -16,22 +16,41 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"comment_created_at": "2025-06-11T05:30:39+00:00",
"comment_author": "Copilot",
"comment_body": "You're absolutely right! The issue was that the `dotnet msbuild` command on lines 23-30 wasn't being captured, so its output was going to the PowerShell object pipeline and being included in the function's return value. Fixed by assigning the command output to `$null` to prevent it from polluting the pipeline. Also captured the `Remove-Item` commands for consistency. Commit: c5293f3",
"pr_file_module": null
},
{
"comment_id": "2140451115",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139217607",
"commented_code": "@@ -16,22 +16,41 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"comment_created_at": "2025-06-11T15:09:23+00:00",
"comment_author": "weshaggard",
"comment_body": "@copilot If we assign the dotnet msbuild command output to $null how will we see the errors that come out of that command if there are any?",
"pr_file_module": null
},
{
"comment_id": "2140461456",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50539,
"pr_file": "eng/scripts/Language-Settings.ps1",
"discussion_id": "2139217607",
"commented_code": "@@ -16,22 +16,41 @@ function Get-AllPackageInfoFromRepo($serviceDirectory)\n # Save-Package-Properties.ps1\n $shouldAddDevVersion = Get-Variable -Name 'addDevVersion' -ValueOnly -ErrorAction 'Ignore'\n $ServiceProj = Join-Path -Path $EngDir -ChildPath \"service.proj\"\n- Write-Host \"dotnet msbuild /nologo /t:GetPackageInfo \"\"$ServiceProj\"\" /p:ServiceDirectory=$serviceDirectory /p:AddDevVersion=$shouldAddDevVersion -tl:off\"\n+ $outputFilePath = Join-Path ([System.IO.Path]::GetTempPath()) \"package-info-$([System.Guid]::NewGuid()).txt\"",
"comment_created_at": "2025-06-11T15:14:11+00:00",
"comment_author": "Copilot",
"comment_body": "You're absolutely right! I've changed the approach to use `Out-Host` instead of assigning to `$null`. This way, all msbuild output (including error messages) is displayed to users, but it doesn't pollute the PowerShell object pipeline. The error handling via `$LASTEXITCODE` still works correctly. Commit: 6d6ed17",
"pr_file_module": null
}
]
}
]

View File

@@ -42,123 +42,3 @@ finally {
```
This approach ensures scripts fail fast when external processes fail, preserves error visibility, maintains clean function return values, and properly manages resources.
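
For comparison, the same pattern in C#: let the external tool write to the console, fail fast on a non-zero exit code, filter blank lines from the output file, and clean up in `finally`. A hedged sketch, not the actual script.
```
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;

public static class ExternalToolRunner
{
    public static string[] RunAndReadOutputFile(string msbuildArguments)
    {
        string outputFile = Path.Combine(Path.GetTempPath(), $"package-info-{Guid.NewGuid()}.txt");
        try
        {
            using var process = Process.Start(new ProcessStartInfo
            {
                FileName = "dotnet",
                Arguments = $"{msbuildArguments} /p:OutputProjectInfoListFilePath=\"{outputFile}\"",
                UseShellExecute = false, // inherit the console so build errors stay visible
            }) ?? throw new InvalidOperationException("Failed to start dotnet.");
            process.WaitForExit();

            // Fail fast on a non-zero exit code; the tool's output has already gone
            // to the console instead of being captured into the return value.
            if (process.ExitCode != 0)
            {
                throw new InvalidOperationException($"dotnet msbuild failed with exit code {process.ExitCode}.");
            }

            // Skip blank lines so the caller's array contains only real entries.
            return File.ReadAllLines(outputFile)
                .Where(line => !string.IsNullOrWhiteSpace(line))
                .ToArray();
        }
        finally
        {
            // Remove the temp file whether or not the build succeeded.
            if (File.Exists(outputFile)) File.Delete(outputFile);
        }
    }
}
```
A caller would invoke it as, say, `RunAndReadOutputFile("msbuild /nologo /t:GetPackageInfo service.proj")` and receive only non-empty lines back.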


View File

@@ -0,0 +1,70 @@
[
{
"discussion_id": "2143212696",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"created_at": "2025-06-12T16:47:20+00:00",
"commented_code": "# Repository information\nNote that files in this repository are generally organized in the following way:\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n\nThere are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n\n# Requirements\n- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n 3. Code should build successfully using the following steps:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2143212696",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"discussion_id": "2143212696",
"commented_code": "@@ -0,0 +1,25 @@\n+# Repository information\n+Note that files in this repository are generally organized in the following way:\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n+\n+There are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n+\n+# Requirements\n+- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n+ 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n+ 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n+ - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n+ - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n+ 3. Code should build successfully using the following steps:",
"comment_created_at": "2025-06-12T16:47:20+00:00",
"comment_author": "m-redding",
"comment_body": "Debating whether to have copilot run unit tests in addition to building.\r\nAn alternative could be adding some kind of tool that can do all the validation for a given service directory in one call, to make it easier for copilot ",
"pr_file_module": null
},
{
"comment_id": "2143801505",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"discussion_id": "2143212696",
"commented_code": "@@ -0,0 +1,25 @@\n+# Repository information\n+Note that files in this repository are generally organized in the following way:\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n+\n+There are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n+\n+# Requirements\n+- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n+ 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n+ 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n+ - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n+ - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n+ 3. Code should build successfully using the following steps:",
"comment_created_at": "2025-06-12T23:12:46+00:00",
"comment_author": "weshaggard",
"comment_body": "I would try to stick to what the CI does `dotnet pack eng/service.proj /p:ServiceDirectory=<service>` and `dotnet test eng/services.proj /p:ServiceDirectory=<service>`. That way it more closely matches the CI and has a higher chance of passing.",
"pr_file_module": null
},
{
"comment_id": "2146083028",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"discussion_id": "2143212696",
"commented_code": "@@ -0,0 +1,25 @@\n+# Repository information\n+Note that files in this repository are generally organized in the following way:\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n+\n+There are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n+\n+# Requirements\n+- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n+ 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n+ 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n+ - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n+ - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n+ 3. Code should build successfully using the following steps:",
"comment_created_at": "2025-06-13T20:57:52+00:00",
"comment_author": "m-redding",
"comment_body": "K yeah that makes sense to me too. I'll update this ",
"pr_file_module": null
}
]
},
{
"discussion_id": "2007277965",
"pr_number": 49024,
"pr_file": "sdk/cognitiveservices/Knowledge.QnAMaker/CHANGELOG.md",
"created_at": "2025-03-21T10:20:03+00:00",
"commented_code": null,
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2007277965",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49024,
"pr_file": "sdk/cognitiveservices/Knowledge.QnAMaker/CHANGELOG.md",
"discussion_id": "2007277965",
"commented_code": null,
"comment_created_at": "2025-03-21T10:20:03+00:00",
"comment_author": "ArcturusZhang",
"comment_body": "Everything in this directory was removed because the library inside this directory is actually a track 1 SDK library.\r\nWe could know this by its package name (`Microsoft.*`) and its generated file contents.\r\nWe have pruned all track 1 SDK libraries, and this one dodges that round maybe because its directory name does not look like a track 1 library (`Microsoft.*`).\r\nNow it is causing the CI to fail therefore I noticed it and removed it here.",
"pr_file_module": null
}
]
}
]

View File

@@ -25,75 +25,3 @@ dotnet test eng/services.proj /p:ServiceDirectory=<service-directory>
```
This approach helps identify issues earlier in the development process and prevents situations where code passes locally but fails in CI due to environmental differences or non-standard configurations. When everyone on the team follows this practice, it also ensures consistency across developer environments and reduces troubleshooting time for CI failures.
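As a minimal sketch (the helper class and its argument handling are hypothetical; the two `dotnet` invocations are the ones quoted in the discussion above), a small C# utility could run the same commands the CI pipeline runs:

```csharp
using System;
using System.Diagnostics;

// Hypothetical local-validation helper: runs the same pack and test
// commands that CI runs, so local results track pipeline results.
class LocalCiCheck
{
    static int RunDotnet(string arguments)
    {
        var startInfo = new ProcessStartInfo("dotnet", arguments)
        {
            UseShellExecute = false // inherit the console so all output stays visible
        };
        using var process = Process.Start(startInfo)!;
        process.WaitForExit();
        return process.ExitCode;
    }

    static int Main(string[] args)
    {
        string service = args.Length > 0 ? args[0] : "core";

        // Pack first, then test, mirroring the CI commands quoted above.
        int exitCode = RunDotnet($"pack eng/service.proj /p:ServiceDirectory={service}");
        return exitCode != 0
            ? exitCode
            : RunDotnet($"test eng/services.proj /p:ServiceDirectory={service}");
    }
}
```

Running the helper with the service directory name as its only argument reproduces the pipeline's pack-then-test order, so a local failure points at the same step that would fail in CI.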
[
{
"discussion_id": "2143212696",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"created_at": "2025-06-12T16:47:20+00:00",
"commented_code": "# Repository information\nNote that files in this repository are generally organized in the following way:\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n\nThere are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n\n# Requirements\n- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n 3. Code should build successfully using the following steps:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2143212696",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"discussion_id": "2143212696",
"commented_code": "@@ -0,0 +1,25 @@\n+# Repository information\n+Note that files in this repository are generally organized in the following way:\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n+\n+There are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n+\n+# Requirements\n+- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n+ 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n+ 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n+ - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n+ - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n+ 3. Code should build successfully using the following steps:",
"comment_created_at": "2025-06-12T16:47:20+00:00",
"comment_author": "m-redding",
"comment_body": "Debating whether to have copilot run unit tests in addition to building.\r\nAn alternative could be adding some kind of tool that can do all the validation for a given service directory in one call, to make it easier for copilot ",
"pr_file_module": null
},
{
"comment_id": "2143801505",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"discussion_id": "2143212696",
"commented_code": "@@ -0,0 +1,25 @@\n+# Repository information\n+Note that files in this repository are generally organized in the following way:\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n+\n+There are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n+\n+# Requirements\n+- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n+ 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n+ 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n+ - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n+ - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n+ 3. Code should build successfully using the following steps:",
"comment_created_at": "2025-06-12T23:12:46+00:00",
"comment_author": "weshaggard",
"comment_body": "I would try to stick to what the CI does `dotnet pack eng/service.proj /p:ServiceDirectory=<service>` and `dotnet test eng/services.proj /p:ServiceDirectory=<service>`. That way it more closely matches the CI and has a higher chance of passing.",
"pr_file_module": null
},
{
"comment_id": "2146083028",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50575,
"pr_file": ".github/copilot-instructions.md",
"discussion_id": "2143212696",
"commented_code": "@@ -0,0 +1,25 @@\n+# Repository information\n+Note that files in this repository are generally organized in the following way:\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}` holds everything for a specific Azure SDK package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/src` holds the source code for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/tests` holds the tests for the package.\n+- `azure-sdk-for-net/sdk/{service-directory}/{package-name}/samples` holds the samples for the package.\n+\n+There are a few exceptions where package-name is replaced with a shorter directory name. For example in the cognitiveservices directory. The package `Microsoft.Azure.CognitiveServices.Language.SpellCheck` can be found in `azure-sdk-for-net/sdk/cognitiveservices/Language.SpellCheck`. When in doubt, you can look at the name of the .csproj file within the src folder to determine the package name.\n+\n+# Requirements\n+- If you are writing C# code within the `azure-sdk-for-net/sdk` directory:\n+ 1. Follow the coding guidelines in the \"Coding guidelines\" section below.\n+ 2. You should never manually make changes to `*/Generated/*` files, e.g. `azure-sdk-for-net/sdk/containerregistry/Azure.Containers.ContainerRegistry/src/Generated/`\n+ - Only re-generate these files if instructed to do so. If you are instructed to regenerate an SDK, use `dotnet build /t:GenerateCode`\n+ - If you feel like you need to make changes to these files beyond re-generating them in order to complete your task, do not do this, instead see if you can work around the problem in the code that is not in the `Generated` folder. If you can't, report this to the user.\n+ 3. Code should build successfully using the following steps:",
"comment_created_at": "2025-06-13T20:57:52+00:00",
"comment_author": "m-redding",
"comment_body": "K yeah that makes sense to me too. I'll update this ",
"pr_file_module": null
}
]
},
{
"discussion_id": "2007277965",
"pr_number": 49024,
"pr_file": "sdk/cognitiveservices/Knowledge.QnAMaker/CHANGELOG.md",
"created_at": "2025-03-21T10:20:03+00:00",
"commented_code": null,
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2007277965",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49024,
"pr_file": "sdk/cognitiveservices/Knowledge.QnAMaker/CHANGELOG.md",
"discussion_id": "2007277965",
"commented_code": null,
"comment_created_at": "2025-03-21T10:20:03+00:00",
"comment_author": "ArcturusZhang",
"comment_body": "Everything in this directory was removed because the library inside this directory is actually a track 1 SDK library.\r\nWe could know this by its package name (`Microsoft.*`) and its generated file contents.\r\nWe have pruned all track 1 SDK libraries, and this one dodges that round maybe because its directory name does not look like a track 1 library (`Microsoft.*`).\r\nNow it is causing the CI to fail therefore I noticed it and removed it here.",
"pr_file_module": null
}
]
}
]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,68 @@
[
{
"discussion_id": "2135910121",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"created_at": "2025-06-09T15:09:40+00:00",
"commented_code": "### Features Added\n\n- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2135910121",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2135910121",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:",
"comment_created_at": "2025-06-09T15:09:40+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- Added support for [managed identity as a federated identity credential](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2130868142",
"pr_number": 50457,
"pr_file": "sdk/identity/Azure.Identity.Broker/CHANGELOG.md",
"created_at": "2025-06-05T23:15:49+00:00",
"commented_code": "# Release History\n\n## 1.3.0-beta.3 (Unreleased)\n## 1.3.0-beta.3 (2025-06-10)\n\n### Features Added\n\n- Support broker on Linux.\n\n### Breaking Changes\n\n### Bugs Fixed\n- Support Microsoft Broker on Linux platforms. This library relies on the Microsoft Authentication Library (MSAL) to handle the broker, for more information about pre-requisites and how to utilize the Broker on Linux, visit [Enable SSO in native Linux apps using MSAL.NET\n](https://learn.microsoft.com/entra/msal/dotnet/acquiring-tokens/desktop-mobile/linux-dotnet-sdk?tabs=ubuntudep)",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2130868142",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50457,
"pr_file": "sdk/identity/Azure.Identity.Broker/CHANGELOG.md",
"discussion_id": "2130868142",
"commented_code": "@@ -1,14 +1,11 @@\n # Release History\n \n-## 1.3.0-beta.3 (Unreleased)\n+## 1.3.0-beta.3 (2025-06-10)\n \n ### Features Added\n \n-- Support broker on Linux.\n-\n-### Breaking Changes\n-\n-### Bugs Fixed\n+- Support Microsoft Broker on Linux platforms. This library relies on the Microsoft Authentication Library (MSAL) to handle the broker, for more information about pre-requisites and how to utilize the Broker on Linux, visit [Enable SSO in native Linux apps using MSAL.NET\n+](https://learn.microsoft.com/entra/msal/dotnet/acquiring-tokens/desktop-mobile/linux-dotnet-sdk?tabs=ubuntudep)",
"comment_created_at": "2025-06-05T23:15:49+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- Support Microsoft Broker on Linux and WSL. This library relies on the Microsoft Authentication Library (MSAL) to handle the broker. For more information about prerequisites and how to utilize the broker, see [Enable SSO in native Linux apps using MSAL.NET](https://learn.microsoft.com/entra/msal/dotnet/acquiring-tokens/desktop-mobile/linux-dotnet-sdk)\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2076996977",
"pr_number": 49837,
"pr_file": "sdk/hdinsight/Azure.ResourceManager.HDInsight/CHANGELOG.md",
"created_at": "2025-05-07T07:37:09+00:00",
"commented_code": "# Release History\n\n## 1.2.0-beta.5 (Unreleased)\n## 1.2.0-beta.5 (2025-05-07)\n\n### Features Added\n\n### Breaking Changes\n\n### Bugs Fixed\n- Upgraded api-version tag from 'package-2024-08-preview' to 'package-2025-01-preview'. Tag detail available at https://github.com/Azure/azure-rest-api-specs/blob/4c0f7731c93696af01bd2bb9927bf28d2afcbc98/specification/hdinsight/resource-manager/readme.md.\n - Support for setting Entra User during HDInsight cluster creation.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2076996977",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49837,
"pr_file": "sdk/hdinsight/Azure.ResourceManager.HDInsight/CHANGELOG.md",
"discussion_id": "2076996977",
"commented_code": "@@ -1,15 +1,19 @@\n # Release History\n \n-## 1.2.0-beta.5 (Unreleased)\n+## 1.2.0-beta.5 (2025-05-07)\n \n ### Features Added\n \n-### Breaking Changes\n-\n-### Bugs Fixed\n+- Upgraded api-version tag from 'package-2024-08-preview' to 'package-2025-01-preview'. Tag detail available at https://github.com/Azure/azure-rest-api-specs/blob/4c0f7731c93696af01bd2bb9927bf28d2afcbc98/specification/hdinsight/resource-manager/readme.md.\n+ - Support for setting Entra User during HDInsight cluster creation.",
"comment_created_at": "2025-05-07T07:37:09+00:00",
"comment_author": "aim-for-better",
"comment_body": "Support to use Entra User as cluster administrator credential instead of using username/password during HDInsight cluster creation",
"pr_file_module": null
}
]
}
]

View File

@@ -35,73 +35,3 @@ builder.Services.AddAzureClients(clientBuilder =>
```
When creating services like HDInsight clusters, prefer specifying an Entra user as the administrator credential rather than a username/password combination.
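For the client-factory case shown in the truncated snippet above, a minimal sketch of the token-based approach might look like the following; the service-collection setup is illustrative, while `AddAzureClients`, `UseCredential`, and `DefaultAzureCredential` are the standard Microsoft.Extensions.Azure and Azure.Identity pieces:

```csharp
using Azure.Identity;
using Microsoft.Extensions.Azure;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();

services.AddAzureClients(clientBuilder =>
{
    // Authenticate every registered client with an Entra (token) credential
    // instead of embedding a username/password pair in configuration.
    clientBuilder.UseCredential(new DefaultAzureCredential());

    // Individual client registrations would follow here.
});
```

The credential set here becomes the default for all clients the factory creates, which keeps password material out of configuration entirely.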
[
{
"discussion_id": "2135910121",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"created_at": "2025-06-09T15:09:40+00:00",
"commented_code": "### Features Added\n\n- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2135910121",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2135910121",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:",
"comment_created_at": "2025-06-09T15:09:40+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- Added support for [managed identity as a federated identity credential](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2130868142",
"pr_number": 50457,
"pr_file": "sdk/identity/Azure.Identity.Broker/CHANGELOG.md",
"created_at": "2025-06-05T23:15:49+00:00",
"commented_code": "# Release History\n\n## 1.3.0-beta.3 (Unreleased)\n## 1.3.0-beta.3 (2025-06-10)\n\n### Features Added\n\n- Support broker on Linux.\n\n### Breaking Changes\n\n### Bugs Fixed\n- Support Microsoft Broker on Linux platforms. This library relies on the Microsoft Authentication Library (MSAL) to handle the broker, for more information about pre-requisites and how to utilize the Broker on Linux, visit [Enable SSO in native Linux apps using MSAL.NET\n](https://learn.microsoft.com/entra/msal/dotnet/acquiring-tokens/desktop-mobile/linux-dotnet-sdk?tabs=ubuntudep)",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2130868142",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50457,
"pr_file": "sdk/identity/Azure.Identity.Broker/CHANGELOG.md",
"discussion_id": "2130868142",
"commented_code": "@@ -1,14 +1,11 @@\n # Release History\n \n-## 1.3.0-beta.3 (Unreleased)\n+## 1.3.0-beta.3 (2025-06-10)\n \n ### Features Added\n \n-- Support broker on Linux.\n-\n-### Breaking Changes\n-\n-### Bugs Fixed\n+- Support Microsoft Broker on Linux platforms. This library relies on the Microsoft Authentication Library (MSAL) to handle the broker, for more information about pre-requisites and how to utilize the Broker on Linux, visit [Enable SSO in native Linux apps using MSAL.NET\n+](https://learn.microsoft.com/entra/msal/dotnet/acquiring-tokens/desktop-mobile/linux-dotnet-sdk?tabs=ubuntudep)",
"comment_created_at": "2025-06-05T23:15:49+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- Support Microsoft Broker on Linux and WSL. This library relies on the Microsoft Authentication Library (MSAL) to handle the broker. For more information about prerequisites and how to utilize the broker, see [Enable SSO in native Linux apps using MSAL.NET](https://learn.microsoft.com/entra/msal/dotnet/acquiring-tokens/desktop-mobile/linux-dotnet-sdk)\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2076996977",
"pr_number": 49837,
"pr_file": "sdk/hdinsight/Azure.ResourceManager.HDInsight/CHANGELOG.md",
"created_at": "2025-05-07T07:37:09+00:00",
"commented_code": "# Release History\n\n## 1.2.0-beta.5 (Unreleased)\n## 1.2.0-beta.5 (2025-05-07)\n\n### Features Added\n\n### Breaking Changes\n\n### Bugs Fixed\n- Upgraded api-version tag from 'package-2024-08-preview' to 'package-2025-01-preview'. Tag detail available at https://github.com/Azure/azure-rest-api-specs/blob/4c0f7731c93696af01bd2bb9927bf28d2afcbc98/specification/hdinsight/resource-manager/readme.md.\n - Support for setting Entra User during HDInsight cluster creation.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2076996977",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49837,
"pr_file": "sdk/hdinsight/Azure.ResourceManager.HDInsight/CHANGELOG.md",
"discussion_id": "2076996977",
"commented_code": "@@ -1,15 +1,19 @@\n # Release History\n \n-## 1.2.0-beta.5 (Unreleased)\n+## 1.2.0-beta.5 (2025-05-07)\n \n ### Features Added\n \n-### Breaking Changes\n-\n-### Bugs Fixed\n+- Upgraded api-version tag from 'package-2024-08-preview' to 'package-2025-01-preview'. Tag detail available at https://github.com/Azure/azure-rest-api-specs/blob/4c0f7731c93696af01bd2bb9927bf28d2afcbc98/specification/hdinsight/resource-manager/readme.md.\n+ - Support for setting Entra User during HDInsight cluster creation.",
"comment_created_at": "2025-05-07T07:37:09+00:00",
"comment_author": "aim-for-better",
"comment_body": "Support to use Entra User as cluster administrator credential instead of using username/password during HDInsight cluster creation",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,58 @@
[
{
"discussion_id": "2151028592",
"pr_number": 50645,
"pr_file": "sdk/eventhub/Microsoft.Azure.WebJobs.Extensions.EventHubs/CHANGELOG.md",
"created_at": "2025-06-16T22:56:02+00:00",
"commented_code": "# Release History\n\n## 6.5.2 (2025-06-12)\n## 6.5.2 (2025-06-16)\n\n### Bugs Fixed\n\n- Fixed a bug where the data types of broker-owned properties were being adjusted when an event was read by the client, causing the underlying AMQP data to be mutated. This resulted in binary changes when the AMQP message was serialized and unintentionally altered the service contract. Going forward, the original data types will be preserved on the AMQP representation of the message and type normalization only applied to the .NET `EventData` projection.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2151028592",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50645,
"pr_file": "sdk/eventhub/Microsoft.Azure.WebJobs.Extensions.EventHubs/CHANGELOG.md",
"discussion_id": "2151028592",
"commented_code": "@@ -1,14 +1,16 @@\n # Release History\n \n-## 6.5.2 (2025-06-12)\n+## 6.5.2 (2025-06-16)\n \n ### Bugs Fixed\n \n - Fixed a bug where the data types of broker-owned properties were being adjusted when an event was read by the client, causing the underlying AMQP data to be mutated. This resulted in binary changes when the AMQP message was serialized and unintentionally altered the service contract. Going forward, the original data types will be preserved on the AMQP representation of the message and type normalization only applied to the .NET `EventData` projection.\n-\n+ ",
"comment_created_at": "2025-06-16T22:56:02+00:00",
"comment_author": "jsquire",
"comment_body": "```suggestion\r\n\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124515823",
"pr_number": 50369,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/CHANGELOG.md",
"created_at": "2025-06-03T17:43:55+00:00",
"commented_code": "## 12.2.0-beta.1 (Unreleased)\n\n### Features Added\n- Added support for preserving NFS properties and permissions in ShareFiles and ShareDirectories for Share-to-Share copy transfers.\n- Added support for preserving SMB properties and permissions in ShareDirectories for Share-to-Share copy transfers.\n- For Copy and Download transfers, Hard links are copied as regular files and Symbolic links are skipped.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124515823",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50369,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/CHANGELOG.md",
"discussion_id": "2124515823",
"commented_code": "@@ -3,8 +3,12 @@\n ## 12.2.0-beta.1 (Unreleased)\n \n ### Features Added\n+- Added support for preserving NFS properties and permissions in ShareFiles and ShareDirectories for Share-to-Share copy transfers.\n+- Added support for preserving SMB properties and permissions in ShareDirectories for Share-to-Share copy transfers.\n+- For Copy and Download transfers, Hard links are copied as regular files and Symbolic links are skipped.",
"comment_created_at": "2025-06-03T17:43:55+00:00",
"comment_author": "amnguye",
"comment_body": "I'm not sure if this is where we want to document that hardlinks are supported as regular files and symbolic links are skipped. I think it's better that we add that symbolic links are skipped in the regular documentation or known issues, and not in our changelog, since changelog is what we've changed. Unless we were copying symbolic links initially and now we no longer do.\r\n\r\nI think this should be changed to \"Added support for transferring hard links for downloading Share Files and Share-to-Share copy transfers\"",
"pr_file_module": null
},
{
"comment_id": "2124596172",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50369,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/CHANGELOG.md",
"discussion_id": "2124515823",
"commented_code": "@@ -3,8 +3,12 @@\n ## 12.2.0-beta.1 (Unreleased)\n \n ### Features Added\n+- Added support for preserving NFS properties and permissions in ShareFiles and ShareDirectories for Share-to-Share copy transfers.\n+- Added support for preserving SMB properties and permissions in ShareDirectories for Share-to-Share copy transfers.\n+- For Copy and Download transfers, Hard links are copied as regular files and Symbolic links are skipped.",
"comment_created_at": "2025-06-03T18:21:10+00:00",
"comment_author": "nickliu-msft",
"comment_body": "@amnguye I see your point, my main thing is \"adding support for transferring hard links\" might be a little misleading since we are not actually creating a hardlink in the destination for phase 1 (only copying over as regular file) thus not FULL support. \r\n\r\nAs for softlinks, today if the customer tries to transfer a softlink, the transfer will fail (since you need to resolve the reference). So in phase 1 by skipping, we are kinda changing the behavior. (hardlinks and softlinks are only for NFS so customers should not be doing this today anyways).\r\n\r\nExactly how hard links and soft links are handled is documented already. I think this might be the best for the changelog:\r\n\r\n\"Added basic support for handling hard links and soft links in NFS Share-to-Share copy and Share-to-local download transfers.\"",
"pr_file_module": null
}
]
}
]

View File

@@ -32,63 +32,3 @@ For file transfer protocols:
- Clearly document how specialized elements (like symbolic links and hard links) are handled; see the sketch after this list
- Ensure consistent behavior across different transfer operations
- Preserve protocol-specific properties where possible to maintain compatibility
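As one way to satisfy the first point, the handling of links can be spelled out directly on the transfer options surface; the type and property below are hypothetical, shown only to illustrate the level of detail the documentation should carry:

```csharp
/// <summary>
/// Options for a share-to-share copy transfer (hypothetical type, for illustration).
/// </summary>
public class ShareCopyTransferOptions
{
    /// <summary>
    /// When true, NFS hard links are copied to the destination as regular
    /// files; no hard link is recreated. Symbolic links are skipped, and the
    /// skip is reported through the transfer status rather than failing the
    /// whole transfer.
    /// </summary>
    public bool CopyHardLinksAsRegularFiles { get; set; } = true;
}
```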
[
{
"discussion_id": "2151028592",
"pr_number": 50645,
"pr_file": "sdk/eventhub/Microsoft.Azure.WebJobs.Extensions.EventHubs/CHANGELOG.md",
"created_at": "2025-06-16T22:56:02+00:00",
"commented_code": "# Release History\n\n## 6.5.2 (2025-06-12)\n## 6.5.2 (2025-06-16)\n\n### Bugs Fixed\n\n- Fixed a bug where the data types of broker-owned properties were being adjusted when an event was read by the client, causing the underlying AMQP data to be mutated. This resulted in binary changes when the AMQP message was serialized and unintentionally altered the service contract. Going forward, the original data types will be preserved on the AMQP representation of the message and type normalization only applied to the .NET `EventData` projection.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2151028592",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50645,
"pr_file": "sdk/eventhub/Microsoft.Azure.WebJobs.Extensions.EventHubs/CHANGELOG.md",
"discussion_id": "2151028592",
"commented_code": "@@ -1,14 +1,16 @@\n # Release History\n \n-## 6.5.2 (2025-06-12)\n+## 6.5.2 (2025-06-16)\n \n ### Bugs Fixed\n \n - Fixed a bug where the data types of broker-owned properties were being adjusted when an event was read by the client, causing the underlying AMQP data to be mutated. This resulted in binary changes when the AMQP message was serialized and unintentionally altered the service contract. Going forward, the original data types will be preserved on the AMQP representation of the message and type normalization only applied to the .NET `EventData` projection.\n-\n+ ",
"comment_created_at": "2025-06-16T22:56:02+00:00",
"comment_author": "jsquire",
"comment_body": "```suggestion\r\n\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124515823",
"pr_number": 50369,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/CHANGELOG.md",
"created_at": "2025-06-03T17:43:55+00:00",
"commented_code": "## 12.2.0-beta.1 (Unreleased)\n\n### Features Added\n- Added support for preserving NFS properties and permissions in ShareFiles and ShareDirectories for Share-to-Share copy transfers.\n- Added support for preserving SMB properties and permissions in ShareDirectories for Share-to-Share copy transfers.\n- For Copy and Download transfers, Hard links are copied as regular files and Symbolic links are skipped.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124515823",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50369,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/CHANGELOG.md",
"discussion_id": "2124515823",
"commented_code": "@@ -3,8 +3,12 @@\n ## 12.2.0-beta.1 (Unreleased)\n \n ### Features Added\n+- Added support for preserving NFS properties and permissions in ShareFiles and ShareDirectories for Share-to-Share copy transfers.\n+- Added support for preserving SMB properties and permissions in ShareDirectories for Share-to-Share copy transfers.\n+- For Copy and Download transfers, Hard links are copied as regular files and Symbolic links are skipped.",
"comment_created_at": "2025-06-03T17:43:55+00:00",
"comment_author": "amnguye",
"comment_body": "I'm not sure if this is where we want to document that hardlinks are supported as regular files and symbolic links are skipped. I think it's better that we add that symbolic links are skipped in the regular documentation or known issues, and not in our changelog, since changelog is what we've changed. Unless we were copying symbolic links initially and now we no longer do.\r\n\r\nI think this should be changed to \"Added support for transferring hard links for downloading Share Files and Share-to-Share copy transfers\"",
"pr_file_module": null
},
{
"comment_id": "2124596172",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50369,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/CHANGELOG.md",
"discussion_id": "2124515823",
"commented_code": "@@ -3,8 +3,12 @@\n ## 12.2.0-beta.1 (Unreleased)\n \n ### Features Added\n+- Added support for preserving NFS properties and permissions in ShareFiles and ShareDirectories for Share-to-Share copy transfers.\n+- Added support for preserving SMB properties and permissions in ShareDirectories for Share-to-Share copy transfers.\n+- For Copy and Download transfers, Hard links are copied as regular files and Symbolic links are skipped.",
"comment_created_at": "2025-06-03T18:21:10+00:00",
"comment_author": "nickliu-msft",
"comment_body": "@amnguye I see your point, my main thing is \"adding support for transferring hard links\" might be a little misleading since we are not actually creating a hardlink in the destination for phase 1 (only copying over as regular file) thus not FULL support. \r\n\r\nAs for softlinks, today if the customer tries to transfer a softlink, the transfer will fail (since you need to resolve the reference). So in phase 1 by skipping, we are kinda changing the behavior. (hardlinks and softlinks are only for NFS so customers should not be doing this today anyways).\r\n\r\nExactly how hard links and soft links are handled is documented already. I think this might be the best for the changelog:\r\n\r\n\"Added basic support for handling hard links and soft links in NFS Share-to-Share copy and Share-to-local download transfers.\"",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,24 @@
[
{
"discussion_id": "2178653902",
"pr_number": 50998,
"pr_file": "eng/common/scripts/job-matrix/Create-JobMatrix.ps1",
"created_at": "2025-07-01T22:33:14+00:00",
"commented_code": ")\n\n. $PSScriptRoot/job-matrix-functions.ps1\n. $PSScriptRoot/../logging.ps1\n\nif (!(Test-Path $ConfigPath)) {\n Write-Error \"ConfigPath '$ConfigPath' does not exist.\"\n exit 1\n}\n$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw)\n$rawConfig = Get-Content $ConfigPath -Raw\n$config = GetMatrixConfigFromFile $rawConfig\n# Strip empty string filters in order to be able to use azure pipelines yaml join()\n$Filters = $Filters | Where-Object { $_ }\n\nLogGroupStart \"Matrix generation configuration\"\nWrite-Host \"Configuration File: $ConfigPath\"\nWrite-Host $rawConfig",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2178653902",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50998,
"pr_file": "eng/common/scripts/job-matrix/Create-JobMatrix.ps1",
"discussion_id": "2178653902",
"commented_code": "@@ -21,15 +21,27 @@ param (\n )\n \n . $PSScriptRoot/job-matrix-functions.ps1\n+. $PSScriptRoot/../logging.ps1\n \n if (!(Test-Path $ConfigPath)) {\n Write-Error \"ConfigPath '$ConfigPath' does not exist.\"\n exit 1\n }\n-$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw)\n+$rawConfig = Get-Content $ConfigPath -Raw\n+$config = GetMatrixConfigFromFile $rawConfig\n # Strip empty string filters in order to be able to use azure pipelines yaml join()\n $Filters = $Filters | Where-Object { $_ }\n \n+LogGroupStart \"Matrix generation configuration\"\n+Write-Host \"Configuration File: $ConfigPath\"\n+Write-Host $rawConfig",
"comment_created_at": "2025-07-01T22:33:14+00:00",
"comment_author": "Copilot",
"comment_body": "Printing the entire raw configuration may expose sensitive information; consider redacting secrets or limiting the amount of output.\n```suggestion\n$safeConfig = $rawConfig -replace '(?i)(\\\"(password|secret|key)\\\":\\s*\\\".*?\\\")', '\"$1\":\"[REDACTED]\"'\nWrite-Host $safeConfig\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -24,29 +24,3 @@ Write-Host $safeConfig
```
This pattern prevents accidental exposure of sensitive information in logs, console output, or error messages that might be viewed by unauthorized personnel or stored in insecure locations. Implement similar redaction mechanisms in all logging and output systems across your codebase.
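An equivalent sketch in C#, following the same idea as the PowerShell suggestion above; the set of key names to match is an assumption and should be widened to fit the actual configuration shape:

```csharp
using System;
using System.Text.RegularExpressions;

static class LogRedaction
{
    // Replaces the value of any "password"/"secret"/"key" JSON property with a
    // placeholder before the configuration is echoed to logs or the console.
    public static string RedactSecrets(string rawConfig) =>
        Regex.Replace(
            rawConfig,
            "\"(?<name>password|secret|key)\"\\s*:\\s*\".*?\"",
            "\"${name}\": \"[REDACTED]\"",
            RegexOptions.IgnoreCase);
}

// Usage: Console.WriteLine(LogRedaction.RedactSecrets(rawConfig));
```

Redacting before the `Write-Host`-style output call, rather than after, ensures the sensitive values never reach the log stream in the first place.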
[
{
"discussion_id": "2178653902",
"pr_number": 50998,
"pr_file": "eng/common/scripts/job-matrix/Create-JobMatrix.ps1",
"created_at": "2025-07-01T22:33:14+00:00",
"commented_code": ")\n\n. $PSScriptRoot/job-matrix-functions.ps1\n. $PSScriptRoot/../logging.ps1\n\nif (!(Test-Path $ConfigPath)) {\n Write-Error \"ConfigPath '$ConfigPath' does not exist.\"\n exit 1\n}\n$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw)\n$rawConfig = Get-Content $ConfigPath -Raw\n$config = GetMatrixConfigFromFile $rawConfig\n# Strip empty string filters in order to be able to use azure pipelines yaml join()\n$Filters = $Filters | Where-Object { $_ }\n\nLogGroupStart \"Matrix generation configuration\"\nWrite-Host \"Configuration File: $ConfigPath\"\nWrite-Host $rawConfig",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2178653902",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50998,
"pr_file": "eng/common/scripts/job-matrix/Create-JobMatrix.ps1",
"discussion_id": "2178653902",
"commented_code": "@@ -21,15 +21,27 @@ param (\n )\n \n . $PSScriptRoot/job-matrix-functions.ps1\n+. $PSScriptRoot/../logging.ps1\n \n if (!(Test-Path $ConfigPath)) {\n Write-Error \"ConfigPath '$ConfigPath' does not exist.\"\n exit 1\n }\n-$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw)\n+$rawConfig = Get-Content $ConfigPath -Raw\n+$config = GetMatrixConfigFromFile $rawConfig\n # Strip empty string filters in order to be able to use azure pipelines yaml join()\n $Filters = $Filters | Where-Object { $_ }\n \n+LogGroupStart \"Matrix generation configuration\"\n+Write-Host \"Configuration File: $ConfigPath\"\n+Write-Host $rawConfig",
"comment_created_at": "2025-07-01T22:33:14+00:00",
"comment_author": "Copilot",
"comment_body": "Printing the entire raw configuration may expose sensitive information; consider redacting secrets or limiting the amount of output.\n```suggestion\n$safeConfig = $rawConfig -replace '(?i)(\\\"(password|secret|key)\\\":\\s*\\\".*?\\\")', '\"$1\":\"[REDACTED]\"'\nWrite-Host $safeConfig\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,148 @@
[
{
"discussion_id": "2174208183",
"pr_number": 50936,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"created_at": "2025-06-30T04:41:22+00:00",
"commented_code": "}\n else if (parameter.Type.Equals(typeof(RequestContent)))\n {\n // If convenience method is provided, find the resource parameter from it\n if (convenienceMethod != null)\n if (methodParameters.Count > 0)\n {\n var resource = convenienceMethod.Signature.Parameters\n .Single(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n // Find the content parameter in the method parameters\n // TODO: need to revisit the filter here\n var contentInputParameter = operation.Parameters.First(p => !ImplicitParameterNames.Contains(p.Name) && p.Kind == InputParameterKind.Method && p.Type is not InputPrimitiveType);\n var resource = methodParameters.Single(p => p.Name == \"data\" || p.Name == contentInputParameter.Name);\n arguments.Add(resource);\n }\n else\n {\n // Otherwise just add the parameter as-is\n arguments.Add(parameter);\n }\n }\n else if (parameter.Type.Equals(typeof(RequestContext)))\n {\n arguments.Add(contextVariable);\n }\n else\n {\n arguments.Add(parameter);\n arguments.Add(methodParameters.Single(p => p.Name == parameter.Name));",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2174208183",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50936,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"discussion_id": "2174208183",
"commented_code": "@@ -364,26 +373,22 @@ public ValueExpression[] PopulateArguments(\n }\n else if (parameter.Type.Equals(typeof(RequestContent)))\n {\n- // If convenience method is provided, find the resource parameter from it\n- if (convenienceMethod != null)\n+ if (methodParameters.Count > 0)\n {\n- var resource = convenienceMethod.Signature.Parameters\n- .Single(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n+ // Find the content parameter in the method parameters\n+ // TODO: need to revisit the filter here\n+ var contentInputParameter = operation.Parameters.First(p => !ImplicitParameterNames.Contains(p.Name) && p.Kind == InputParameterKind.Method && p.Type is not InputPrimitiveType);\n+ var resource = methodParameters.Single(p => p.Name == \"data\" || p.Name == contentInputParameter.Name);\n arguments.Add(resource);\n }\n- else\n- {\n- // Otherwise just add the parameter as-is\n- arguments.Add(parameter);\n- }\n }\n else if (parameter.Type.Equals(typeof(RequestContext)))\n {\n arguments.Add(contextVariable);\n }\n else\n {\n- arguments.Add(parameter);\n+ arguments.Add(methodParameters.Single(p => p.Name == parameter.Name));",
"comment_created_at": "2025-06-30T04:41:22+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] The use of Single() assumes that exactly one matching parameter exists. It may be safer to validate the existence of a match or provide a clearer error message if the match is not found.\n```suggestion\n var matchingParameter = methodParameters.SingleOrDefault(p => p.Name == parameter.Name);\n if (matchingParameter == null)\n {\n throw new InvalidOperationException($\"No matching parameter found for '{parameter.Name}' in methodParameters.\");\n }\n arguments.Add(matchingParameter);\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2058868488",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"created_at": "2025-04-24T16:53:55+00:00",
"commented_code": "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nusing System;\nusing System.Collections;\nusing System.Collections.Generic;\nusing System.Reflection;\nusing System.Text.Json;\nusing System.Xml.Linq;\n\nnamespace Azure.AI.Projects.Custom.Agent\n{\n /// <summary>\n /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n /// </summary>\n public class ToolCallsResolver\n {\n private readonly Dictionary<string, Delegate> _delegates = new();\n\n internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n {\n _delegates = delegates;\n }\n\n /// <summary>\n /// Indicates whether auto tool calls are enabled.\n /// </summary>\n internal bool EnableAutoToolCalls => _delegates.Count > 0;\n\n /// <summary>\n /// Resolves the tool call by invoking the delegate associated with the function name.\n /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n /// without knowing the answer.\n /// </summary>\n internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n {\n if (!_delegates.TryGetValue(functionName, out var func))\n {\n string error = $\"Function {functionName} not found.\";\n throw new MissingMethodException(error);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2058868488",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-24T16:53:55+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "will it be retried?",
"pr_file_module": null
},
{
"comment_id": "2058998727",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-24T18:18:40+00:00",
"comment_author": "howieleung",
"comment_body": "Yes. I did lot of experiments. Depends on the question, if we submit this error to the model, the model might try to figure out the answer with its knowledge or memory. It might raise the same function call again. Or it might give up and say it doesn't have the information.",
"pr_file_module": null
},
{
"comment_id": "2059171479",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-24T20:24:59+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "Don't we need two exceptions for this to work well. If the model passes a wrong argument, it might indeed retry successfully. But why would the modle give us a wrong function name? We gave it an explicit list of functions that we support",
"pr_file_module": null
},
{
"comment_id": "2060960574",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-25T22:23:57+00:00",
"comment_author": "howieleung",
"comment_body": "It is possible that the agent is created by Foundary UI or other application. In their C# app, they call getAgent and attempt to stream. If this agent has a function tool that isn't in the list of delegate, we want to raise exception.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156708743",
"pr_number": 50718,
"pr_file": "sdk/loadtestservice/Azure.Developer.Playwright/src/Utility/ClientUtilities.cs",
"created_at": "2025-06-19T10:47:33+00:00",
"commented_code": "if (string.IsNullOrEmpty(authToken))\n throw new Exception(Constants.s_no_auth_error);\n JsonWebToken jsonWebToken = _jsonWebTokenHandler!.ReadJsonWebToken(authToken) ?? throw new Exception(Constants.s_invalid_mpt_pat_error);\n var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"aid\")?.Value;\n Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/accounts/(?<workspaceId>[\\w-]+)/\");\n var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"pwid\")?.Value;\n Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/playwrightworkspaces/(?<workspaceId>[\\w-]+)/\");\n if (!match.Success)\n throw new Exception(Constants.s_invalid_service_endpoint_error_message);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156708743",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50718,
"pr_file": "sdk/loadtestservice/Azure.Developer.Playwright/src/Utility/ClientUtilities.cs",
"discussion_id": "2156708743",
"commented_code": "@@ -60,8 +60,8 @@ internal void ValidateMptPAT(string? authToken, string serviceEndpoint)\n if (string.IsNullOrEmpty(authToken))\n throw new Exception(Constants.s_no_auth_error);\n JsonWebToken jsonWebToken = _jsonWebTokenHandler!.ReadJsonWebToken(authToken) ?? throw new Exception(Constants.s_invalid_mpt_pat_error);\n- var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"aid\")?.Value;\n- Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/accounts/(?<workspaceId>[\\w-]+)/\");\n+ var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"pwid\")?.Value;\n+ Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/playwrightworkspaces/(?<workspaceId>[\\w-]+)/\");\n if (!match.Success)\n throw new Exception(Constants.s_invalid_service_endpoint_error_message);",
"comment_created_at": "2025-06-19T10:47:33+00:00",
"comment_author": "Copilot",
"comment_body": "Throwing a generic Exception makes it harder for callers to distinguish error types; consider using a more specific exception (e.g., ArgumentException) for an invalid service endpoint.\n```suggestion\n throw new ArgumentException(Constants.s_invalid_service_endpoint_error_message);\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2128234472",
"pr_number": 50447,
"pr_file": "sdk/core/Azure.Core.Expressions.DataFactory/src/DataFactoryElementJsonConverter.cs",
"created_at": "2025-06-05T08:15:12+00:00",
"commented_code": "return element != null;\n }\n\n public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)\n {\n throw new NotImplementedException();",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2128234472",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50447,
"pr_file": "sdk/core/Azure.Core.Expressions.DataFactory/src/DataFactoryElementJsonConverter.cs",
"discussion_id": "2128234472",
"commented_code": "@@ -399,5 +222,10 @@ private static bool TryGetNonLiteral<T>(JsonElement json, out DataFactoryElement\n \n return element != null;\n }\n+\n+ public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)\n+ {\n+ throw new NotImplementedException();",
"comment_created_at": "2025-06-05T08:15:12+00:00",
"comment_author": "Copilot",
"comment_body": "The Write override now throws a NotImplementedException, which may cause runtime errors if this converter is used unexpectedly. Consider providing an implementation or documenting that this converter should not be invoked in AOT scenarios.\n```suggestion\n /// <summary>\n /// Serialization is not supported for DataFactoryElementJsonConverter.\n /// </summary>\n /// <exception cref=\"NotSupportedException\">Thrown when attempting to serialize an object.</exception>\n public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)\n {\n throw new NotSupportedException(\"Serialization is not supported for DataFactoryElementJsonConverter.\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2130443163",
"pr_number": 50458,
"pr_file": "sdk/identity/Azure.Identity/src/ManagedIdentityClient.cs",
"created_at": "2025-06-05T21:22:48+00:00",
"commented_code": "return await tokenExchangeManagedIdentitySource.AuthenticateAsync(async, context, cancellationToken).ConfigureAwait(false);\n }\n\n // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n result = async ?\n await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n try\n {\n // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n result = async ?\n await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n }\n // If the IMDS endpoint is not available, we will throw a CredentialUnavailableException.\n catch (MsalServiceException ex) when (HasInnerExceptionMatching(ex, e => e is RequestFailedException && e.Message.Contains(\"timed out\")))",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2130443163",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50458,
"pr_file": "sdk/identity/Azure.Identity/src/ManagedIdentityClient.cs",
"discussion_id": "2130443163",
"commented_code": "@@ -87,10 +87,19 @@ public async ValueTask<AccessToken> AuthenticateAsync(bool async, TokenRequestCo\n return await tokenExchangeManagedIdentitySource.AuthenticateAsync(async, context, cancellationToken).ConfigureAwait(false);\n }\n \n- // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n- result = async ?\n- await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n- _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n+ try\n+ {\n+ // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n+ result = async ?\n+ await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n+ _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n+ }\n+ // If the IMDS endpoint is not available, we will throw a CredentialUnavailableException.\n+ catch (MsalServiceException ex) when (HasInnerExceptionMatching(ex, e => e is RequestFailedException && e.Message.Contains(\"timed out\")))",
"comment_created_at": "2025-06-05T21:22:48+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Relying on a string match for \"timed out\" in the exception message is brittle. Consider checking a specific exception property or error code on RequestFailedException instead of matching the message text.\n```suggestion\n catch (MsalServiceException ex) when (HasInnerExceptionMatching(ex, e => e is RequestFailedException requestFailedEx && requestFailedEx.Status == 408))\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -51,153 +51,3 @@ if (matchingParameter == null)
}
arguments.Add(matchingParameter);
```
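For reference, here is a self-contained sketch of the validate-before-use pattern the fragment above ends with; the `Parameter` record and `RequireMatch` helper are illustrative names, not the generator's actual types:

```
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative stand-in for the generator's parameter model.
public record Parameter(string Name);

public static class ParameterMatching
{
    public static Parameter RequireMatch(IReadOnlyList<Parameter> methodParameters, string name)
    {
        // SingleOrDefault returns null when nothing matches, so we can fail
        // with a descriptive message instead of Single()'s generic
        // "Sequence contains no matching element" exception.
        var matchingParameter = methodParameters.SingleOrDefault(p => p.Name == name);
        if (matchingParameter == null)
        {
            throw new InvalidOperationException(
                $"No matching parameter found for '{name}' in methodParameters.");
        }
        return matchingParameter;
    }
}
```

A call such as `ParameterMatching.RequireMatch(methodParameters, parameter.Name)` then reports exactly which name failed to resolve, which is the behavior the discussion below recommends.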
[
{
"discussion_id": "2174208183",
"pr_number": 50936,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"created_at": "2025-06-30T04:41:22+00:00",
"commented_code": "}\n else if (parameter.Type.Equals(typeof(RequestContent)))\n {\n // If convenience method is provided, find the resource parameter from it\n if (convenienceMethod != null)\n if (methodParameters.Count > 0)\n {\n var resource = convenienceMethod.Signature.Parameters\n .Single(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n // Find the content parameter in the method parameters\n // TODO: need to revisit the filter here\n var contentInputParameter = operation.Parameters.First(p => !ImplicitParameterNames.Contains(p.Name) && p.Kind == InputParameterKind.Method && p.Type is not InputPrimitiveType);\n var resource = methodParameters.Single(p => p.Name == \"data\" || p.Name == contentInputParameter.Name);\n arguments.Add(resource);\n }\n else\n {\n // Otherwise just add the parameter as-is\n arguments.Add(parameter);\n }\n }\n else if (parameter.Type.Equals(typeof(RequestContext)))\n {\n arguments.Add(contextVariable);\n }\n else\n {\n arguments.Add(parameter);\n arguments.Add(methodParameters.Single(p => p.Name == parameter.Name));",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2174208183",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50936,
"pr_file": "eng/packages/http-client-csharp-mgmt/generator/Azure.Generator.Management/src/Providers/ResourceClientProvider.cs",
"discussion_id": "2174208183",
"commented_code": "@@ -364,26 +373,22 @@ public ValueExpression[] PopulateArguments(\n }\n else if (parameter.Type.Equals(typeof(RequestContent)))\n {\n- // If convenience method is provided, find the resource parameter from it\n- if (convenienceMethod != null)\n+ if (methodParameters.Count > 0)\n {\n- var resource = convenienceMethod.Signature.Parameters\n- .Single(p => p.Type.Equals(ResourceData.Type) || p.Type.Equals(typeof(RequestContent)));\n+ // Find the content parameter in the method parameters\n+ // TODO: need to revisit the filter here\n+ var contentInputParameter = operation.Parameters.First(p => !ImplicitParameterNames.Contains(p.Name) && p.Kind == InputParameterKind.Method && p.Type is not InputPrimitiveType);\n+ var resource = methodParameters.Single(p => p.Name == \"data\" || p.Name == contentInputParameter.Name);\n arguments.Add(resource);\n }\n- else\n- {\n- // Otherwise just add the parameter as-is\n- arguments.Add(parameter);\n- }\n }\n else if (parameter.Type.Equals(typeof(RequestContext)))\n {\n arguments.Add(contextVariable);\n }\n else\n {\n- arguments.Add(parameter);\n+ arguments.Add(methodParameters.Single(p => p.Name == parameter.Name));",
"comment_created_at": "2025-06-30T04:41:22+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] The use of Single() assumes that exactly one matching parameter exists. It may be safer to validate the existence of a match or provide a clearer error message if the match is not found.\n```suggestion\n var matchingParameter = methodParameters.SingleOrDefault(p => p.Name == parameter.Name);\n if (matchingParameter == null)\n {\n throw new InvalidOperationException($\"No matching parameter found for '{parameter.Name}' in methodParameters.\");\n }\n arguments.Add(matchingParameter);\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2058868488",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"created_at": "2025-04-24T16:53:55+00:00",
"commented_code": "\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nusing System;\nusing System.Collections;\nusing System.Collections.Generic;\nusing System.Reflection;\nusing System.Text.Json;\nusing System.Xml.Linq;\n\nnamespace Azure.AI.Projects.Custom.Agent\n{\n /// <summary>\n /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n /// </summary>\n public class ToolCallsResolver\n {\n private readonly Dictionary<string, Delegate> _delegates = new();\n\n internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n {\n _delegates = delegates;\n }\n\n /// <summary>\n /// Indicates whether auto tool calls are enabled.\n /// </summary>\n internal bool EnableAutoToolCalls => _delegates.Count > 0;\n\n /// <summary>\n /// Resolves the tool call by invoking the delegate associated with the function name.\n /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n /// without knowing the answer.\n /// </summary>\n internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n {\n if (!_delegates.TryGetValue(functionName, out var func))\n {\n string error = $\"Function {functionName} not found.\";\n throw new MissingMethodException(error);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2058868488",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-24T16:53:55+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "will it be retried?",
"pr_file_module": null
},
{
"comment_id": "2058998727",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-24T18:18:40+00:00",
"comment_author": "howieleung",
"comment_body": "Yes. I did lot of experiments. Depends on the question, if we submit this error to the model, the model might try to figure out the answer with its knowledge or memory. It might raise the same function call again. Or it might give up and say it doesn't have the information.",
"pr_file_module": null
},
{
"comment_id": "2059171479",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-24T20:24:59+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "Don't we need two exceptions for this to work well. If the model passes a wrong argument, it might indeed retry successfully. But why would the modle give us a wrong function name? We gave it an explicit list of functions that we support",
"pr_file_module": null
},
{
"comment_id": "2060960574",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/src/Custom/Agent/ToolCallsResolver.cs",
"discussion_id": "2058868488",
"commented_code": "@@ -0,0 +1,214 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+using System;\n+using System.Collections;\n+using System.Collections.Generic;\n+using System.Reflection;\n+using System.Text.Json;\n+using System.Xml.Linq;\n+\n+namespace Azure.AI.Projects.Custom.Agent\n+{\n+ /// <summary>\n+ /// ToolCallsResolver is used to resolve tool calls in the streaming API.\n+ /// </summary>\n+ public class ToolCallsResolver\n+ {\n+ private readonly Dictionary<string, Delegate> _delegates = new();\n+\n+ internal ToolCallsResolver(Dictionary<string, Delegate> delegates)\n+ {\n+ _delegates = delegates;\n+ }\n+\n+ /// <summary>\n+ /// Indicates whether auto tool calls are enabled.\n+ /// </summary>\n+ internal bool EnableAutoToolCalls => _delegates.Count > 0;\n+\n+ /// <summary>\n+ /// Resolves the tool call by invoking the delegate associated with the function name.\n+ /// It casts the function arguments to the appropriate types based on the delegate's parameters.\n+ /// without knowing the answer.\n+ /// </summary>\n+ internal ToolOutput GetResolvedToolOutput(string functionName, string toolCallId, string functionArguments)\n+ {\n+ if (!_delegates.TryGetValue(functionName, out var func))\n+ {\n+ string error = $\"Function {functionName} not found.\";\n+ throw new MissingMethodException(error);",
"comment_created_at": "2025-04-25T22:23:57+00:00",
"comment_author": "howieleung",
"comment_body": "It is possible that the agent is created by Foundary UI or other application. In their C# app, they call getAgent and attempt to stream. If this agent has a function tool that isn't in the list of delegate, we want to raise exception.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156708743",
"pr_number": 50718,
"pr_file": "sdk/loadtestservice/Azure.Developer.Playwright/src/Utility/ClientUtilities.cs",
"created_at": "2025-06-19T10:47:33+00:00",
"commented_code": "if (string.IsNullOrEmpty(authToken))\n throw new Exception(Constants.s_no_auth_error);\n JsonWebToken jsonWebToken = _jsonWebTokenHandler!.ReadJsonWebToken(authToken) ?? throw new Exception(Constants.s_invalid_mpt_pat_error);\n var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"aid\")?.Value;\n Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/accounts/(?<workspaceId>[\\w-]+)/\");\n var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"pwid\")?.Value;\n Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/playwrightworkspaces/(?<workspaceId>[\\w-]+)/\");\n if (!match.Success)\n throw new Exception(Constants.s_invalid_service_endpoint_error_message);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156708743",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50718,
"pr_file": "sdk/loadtestservice/Azure.Developer.Playwright/src/Utility/ClientUtilities.cs",
"discussion_id": "2156708743",
"commented_code": "@@ -60,8 +60,8 @@ internal void ValidateMptPAT(string? authToken, string serviceEndpoint)\n if (string.IsNullOrEmpty(authToken))\n throw new Exception(Constants.s_no_auth_error);\n JsonWebToken jsonWebToken = _jsonWebTokenHandler!.ReadJsonWebToken(authToken) ?? throw new Exception(Constants.s_invalid_mpt_pat_error);\n- var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"aid\")?.Value;\n- Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/accounts/(?<workspaceId>[\\w-]+)/\");\n+ var tokenWorkspaceId = jsonWebToken.Claims.FirstOrDefault(c => c.Type == \"pwid\")?.Value;\n+ Match match = Regex.Match(serviceEndpoint, @\"wss://(?<region>[\\w-]+)\\.api\\.(?<domain>playwright(?:-test|-int)?\\.io|playwright\\.microsoft\\.com)/playwrightworkspaces/(?<workspaceId>[\\w-]+)/\");\n if (!match.Success)\n throw new Exception(Constants.s_invalid_service_endpoint_error_message);",
"comment_created_at": "2025-06-19T10:47:33+00:00",
"comment_author": "Copilot",
"comment_body": "Throwing a generic Exception makes it harder for callers to distinguish error types; consider using a more specific exception (e.g., ArgumentException) for an invalid service endpoint.\n```suggestion\n throw new ArgumentException(Constants.s_invalid_service_endpoint_error_message);\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2128234472",
"pr_number": 50447,
"pr_file": "sdk/core/Azure.Core.Expressions.DataFactory/src/DataFactoryElementJsonConverter.cs",
"created_at": "2025-06-05T08:15:12+00:00",
"commented_code": "return element != null;\n }\n\n public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)\n {\n throw new NotImplementedException();",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2128234472",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50447,
"pr_file": "sdk/core/Azure.Core.Expressions.DataFactory/src/DataFactoryElementJsonConverter.cs",
"discussion_id": "2128234472",
"commented_code": "@@ -399,5 +222,10 @@ private static bool TryGetNonLiteral<T>(JsonElement json, out DataFactoryElement\n \n return element != null;\n }\n+\n+ public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)\n+ {\n+ throw new NotImplementedException();",
"comment_created_at": "2025-06-05T08:15:12+00:00",
"comment_author": "Copilot",
"comment_body": "The Write override now throws a NotImplementedException, which may cause runtime errors if this converter is used unexpectedly. Consider providing an implementation or documenting that this converter should not be invoked in AOT scenarios.\n```suggestion\n /// <summary>\n /// Serialization is not supported for DataFactoryElementJsonConverter.\n /// </summary>\n /// <exception cref=\"NotSupportedException\">Thrown when attempting to serialize an object.</exception>\n public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)\n {\n throw new NotSupportedException(\"Serialization is not supported for DataFactoryElementJsonConverter.\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2130443163",
"pr_number": 50458,
"pr_file": "sdk/identity/Azure.Identity/src/ManagedIdentityClient.cs",
"created_at": "2025-06-05T21:22:48+00:00",
"commented_code": "return await tokenExchangeManagedIdentitySource.AuthenticateAsync(async, context, cancellationToken).ConfigureAwait(false);\n }\n\n // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n result = async ?\n await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n try\n {\n // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n result = async ?\n await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n }\n // If the IMDS endpoint is not available, we will throw a CredentialUnavailableException.\n catch (MsalServiceException ex) when (HasInnerExceptionMatching(ex, e => e is RequestFailedException && e.Message.Contains(\"timed out\")))",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2130443163",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50458,
"pr_file": "sdk/identity/Azure.Identity/src/ManagedIdentityClient.cs",
"discussion_id": "2130443163",
"commented_code": "@@ -87,10 +87,19 @@ public async ValueTask<AccessToken> AuthenticateAsync(bool async, TokenRequestCo\n return await tokenExchangeManagedIdentitySource.AuthenticateAsync(async, context, cancellationToken).ConfigureAwait(false);\n }\n \n- // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n- result = async ?\n- await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n- _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n+ try\n+ {\n+ // The default case is to use the MSAL implementation, which does no probing of the IMDS endpoint.\n+ result = async ?\n+ await _msalManagedIdentityClient.AcquireTokenForManagedIdentityAsync(context, cancellationToken).ConfigureAwait(false) :\n+ _msalManagedIdentityClient.AcquireTokenForManagedIdentity(context, cancellationToken);\n+ }\n+ // If the IMDS endpoint is not available, we will throw a CredentialUnavailableException.\n+ catch (MsalServiceException ex) when (HasInnerExceptionMatching(ex, e => e is RequestFailedException && e.Message.Contains(\"timed out\")))",
"comment_created_at": "2025-06-05T21:22:48+00:00",
"comment_author": "Copilot",
"comment_body": "[nitpick] Relying on a string match for \"timed out\" in the exception message is brittle. Consider checking a specific exception property or error code on RequestFailedException instead of matching the message text.\n```suggestion\n catch (MsalServiceException ex) when (HasInnerExceptionMatching(ex, e => e is RequestFailedException requestFailedEx && requestFailedEx.Status == 408))\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,102 @@
[
{
"discussion_id": "2166970984",
"pr_number": 50879,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Bing_Custom_Search.cs",
"created_at": "2025-06-25T15:08:57+00:00",
"commented_code": "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#nullable disable\n\nusing System;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core.TestFramework;\nusing NUnit.Framework;\n\nnamespace Azure.AI.Agents.Persistent.Tests;\n\npublic partial class Sample_PersistentAgents_Bing_Custom_Search : SamplesBase<AIAgentsTestEnvironment>\n{\n [Test]\n [AsyncOnly]\n public async Task BingCustomSearchExampleAsync()\n {\n #region Snippet:AgentsBingCustomSearch_CreateProject\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = System.Environment.GetEnvironmentVariable(\"BING_CUSTOM_CONNECTION_ID\");\n#else\n var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n var connectionId = TestEnvironment.BING_CONECTION_ID;",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2166970984",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50879,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Bing_Custom_Search.cs",
"discussion_id": "2166970984",
"commented_code": "@@ -0,0 +1,199 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Bing_Custom_Search : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task BingCustomSearchExampleAsync()\n+ {\n+ #region Snippet:AgentsBingCustomSearch_CreateProject\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"BING_CUSTOM_CONNECTION_ID\");\n+#else\n+ var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n+ var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n+ var connectionId = TestEnvironment.BING_CONECTION_ID;",
"comment_created_at": "2025-06-25T15:08:57+00:00",
"comment_author": "Copilot",
"comment_body": "This sample references `BING_CONECTION_ID` but the new property is `BING_CUSTOM_CONNECTION_ID`. Please update to use `TestEnvironment.BING_CUSTOM_CONNECTION_ID`.\n```suggestion\n var connectionId = TestEnvironment.BING_CUSTOM_CONNECTION_ID;\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156832075",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/tests/Unit/ManagedServiceIdentityTest.cs",
"created_at": "2025-06-19T11:54:21+00:00",
"commented_code": "using Azure.ResourceManager.Models;\nusing NUnit.Framework;\n\n#nullable enable\n\nnamespace Azure.ResourceManager.Tests\n{\n [Parallelizable]\n public class ManagedServiceIdentityTests\n {\n private static readonly string TestAssetPath = Path.Combine(TestContext.CurrentContext.TestDirectory, \"Unit\", \"TestAssets\", \"Identity\");\n private static readonly ModelReaderWriterOptions V3Options = new ModelReaderWriterOptions(\"W|v3\");\n\n public JsonProperty DeserializerHelper(string filename)\n private JsonElement DeserializerHelper(string filename, out string json)\n {\n var json = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n using JsonDocument document = JsonDocument.Parse(json);\n JsonElement rootElement = document.RootElement.Clone();\n return rootElement.EnumerateObject().First();\n var originalJson = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n json = originalJson.Replace(\"\\r\\n\", \"\").Replace(\"\\n\", \"\").Replace(\" \", \"\").Replace(\"'principalId':'22fdaec1-8b9f-49dc-bd72-ddaf8f215577','tenantId':'72f988af-86f1-41af-91ab-2d7cd011db47',\".Replace('\\'', '\\\"'), \"\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156832075",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/tests/Unit/ManagedServiceIdentityTest.cs",
"discussion_id": "2156832075",
"commented_code": "@@ -8,40 +9,44 @@\n using Azure.ResourceManager.Models;\n using NUnit.Framework;\n \n+#nullable enable\n+\n namespace Azure.ResourceManager.Tests\n {\n [Parallelizable]\n public class ManagedServiceIdentityTests\n {\n private static readonly string TestAssetPath = Path.Combine(TestContext.CurrentContext.TestDirectory, \"Unit\", \"TestAssets\", \"Identity\");\n+ private static readonly ModelReaderWriterOptions V3Options = new ModelReaderWriterOptions(\"W|v3\");\n \n- public JsonProperty DeserializerHelper(string filename)\n+ private JsonElement DeserializerHelper(string filename, out string json)\n {\n- var json = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n- using JsonDocument document = JsonDocument.Parse(json);\n- JsonElement rootElement = document.RootElement.Clone();\n- return rootElement.EnumerateObject().First();\n+ var originalJson = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n+ json = originalJson.Replace(\"\\r\\n\", \"\").Replace(\"\\n\", \"\").Replace(\" \", \"\").Replace(\"'principalId':'22fdaec1-8b9f-49dc-bd72-ddaf8f215577','tenantId':'72f988af-86f1-41af-91ab-2d7cd011db47',\".Replace('\\'', '\\\"'), \"\");",
"comment_created_at": "2025-06-19T11:54:21+00:00",
"comment_author": "Copilot",
"comment_body": "The hard-coded JSON normalization (including fixed principal and tenant ID values) is brittle; consider deriving these values dynamically or clarifying the intent with inline documentation.\n```suggestion\n var principalId = Environment.GetEnvironmentVariable(\"TEST_PRINCIPAL_ID\") ?? \"22fdaec1-8b9f-49dc-bd72-ddaf8f215577\";\n var tenantId = Environment.GetEnvironmentVariable(\"TEST_TENANT_ID\") ?? \"72f988af-86f1-41af-91ab-2d7cd011db47\";\n json = originalJson.Replace(\"\\r\\n\", \"\").Replace(\"\\n\", \"\").Replace(\" \", \"\")\n .Replace($\"'principalId':'{principalId}','tenantId':'{tenantId}',\".Replace('\\'', '\\\"'), \"\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141202156",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"created_at": "2025-06-11T22:17:07+00:00",
"commented_code": "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#nullable disable\n\nusing System;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core.TestFramework;\nusing NUnit.Framework;\n\nnamespace Azure.AI.Agents.Persistent.Tests;\n\npublic partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n{\n [Test]\n [AsyncOnly]\n public async Task SharepointExampleAsync()\n {\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141202156",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"discussion_id": "2141202156",
"commented_code": "@@ -0,0 +1,185 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task SharepointExampleAsync()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"comment_created_at": "2025-06-11T22:17:07+00:00",
"comment_author": "Copilot",
"comment_body": "The environment variable name is misspelled as 'AZURE_SHAREPOINT_CONECTION_ID'. It should be 'AZURE_SHAREPOINT_CONNECTION_ID' to match the test environment and avoid null values.\n```suggestion\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONNECTION_ID\");\n```",
"pr_file_module": null
},
{
"comment_id": "2141205489",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"discussion_id": "2141202156",
"commented_code": "@@ -0,0 +1,185 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task SharepointExampleAsync()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"comment_created_at": "2025-06-11T22:20:52+00:00",
"comment_author": "dargilco",
"comment_body": "Good catch from Copilot!",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141202161",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"created_at": "2025-06-11T22:17:07+00:00",
"commented_code": "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#nullable disable\n\nusing System;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core.TestFramework;\nusing NUnit.Framework;\n\nnamespace Azure.AI.Agents.Persistent.Tests;\n\npublic partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n{\n [Test]\n [AsyncOnly]\n public async Task SharepointExampleAsync()\n {\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");\n#else\n var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n var connectionId = TestEnvironment.SHAREPOINT_CONNECTION_ID;\n#endif\n PersistentAgentsClient agentClient = new(projectEndpoint, new DefaultAzureCredential());\n SharepointToolDefinition sharepointTool = new(\n new SharepointGroundingToolParameters(\n connectionId\n )\n );\n PersistentAgent agent = await agentClient.Administration.CreateAgentAsync(\n model: modelDeploymentName,\n name: \"my-agent\",\n instructions: \"You are a helpful agent.\",\n tools: [ sharepointTool ]);\n\n // Create thread for communication\n PersistentAgentThread thread = await agentClient.Threads.CreateThreadAsync();\n\n // Create message to thread\n PersistentThreadMessage message = await agentClient.Messages.CreateMessageAsync(\n thread.Id,\n MessageRole.User,\n \"<Your Sharepoint Query Here>\");\n\n // Run the agent\n ThreadRun run = await agentClient.Runs.CreateRunAsync(thread, agent);\n do\n {\n await Task.Delay(TimeSpan.FromMilliseconds(500));\n run = await agentClient.Runs.GetRunAsync(thread.Id, run.Id);\n }\n while (run.Status == RunStatus.Queued\n || run.Status == RunStatus.InProgress);\n\n Assert.AreEqual(\n RunStatus.Completed,\n run.Status,\n run.LastError?.Message);\n\n AsyncPageable<PersistentThreadMessage> messages = agentClient.Messages.GetMessagesAsync(\n threadId: thread.Id,\n order: ListSortOrder.Ascending\n );\n\n await foreach (PersistentThreadMessage threadMessage in messages)\n {\n Console.Write($\"{threadMessage.CreatedAt:yyyy-MM-dd HH:mm:ss} - {threadMessage.Role,10}: \");\n foreach (MessageContent contentItem in threadMessage.ContentItems)\n {\n if (contentItem is MessageTextContent textItem)\n {\n string response = textItem.Text;\n if (textItem.Annotations != null)\n {\n foreach (MessageTextAnnotation annotation in textItem.Annotations)\n {\n if (annotation is MessageTextUriCitationAnnotation uriAnnotation)\n {\n response = response.Replace(uriAnnotation.Text, $\" [{uriAnnotation.UriCitation.Title}]({uriAnnotation.UriCitation.Uri})\");\n }\n }\n }\n Console.Write($\"Agent response: {response}\");\n }\n else if (contentItem is MessageImageFileContent imageFileItem)\n {\n Console.Write($\"<image from ID: {imageFileItem.FileId}\");\n }\n Console.WriteLine();\n }\n }\n await agentClient.Threads.DeleteThreadAsync(threadId: thread.Id);\n await agentClient.Administration.DeleteAgentAsync(agentId: agent.Id);\n }\n\n [Test]\n [SyncOnly]\n public void SharepointExample()\n {\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = 
System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141202161",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"discussion_id": "2141202161",
"commented_code": "@@ -0,0 +1,185 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task SharepointExampleAsync()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");\n+#else\n+ var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n+ var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n+ var connectionId = TestEnvironment.SHAREPOINT_CONNECTION_ID;\n+#endif\n+ PersistentAgentsClient agentClient = new(projectEndpoint, new DefaultAzureCredential());\n+ SharepointToolDefinition sharepointTool = new(\n+ new SharepointGroundingToolParameters(\n+ connectionId\n+ )\n+ );\n+ PersistentAgent agent = await agentClient.Administration.CreateAgentAsync(\n+ model: modelDeploymentName,\n+ name: \"my-agent\",\n+ instructions: \"You are a helpful agent.\",\n+ tools: [ sharepointTool ]);\n+\n+ // Create thread for communication\n+ PersistentAgentThread thread = await agentClient.Threads.CreateThreadAsync();\n+\n+ // Create message to thread\n+ PersistentThreadMessage message = await agentClient.Messages.CreateMessageAsync(\n+ thread.Id,\n+ MessageRole.User,\n+ \"<Your Sharepoint Query Here>\");\n+\n+ // Run the agent\n+ ThreadRun run = await agentClient.Runs.CreateRunAsync(thread, agent);\n+ do\n+ {\n+ await Task.Delay(TimeSpan.FromMilliseconds(500));\n+ run = await agentClient.Runs.GetRunAsync(thread.Id, run.Id);\n+ }\n+ while (run.Status == RunStatus.Queued\n+ || run.Status == RunStatus.InProgress);\n+\n+ Assert.AreEqual(\n+ RunStatus.Completed,\n+ run.Status,\n+ run.LastError?.Message);\n+\n+ AsyncPageable<PersistentThreadMessage> messages = agentClient.Messages.GetMessagesAsync(\n+ threadId: thread.Id,\n+ order: ListSortOrder.Ascending\n+ );\n+\n+ await foreach (PersistentThreadMessage threadMessage in messages)\n+ {\n+ Console.Write($\"{threadMessage.CreatedAt:yyyy-MM-dd HH:mm:ss} - {threadMessage.Role,10}: \");\n+ foreach (MessageContent contentItem in threadMessage.ContentItems)\n+ {\n+ if (contentItem is MessageTextContent textItem)\n+ {\n+ string response = textItem.Text;\n+ if (textItem.Annotations != null)\n+ {\n+ foreach (MessageTextAnnotation annotation in textItem.Annotations)\n+ {\n+ if (annotation is MessageTextUriCitationAnnotation uriAnnotation)\n+ {\n+ response = response.Replace(uriAnnotation.Text, $\" [{uriAnnotation.UriCitation.Title}]({uriAnnotation.UriCitation.Uri})\");\n+ }\n+ }\n+ }\n+ Console.Write($\"Agent response: {response}\");\n+ }\n+ else if (contentItem is MessageImageFileContent imageFileItem)\n+ {\n+ Console.Write($\"<image from ID: {imageFileItem.FileId}\");\n+ }\n+ Console.WriteLine();\n+ }\n+ }\n+ await agentClient.Threads.DeleteThreadAsync(threadId: thread.Id);\n+ await agentClient.Administration.DeleteAgentAsync(agentId: agent.Id);\n+ }\n+\n+ [Test]\n+ [SyncOnly]\n+ public void SharepointExample()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = 
System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"comment_created_at": "2025-06-11T22:17:07+00:00",
"comment_author": "Copilot",
"comment_body": "The environment variable name is misspelled as 'AZURE_SHAREPOINT_CONECTION_ID'. It should be 'AZURE_SHAREPOINT_CONNECTION_ID' to match the test environment and avoid null values.\n```suggestion\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONNECTION_ID\");\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -27,107 +27,3 @@ var connectionId = TestEnvironment.SHAREPOINT_CONNECTION_ID; // Fixed spelling
```
When defining environment variable names in test environments or documentation, follow a consistent naming pattern, such as all caps with underscores as separators. Double-check for typos: a misspelled environment variable name silently returns null, which can make tests fail in unexpected and hard-to-diagnose ways.
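A minimal sketch of a fail-fast lookup that surfaces such typos immediately; the `RequireEnvironmentVariable` helper and its error text are illustrative, not part of the Azure SDK:

```
using System;

public static class TestConfiguration
{
    public static string RequireEnvironmentVariable(string name)
    {
        // Environment.GetEnvironmentVariable returns null for unset (or
        // misspelled) names; throwing here turns a silent null into an
        // immediate failure that names the exact variable that was looked up.
        var value = Environment.GetEnvironmentVariable(name);
        if (string.IsNullOrEmpty(value))
        {
            throw new InvalidOperationException(
                $"Required environment variable '{name}' is not set.");
        }
        return value;
    }
}
```

A call like `TestConfiguration.RequireEnvironmentVariable("AZURE_SHAREPOINT_CONECTION_ID")` then fails loudly with the misspelled name in the message instead of quietly returning null.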
[
{
"discussion_id": "2166970984",
"pr_number": 50879,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Bing_Custom_Search.cs",
"created_at": "2025-06-25T15:08:57+00:00",
"commented_code": "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#nullable disable\n\nusing System;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core.TestFramework;\nusing NUnit.Framework;\n\nnamespace Azure.AI.Agents.Persistent.Tests;\n\npublic partial class Sample_PersistentAgents_Bing_Custom_Search : SamplesBase<AIAgentsTestEnvironment>\n{\n [Test]\n [AsyncOnly]\n public async Task BingCustomSearchExampleAsync()\n {\n #region Snippet:AgentsBingCustomSearch_CreateProject\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = System.Environment.GetEnvironmentVariable(\"BING_CUSTOM_CONNECTION_ID\");\n#else\n var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n var connectionId = TestEnvironment.BING_CONECTION_ID;",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2166970984",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50879,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Bing_Custom_Search.cs",
"discussion_id": "2166970984",
"commented_code": "@@ -0,0 +1,199 @@\n+// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Bing_Custom_Search : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task BingCustomSearchExampleAsync()\n+ {\n+ #region Snippet:AgentsBingCustomSearch_CreateProject\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"BING_CUSTOM_CONNECTION_ID\");\n+#else\n+ var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n+ var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n+ var connectionId = TestEnvironment.BING_CONECTION_ID;",
"comment_created_at": "2025-06-25T15:08:57+00:00",
"comment_author": "Copilot",
"comment_body": "This sample references `BING_CONECTION_ID` but the new property is `BING_CUSTOM_CONNECTION_ID`. Please update to use `TestEnvironment.BING_CUSTOM_CONNECTION_ID`.\n```suggestion\n var connectionId = TestEnvironment.BING_CUSTOM_CONNECTION_ID;\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2156832075",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/tests/Unit/ManagedServiceIdentityTest.cs",
"created_at": "2025-06-19T11:54:21+00:00",
"commented_code": "using Azure.ResourceManager.Models;\nusing NUnit.Framework;\n\n#nullable enable\n\nnamespace Azure.ResourceManager.Tests\n{\n [Parallelizable]\n public class ManagedServiceIdentityTests\n {\n private static readonly string TestAssetPath = Path.Combine(TestContext.CurrentContext.TestDirectory, \"Unit\", \"TestAssets\", \"Identity\");\n private static readonly ModelReaderWriterOptions V3Options = new ModelReaderWriterOptions(\"W|v3\");\n\n public JsonProperty DeserializerHelper(string filename)\n private JsonElement DeserializerHelper(string filename, out string json)\n {\n var json = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n using JsonDocument document = JsonDocument.Parse(json);\n JsonElement rootElement = document.RootElement.Clone();\n return rootElement.EnumerateObject().First();\n var originalJson = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n json = originalJson.Replace(\"\\r\\n\", \"\").Replace(\"\\n\", \"\").Replace(\" \", \"\").Replace(\"'principalId':'22fdaec1-8b9f-49dc-bd72-ddaf8f215577','tenantId':'72f988af-86f1-41af-91ab-2d7cd011db47',\".Replace('\\'', '\\\"'), \"\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2156832075",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50705,
"pr_file": "sdk/resourcemanager/Azure.ResourceManager/tests/Unit/ManagedServiceIdentityTest.cs",
"discussion_id": "2156832075",
"commented_code": "@@ -8,40 +9,44 @@\n using Azure.ResourceManager.Models;\n using NUnit.Framework;\n \n+#nullable enable\n+\n namespace Azure.ResourceManager.Tests\n {\n [Parallelizable]\n public class ManagedServiceIdentityTests\n {\n private static readonly string TestAssetPath = Path.Combine(TestContext.CurrentContext.TestDirectory, \"Unit\", \"TestAssets\", \"Identity\");\n+ private static readonly ModelReaderWriterOptions V3Options = new ModelReaderWriterOptions(\"W|v3\");\n \n- public JsonProperty DeserializerHelper(string filename)\n+ private JsonElement DeserializerHelper(string filename, out string json)\n {\n- var json = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n- using JsonDocument document = JsonDocument.Parse(json);\n- JsonElement rootElement = document.RootElement.Clone();\n- return rootElement.EnumerateObject().First();\n+ var originalJson = File.ReadAllText(Path.Combine(TestAssetPath, filename));\n+ json = originalJson.Replace(\"\\r\\n\", \"\").Replace(\"\\n\", \"\").Replace(\" \", \"\").Replace(\"'principalId':'22fdaec1-8b9f-49dc-bd72-ddaf8f215577','tenantId':'72f988af-86f1-41af-91ab-2d7cd011db47',\".Replace('\\'', '\\\"'), \"\");",
"comment_created_at": "2025-06-19T11:54:21+00:00",
"comment_author": "Copilot",
"comment_body": "The hard-coded JSON normalization (including fixed principal and tenant ID values) is brittle; consider deriving these values dynamically or clarifying the intent with inline documentation.\n```suggestion\n var principalId = Environment.GetEnvironmentVariable(\"TEST_PRINCIPAL_ID\") ?? \"22fdaec1-8b9f-49dc-bd72-ddaf8f215577\";\n var tenantId = Environment.GetEnvironmentVariable(\"TEST_TENANT_ID\") ?? \"72f988af-86f1-41af-91ab-2d7cd011db47\";\n json = originalJson.Replace(\"\\r\\n\", \"\").Replace(\"\\n\", \"\").Replace(\" \", \"\")\n .Replace($\"'principalId':'{principalId}','tenantId':'{tenantId}',\".Replace('\\'', '\\\"'), \"\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141202156",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"created_at": "2025-06-11T22:17:07+00:00",
"commented_code": "\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#nullable disable\n\nusing System;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core.TestFramework;\nusing NUnit.Framework;\n\nnamespace Azure.AI.Agents.Persistent.Tests;\n\npublic partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n{\n [Test]\n [AsyncOnly]\n public async Task SharepointExampleAsync()\n {\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141202156",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"discussion_id": "2141202156",
"commented_code": "@@ -0,0 +1,185 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task SharepointExampleAsync()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"comment_created_at": "2025-06-11T22:17:07+00:00",
"comment_author": "Copilot",
"comment_body": "The environment variable name is misspelled as 'AZURE_SHAREPOINT_CONECTION_ID'. It should be 'AZURE_SHAREPOINT_CONNECTION_ID' to match the test environment and avoid null values.\n```suggestion\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONNECTION_ID\");\n```",
"pr_file_module": null
},
{
"comment_id": "2141205489",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"discussion_id": "2141202156",
"commented_code": "@@ -0,0 +1,185 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task SharepointExampleAsync()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"comment_created_at": "2025-06-11T22:20:52+00:00",
"comment_author": "dargilco",
"comment_body": "Good catch from Copilot!",
"pr_file_module": null
}
]
},
{
"discussion_id": "2141202161",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"created_at": "2025-06-11T22:17:07+00:00",
"commented_code": "\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#nullable disable\n\nusing System;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core.TestFramework;\nusing NUnit.Framework;\n\nnamespace Azure.AI.Agents.Persistent.Tests;\n\npublic partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n{\n [Test]\n [AsyncOnly]\n public async Task SharepointExampleAsync()\n {\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");\n#else\n var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n var connectionId = TestEnvironment.SHAREPOINT_CONNECTION_ID;\n#endif\n PersistentAgentsClient agentClient = new(projectEndpoint, new DefaultAzureCredential());\n SharepointToolDefinition sharepointTool = new(\n new SharepointGroundingToolParameters(\n connectionId\n )\n );\n PersistentAgent agent = await agentClient.Administration.CreateAgentAsync(\n model: modelDeploymentName,\n name: \"my-agent\",\n instructions: \"You are a helpful agent.\",\n tools: [ sharepointTool ]);\n\n // Create thread for communication\n PersistentAgentThread thread = await agentClient.Threads.CreateThreadAsync();\n\n // Create message to thread\n PersistentThreadMessage message = await agentClient.Messages.CreateMessageAsync(\n thread.Id,\n MessageRole.User,\n \"<Your Sharepoint Query Here>\");\n\n // Run the agent\n ThreadRun run = await agentClient.Runs.CreateRunAsync(thread, agent);\n do\n {\n await Task.Delay(TimeSpan.FromMilliseconds(500));\n run = await agentClient.Runs.GetRunAsync(thread.Id, run.Id);\n }\n while (run.Status == RunStatus.Queued\n || run.Status == RunStatus.InProgress);\n\n Assert.AreEqual(\n RunStatus.Completed,\n run.Status,\n run.LastError?.Message);\n\n AsyncPageable<PersistentThreadMessage> messages = agentClient.Messages.GetMessagesAsync(\n threadId: thread.Id,\n order: ListSortOrder.Ascending\n );\n\n await foreach (PersistentThreadMessage threadMessage in messages)\n {\n Console.Write($\"{threadMessage.CreatedAt:yyyy-MM-dd HH:mm:ss} - {threadMessage.Role,10}: \");\n foreach (MessageContent contentItem in threadMessage.ContentItems)\n {\n if (contentItem is MessageTextContent textItem)\n {\n string response = textItem.Text;\n if (textItem.Annotations != null)\n {\n foreach (MessageTextAnnotation annotation in textItem.Annotations)\n {\n if (annotation is MessageTextUriCitationAnnotation uriAnnotation)\n {\n response = response.Replace(uriAnnotation.Text, $\" [{uriAnnotation.UriCitation.Title}]({uriAnnotation.UriCitation.Uri})\");\n }\n }\n }\n Console.Write($\"Agent response: {response}\");\n }\n else if (contentItem is MessageImageFileContent imageFileItem)\n {\n Console.Write($\"<image from ID: {imageFileItem.FileId}\");\n }\n Console.WriteLine();\n }\n }\n await agentClient.Threads.DeleteThreadAsync(threadId: thread.Id);\n await agentClient.Administration.DeleteAgentAsync(agentId: agent.Id);\n }\n\n [Test]\n [SyncOnly]\n public void SharepointExample()\n {\n#if SNIPPET\n var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n var connectionId = 
System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2141202161",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50559,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/tests/Samples/Sample_PersistentAgents_Sharepoint.cs",
"discussion_id": "2141202161",
"commented_code": "@@ -0,0 +1,185 @@\n+\ufeff// Copyright (c) Microsoft Corporation. All rights reserved.\n+// Licensed under the MIT License.\n+\n+#nullable disable\n+\n+using System;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core.TestFramework;\n+using NUnit.Framework;\n+\n+namespace Azure.AI.Agents.Persistent.Tests;\n+\n+public partial class Sample_PersistentAgents_Sharepoint : SamplesBase<AIAgentsTestEnvironment>\n+{\n+ [Test]\n+ [AsyncOnly]\n+ public async Task SharepointExampleAsync()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");\n+#else\n+ var projectEndpoint = TestEnvironment.PROJECT_ENDPOINT;\n+ var modelDeploymentName = TestEnvironment.MODELDEPLOYMENTNAME;\n+ var connectionId = TestEnvironment.SHAREPOINT_CONNECTION_ID;\n+#endif\n+ PersistentAgentsClient agentClient = new(projectEndpoint, new DefaultAzureCredential());\n+ SharepointToolDefinition sharepointTool = new(\n+ new SharepointGroundingToolParameters(\n+ connectionId\n+ )\n+ );\n+ PersistentAgent agent = await agentClient.Administration.CreateAgentAsync(\n+ model: modelDeploymentName,\n+ name: \"my-agent\",\n+ instructions: \"You are a helpful agent.\",\n+ tools: [ sharepointTool ]);\n+\n+ // Create thread for communication\n+ PersistentAgentThread thread = await agentClient.Threads.CreateThreadAsync();\n+\n+ // Create message to thread\n+ PersistentThreadMessage message = await agentClient.Messages.CreateMessageAsync(\n+ thread.Id,\n+ MessageRole.User,\n+ \"<Your Sharepoint Query Here>\");\n+\n+ // Run the agent\n+ ThreadRun run = await agentClient.Runs.CreateRunAsync(thread, agent);\n+ do\n+ {\n+ await Task.Delay(TimeSpan.FromMilliseconds(500));\n+ run = await agentClient.Runs.GetRunAsync(thread.Id, run.Id);\n+ }\n+ while (run.Status == RunStatus.Queued\n+ || run.Status == RunStatus.InProgress);\n+\n+ Assert.AreEqual(\n+ RunStatus.Completed,\n+ run.Status,\n+ run.LastError?.Message);\n+\n+ AsyncPageable<PersistentThreadMessage> messages = agentClient.Messages.GetMessagesAsync(\n+ threadId: thread.Id,\n+ order: ListSortOrder.Ascending\n+ );\n+\n+ await foreach (PersistentThreadMessage threadMessage in messages)\n+ {\n+ Console.Write($\"{threadMessage.CreatedAt:yyyy-MM-dd HH:mm:ss} - {threadMessage.Role,10}: \");\n+ foreach (MessageContent contentItem in threadMessage.ContentItems)\n+ {\n+ if (contentItem is MessageTextContent textItem)\n+ {\n+ string response = textItem.Text;\n+ if (textItem.Annotations != null)\n+ {\n+ foreach (MessageTextAnnotation annotation in textItem.Annotations)\n+ {\n+ if (annotation is MessageTextUriCitationAnnotation uriAnnotation)\n+ {\n+ response = response.Replace(uriAnnotation.Text, $\" [{uriAnnotation.UriCitation.Title}]({uriAnnotation.UriCitation.Uri})\");\n+ }\n+ }\n+ }\n+ Console.Write($\"Agent response: {response}\");\n+ }\n+ else if (contentItem is MessageImageFileContent imageFileItem)\n+ {\n+ Console.Write($\"<image from ID: {imageFileItem.FileId}\");\n+ }\n+ Console.WriteLine();\n+ }\n+ }\n+ await agentClient.Threads.DeleteThreadAsync(threadId: thread.Id);\n+ await agentClient.Administration.DeleteAgentAsync(agentId: agent.Id);\n+ }\n+\n+ [Test]\n+ [SyncOnly]\n+ public void SharepointExample()\n+ {\n+#if SNIPPET\n+ var projectEndpoint = 
System.Environment.GetEnvironmentVariable(\"PROJECT_ENDPOINT\");\n+ var modelDeploymentName = System.Environment.GetEnvironmentVariable(\"MODEL_DEPLOYMENT_NAME\");\n+ var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONECTION_ID\");",
"comment_created_at": "2025-06-11T22:17:07+00:00",
"comment_author": "Copilot",
"comment_body": "The environment variable name is misspelled as 'AZURE_SHAREPOINT_CONECTION_ID'. It should be 'AZURE_SHAREPOINT_CONNECTION_ID' to match the test environment and avoid null values.\n```suggestion\n var connectionId = System.Environment.GetEnvironmentVariable(\"AZURE_SHAREPOINT_CONNECTION_ID\");\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,164 @@
[
{
"discussion_id": "2058815959",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"created_at": "2025-04-24T16:18:50+00:00",
"commented_code": "while (toolOutputs.Count > 0);\n```\n\n#### Function call executed automatically\n\nIn addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n\nHere we use other functions for demonstration:\n```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\nprivate class Address\n{\n public string Street { get; set; }\n public string City { get; set; }\n}\n\nprivate int GetHumidityByAddress(Address address)\n{\n return (address.City == \"Seattle\") ? 60 : 80;\n}\n\nprivate string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n{\n string[] temps = new string[addresses.Length];\n for (int i = 0; i < addresses.Length; i++)\n {\n if (addresses[i].TryGetValue(\"city\", out string city))\n {\n temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n }\n else\n {\n throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n }\n }\n return temps;\n}\n```\nNow we define the function definitions:\n```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\nprivate FunctionToolDefinition geHhumidityByAddressTool = new(\n name: \"GetHumidityByAddress\",\n description: \"Get humidity by street and city\",\n parameters: BinaryData.FromObjectAsJson(\n new\n {\n Type = \"object\",\n Properties = new\n {\n Address = new\n {\n Type = \"object\",\n Properties = new\n {\n Street = new\n {\n Type = \"string\",\n Description = \"Street\"\n },\n City = new\n {\n Type = \"string\",\n Description = \"city\"\n },\n },\n Required = new[] { \"street\", \"city\" }\n }\n },\n Required = new[] { \"address\" }\n },\n new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n\nprivate FunctionToolDefinition getWeatherByAddressesTool = new(\n name: \"GetWeatherByAddresses\",\n description: \"Get weather by street and city\",\n parameters: BinaryData.FromObjectAsJson(\n new\n {\n Type = \"object\",\n Properties = new\n {\n Addresses = new\n {\n Type = \"array\",\n Description = \"A list of addresses\",\n Items = new\n {\n Type = \"object\",\n Properties = new\n {\n Street = new\n {\n Type = \"string\",\n Description = \"Street\"\n },\n City = new\n {\n Type = \"string\",\n description = \"city\"\n },\n },\n Required = new[] { \"street\", \"city\" }\n }\n },\n Unit = new\n {\n Type = \"string\",\n Enum = new[] { \"c\", \"f\" },\n },\n },\n Required = new[] { \"addresses\" }\n },\n new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n```\nUse `EnableAutoFunctionCall` to enable the auto function call:\n```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\nList<ToolOutput> toolOutputs = new();\nDictionary<string, Delegate> delegates = new();\ndelegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\ndelegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\nAIProjectClientOptions options = new();\noptions.EnableAutoFunctionCalls(delegates);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2058815959",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-24T16:18:50+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "what's the error handling model for automatic tool calls? i.e. where do I handle errors that might happen in tool calls?",
"pr_file_module": null
},
{
"comment_id": "2058960638",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-24T17:54:03+00:00",
"comment_author": "howieleung",
"comment_body": "When there is error, the error will be submitted back to the model in Json format liked \"{error: \"Error message\"}\". Then model will raise another function call to retry or figure out by its knowledge instead.",
"pr_file_module": null
},
{
"comment_id": "2059164667",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-24T20:19:15+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "I am not sure this is how we want it to work. It will hide bugs and will be hard to debug. ",
"pr_file_module": null
},
{
"comment_id": "2060477164",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T15:39:49+00:00",
"comment_author": "howieleung",
"comment_body": "A similar solution also used in SK and asure-ai-project sdk. In python, we would do logger.warning function to log the warning. Can C# log warning?",
"pr_file_module": null
},
{
"comment_id": "2060669666",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T18:00:04+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "there is no really a good way to do it. This is why in CM protoype, we had APIs for the caller to issue the tool call directly and handle errors if they want: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cloudmachine/Azure.Projects/samples/HelloRAG/Program.cs#L57\r\n\r\nIn addition, in our automatic runner, there is a virtual method that the user can override to hook up any error handlers they want: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cloudmachine/Azure.Projects.AI/src/Agents/ChatRunner.cs#L146",
"pr_file_module": null
},
{
"comment_id": "2060896976",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T21:22:03+00:00",
"comment_author": "howieleung",
"comment_body": "I just read the code in the link. I think it make sense to add handlers streaming, but I would not want to do it now for just onToolCall. It makes more sense to come up with a list of handler and implement after GA. And this is not going to be a breaking change. Are you OK?",
"pr_file_module": null
},
{
"comment_id": "2061032463",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T23:37:09+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "Adding it after GA will be a breaking change, won't it? Non-streaming tool calls will be intercepted and handled automatically. This means custom user code that handles these tool calls won't execute anymore. Unless you add more knobs/options to preserve compat, and then it just adds complexity to the API. ",
"pr_file_module": null
},
{
"comment_id": "2062963457",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-28T06:25:29+00:00",
"comment_author": "howieleung",
"comment_body": "No worry! beside create_run, Python SDK has a function called create_and_process_run. For non-streaming, users either call create_run followed by writing their own while-loop and call function tool manually. Or they call create_and_process_run that call create_run and embed the while-loop with auto function calls. Currently this SDK doesn't have something similar to create_and_process_run. I have discussed with Jarno this release we will do auto function call for streaming only. We might consider to create createAndRunProcess with auto function call for non-streaming.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2164226771",
"pr_number": 50725,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"created_at": "2025-06-24T14:44:09+00:00",
"commented_code": "### Bugs Fixed\n\n- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2164226771",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50725,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"discussion_id": "2164226771",
"commented_code": "@@ -8,6 +8,7 @@\n \n ### Bugs Fixed\n \n+- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).",
"comment_created_at": "2025-06-24T14:44:09+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).\r\n```",
"pr_file_module": null
},
{
"comment_id": "2164232457",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50725,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"discussion_id": "2164226771",
"commented_code": "@@ -8,6 +8,7 @@\n \n ### Bugs Fixed\n \n+- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).",
"comment_created_at": "2025-06-24T14:46:18+00:00",
"comment_author": "Copilot",
"comment_body": "Updated the CHANGELOG URL format as suggested. Commit 6789abc.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124977732",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/CHANGELOG.md",
"created_at": "2025-06-03T21:40:43+00:00",
"commented_code": "### Other Changes\n\n- Updated retry policy behavior when the service is throttling and the `TryTimeout` is shorter than the standard throttling time of 30 seconds. Previously, the operation was immediately canceled with a server busy exception. With these changes, the operation will begin consuming retry attempts while throttling until either the server busy state is cleared or all configured retry attempts are exhausted. ([#50121](https://github.com/Azure/azure-sdk-for-net/issues/50121))",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124977732",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/CHANGELOG.md",
"discussion_id": "2124977732",
"commented_code": "@@ -12,6 +12,7 @@\n \n ### Other Changes\n \n+- Updated retry policy behavior when the service is throttling and the `TryTimeout` is shorter than the standard throttling time of 30 seconds. Previously, the operation was immediately canceled with a server busy exception. With these changes, the operation will begin consuming retry attempts while throttling until either the server busy state is cleared or all configured retry attempts are exhausted. ([#50121](https://github.com/Azure/azure-sdk-for-net/issues/50121))",
"comment_created_at": "2025-06-03T21:40:43+00:00",
"comment_author": "jsquire",
"comment_body": "```suggestion\r\n- Updated retry policy behavior when the service is throttling and the `TryTimeout` is shorter than the standard throttling time of 30 seconds. Previously, the operation was immediately canceled with a server busy exception. With these changes, the operation will begin consuming retry attempts while throttling until either the server busy state is cleared or all configured retry attempts are exhausted. ([#50121](https://github.com/Azure/azure-sdk-for-net/issues/50121))\r\n\r\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -37,169 +37,3 @@ options.EnableAutoFunctionCalls(delegates, errorHandler: (exception, context) =>
```
When dealing with transient errors like throttling or service unavailability, implement retry policies that respect documented service requirements (for example, retrying 410 responses for at least the 70 seconds that Azure IMDS documents) rather than failing immediately or falling back on a generic retry strategy.
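
As a rough sketch of that idea, and assuming hypothetical names and thresholds (this is not the actual `ManagedIdentityCredential` implementation), the loop below retries 410 (Gone) responses until a documented minimum time window has elapsed, while other transient status codes fall back to a conventional attempt-bounded backoff:
```csharp
using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

public static class DocumentedRetrySketch
{
    // Illustrative values: IMDS documents that 410 responses should be
    // retried for at least 70 seconds in total.
    private static readonly TimeSpan MinGoneRetryWindow = TimeSpan.FromSeconds(70);
    private const int MaxTransientAttempts = 5;

    public static async Task<HttpResponseMessage> SendWithRetriesAsync(
        HttpClient client, Func<HttpRequestMessage> requestFactory)
    {
        DateTimeOffset start = DateTimeOffset.UtcNow;
        TimeSpan delay = TimeSpan.FromSeconds(1);

        for (int attempt = 1; ; attempt++)
        {
            // A request message cannot be reused, so build a fresh one per attempt.
            HttpResponseMessage response = await client.SendAsync(requestFactory());

            bool isGone = response.StatusCode == HttpStatusCode.Gone;
            bool isTransient = (int)response.StatusCode == 429 || (int)response.StatusCode >= 500;

            if (isGone)
            {
                // Keep retrying 410s until the documented minimum window has elapsed.
                if (DateTimeOffset.UtcNow - start >= MinGoneRetryWindow)
                {
                    return response;
                }
            }
            else if (isTransient)
            {
                // Other transient failures get a conventional bounded retry.
                if (attempt >= MaxTransientAttempts)
                {
                    return response;
                }
            }
            else
            {
                // Success or a non-retriable error: surface it to the caller.
                return response;
            }

            await Task.Delay(delay);
            delay = TimeSpan.FromTicks(Math.Min(delay.Ticks * 2, TimeSpan.FromSeconds(8).Ticks));
        }
    }
}
```
The relevant design choice is the split itself: the 410 branch is governed by elapsed time measured against the documented window, not by the attempt counter used for generic transient failures.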
[
{
"discussion_id": "2058815959",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"created_at": "2025-04-24T16:18:50+00:00",
"commented_code": "while (toolOutputs.Count > 0);\n```\n\n#### Function call executed automatically\n\nIn addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n\nHere we use other functions for demonstration:\n```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\nprivate class Address\n{\n public string Street { get; set; }\n public string City { get; set; }\n}\n\nprivate int GetHumidityByAddress(Address address)\n{\n return (address.City == \"Seattle\") ? 60 : 80;\n}\n\nprivate string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n{\n string[] temps = new string[addresses.Length];\n for (int i = 0; i < addresses.Length; i++)\n {\n if (addresses[i].TryGetValue(\"city\", out string city))\n {\n temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n }\n else\n {\n throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n }\n }\n return temps;\n}\n```\nNow we define the function definitions:\n```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\nprivate FunctionToolDefinition geHhumidityByAddressTool = new(\n name: \"GetHumidityByAddress\",\n description: \"Get humidity by street and city\",\n parameters: BinaryData.FromObjectAsJson(\n new\n {\n Type = \"object\",\n Properties = new\n {\n Address = new\n {\n Type = \"object\",\n Properties = new\n {\n Street = new\n {\n Type = \"string\",\n Description = \"Street\"\n },\n City = new\n {\n Type = \"string\",\n Description = \"city\"\n },\n },\n Required = new[] { \"street\", \"city\" }\n }\n },\n Required = new[] { \"address\" }\n },\n new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n\nprivate FunctionToolDefinition getWeatherByAddressesTool = new(\n name: \"GetWeatherByAddresses\",\n description: \"Get weather by street and city\",\n parameters: BinaryData.FromObjectAsJson(\n new\n {\n Type = \"object\",\n Properties = new\n {\n Addresses = new\n {\n Type = \"array\",\n Description = \"A list of addresses\",\n Items = new\n {\n Type = \"object\",\n Properties = new\n {\n Street = new\n {\n Type = \"string\",\n Description = \"Street\"\n },\n City = new\n {\n Type = \"string\",\n description = \"city\"\n },\n },\n Required = new[] { \"street\", \"city\" }\n }\n },\n Unit = new\n {\n Type = \"string\",\n Enum = new[] { \"c\", \"f\" },\n },\n },\n Required = new[] { \"addresses\" }\n },\n new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n```\nUse `EnableAutoFunctionCall` to enable the auto function call:\n```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\nList<ToolOutput> toolOutputs = new();\nDictionary<string, Delegate> delegates = new();\ndelegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\ndelegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\nAIProjectClientOptions options = new();\noptions.EnableAutoFunctionCalls(delegates);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2058815959",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-24T16:18:50+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "what's the error handling model for automatic tool calls? i.e. where do I handle errors that might happen in tool calls?",
"pr_file_module": null
},
{
"comment_id": "2058960638",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-24T17:54:03+00:00",
"comment_author": "howieleung",
"comment_body": "When there is error, the error will be submitted back to the model in Json format liked \"{error: \"Error message\"}\". Then model will raise another function call to retry or figure out by its knowledge instead.",
"pr_file_module": null
},
{
"comment_id": "2059164667",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-24T20:19:15+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "I am not sure this is how we want it to work. It will hide bugs and will be hard to debug. ",
"pr_file_module": null
},
{
"comment_id": "2060477164",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T15:39:49+00:00",
"comment_author": "howieleung",
"comment_body": "A similar solution also used in SK and asure-ai-project sdk. In python, we would do logger.warning function to log the warning. Can C# log warning?",
"pr_file_module": null
},
{
"comment_id": "2060669666",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T18:00:04+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "there is no really a good way to do it. This is why in CM protoype, we had APIs for the caller to issue the tool call directly and handle errors if they want: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cloudmachine/Azure.Projects/samples/HelloRAG/Program.cs#L57\r\n\r\nIn addition, in our automatic runner, there is a virtual method that the user can override to hook up any error handlers they want: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cloudmachine/Azure.Projects.AI/src/Agents/ChatRunner.cs#L146",
"pr_file_module": null
},
{
"comment_id": "2060896976",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T21:22:03+00:00",
"comment_author": "howieleung",
"comment_body": "I just read the code in the link. I think it make sense to add handlers streaming, but I would not want to do it now for just onToolCall. It makes more sense to come up with a list of handler and implement after GA. And this is not going to be a breaking change. Are you OK?",
"pr_file_module": null
},
{
"comment_id": "2061032463",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-25T23:37:09+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "Adding it after GA will be a breaking change, won't it? Non-streaming tool calls will be intercepted and handled automatically. This means custom user code that handles these tool calls won't execute anymore. Unless you add more knobs/options to preserve compat, and then it just adds complexity to the API. ",
"pr_file_module": null
},
{
"comment_id": "2062963457",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 49293,
"pr_file": "sdk/ai/Azure.AI.Projects/README.md",
"discussion_id": "2058815959",
"commented_code": "@@ -647,6 +648,172 @@ do\n while (toolOutputs.Count > 0);\n ```\n \n+#### Function call executed automatically\n+\n+In addition to the manual function calls, SDK supports automatic function calling. Here is the example:\n+\n+Here we use other functions for demonstration:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctions\n+private class Address\n+{\n+ public string Street { get; set; }\n+ public string City { get; set; }\n+}\n+\n+private int GetHumidityByAddress(Address address)\n+{\n+ return (address.City == \"Seattle\") ? 60 : 80;\n+}\n+\n+private string[] GetWeatherByAddresses(Dictionary<string, string>[] addresses, string unit = \"F\")\n+{\n+ string[] temps = new string[addresses.Length];\n+ for (int i = 0; i < addresses.Length; i++)\n+ {\n+ if (addresses[i].TryGetValue(\"city\", out string city))\n+ {\n+ temps[i] = string.Format(\"{0}{1}\", (city == \"Seattle\") ? \"20\" : \"50\", unit);\n+ }\n+ else\n+ {\n+ throw new ArgumentException(\"Each address must contain 'street' and 'city' keys.\");\n+ }\n+ }\n+ return temps;\n+}\n+```\n+Now we define the function definitions:\n+```C# Snippet:StreamingWithAutoFunctionCall_DefineFunctionTools\n+private FunctionToolDefinition geHhumidityByAddressTool = new(\n+ name: \"GetHumidityByAddress\",\n+ description: \"Get humidity by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Address = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ Description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Required = new[] { \"address\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+\n+private FunctionToolDefinition getWeatherByAddressesTool = new(\n+ name: \"GetWeatherByAddresses\",\n+ description: \"Get weather by street and city\",\n+ parameters: BinaryData.FromObjectAsJson(\n+ new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Addresses = new\n+ {\n+ Type = \"array\",\n+ Description = \"A list of addresses\",\n+ Items = new\n+ {\n+ Type = \"object\",\n+ Properties = new\n+ {\n+ Street = new\n+ {\n+ Type = \"string\",\n+ Description = \"Street\"\n+ },\n+ City = new\n+ {\n+ Type = \"string\",\n+ description = \"city\"\n+ },\n+ },\n+ Required = new[] { \"street\", \"city\" }\n+ }\n+ },\n+ Unit = new\n+ {\n+ Type = \"string\",\n+ Enum = new[] { \"c\", \"f\" },\n+ },\n+ },\n+ Required = new[] { \"addresses\" }\n+ },\n+ new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));\n+```\n+Use `EnableAutoFunctionCall` to enable the auto function call:\n+```C# Snippet:StreamingWithAutoFunctionCall_EnableAutoFunctionCalls\n+List<ToolOutput> toolOutputs = new();\n+Dictionary<string, Delegate> delegates = new();\n+delegates.Add(nameof(GetWeatherByAddresses), GetWeatherByAddresses);\n+delegates.Add(nameof(GetHumidityByAddress), GetHumidityByAddress);\n+AIProjectClientOptions options = new();\n+options.EnableAutoFunctionCalls(delegates);",
"comment_created_at": "2025-04-28T06:25:29+00:00",
"comment_author": "howieleung",
"comment_body": "No worry! beside create_run, Python SDK has a function called create_and_process_run. For non-streaming, users either call create_run followed by writing their own while-loop and call function tool manually. Or they call create_and_process_run that call create_run and embed the while-loop with auto function calls. Currently this SDK doesn't have something similar to create_and_process_run. I have discussed with Jarno this release we will do auto function call for streaming only. We might consider to create createAndRunProcess with auto function call for non-streaming.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2164226771",
"pr_number": 50725,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"created_at": "2025-06-24T14:44:09+00:00",
"commented_code": "### Bugs Fixed\n\n- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2164226771",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50725,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"discussion_id": "2164226771",
"commented_code": "@@ -8,6 +8,7 @@\n \n ### Bugs Fixed\n \n+- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).",
"comment_created_at": "2025-06-24T14:44:09+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).\r\n```",
"pr_file_module": null
},
{
"comment_id": "2164232457",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50725,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"discussion_id": "2164226771",
"commented_code": "@@ -8,6 +8,7 @@\n \n ### Bugs Fixed\n \n+- `ManagedIdentityCredential` now retries 410 status responses for at least 70 seconds total duration as required by [Azure IMDS documentation](https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=windows#errors-and-debugging). Previously, 410 responses were retried with the same short exponential backoff as other status codes, resulting in insufficient retry duration ([#50724](https://github.com/Azure/azure-sdk-for-net/issues/50724)).",
"comment_created_at": "2025-06-24T14:46:18+00:00",
"comment_author": "Copilot",
"comment_body": "Updated the CHANGELOG URL format as suggested. Commit 6789abc.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124977732",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/CHANGELOG.md",
"created_at": "2025-06-03T21:40:43+00:00",
"commented_code": "### Other Changes\n\n- Updated retry policy behavior when the service is throttling and the `TryTimeout` is shorter than the standard throttling time of 30 seconds. Previously, the operation was immediately canceled with a server busy exception. With these changes, the operation will begin consuming retry attempts while throttling until either the server busy state is cleared or all configured retry attempts are exhausted. ([#50121](https://github.com/Azure/azure-sdk-for-net/issues/50121))",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124977732",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/CHANGELOG.md",
"discussion_id": "2124977732",
"commented_code": "@@ -12,6 +12,7 @@\n \n ### Other Changes\n \n+- Updated retry policy behavior when the service is throttling and the `TryTimeout` is shorter than the standard throttling time of 30 seconds. Previously, the operation was immediately canceled with a server busy exception. With these changes, the operation will begin consuming retry attempts while throttling until either the server busy state is cleared or all configured retry attempts are exhausted. ([#50121](https://github.com/Azure/azure-sdk-for-net/issues/50121))",
"comment_created_at": "2025-06-03T21:40:43+00:00",
"comment_author": "jsquire",
"comment_body": "```suggestion\r\n- Updated retry policy behavior when the service is throttling and the `TryTimeout` is shorter than the standard throttling time of 30 seconds. Previously, the operation was immediately canceled with a server busy exception. With these changes, the operation will begin consuming retry attempts while throttling until either the server busy state is cleared or all configured retry attempts are exhausted. ([#50121](https://github.com/Azure/azure-sdk-for-net/issues/50121))\r\n\r\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,70 @@
[
{
"discussion_id": "2162531642",
"pr_number": 50119,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/README.md",
"created_at": "2025-06-23T21:05:31+00:00",
"commented_code": "run.LastError?.Message);\n```\n`\n\n#### Tracing\n\nYou can add an Application Insights Azure resource to your Azure AI Foundry project. See the Tracing tab in your AI Foundry project. If one was enabled, you use the Application Insights connection string, configure your Agents, and observe the full execution path through Azure Monitor. Typically, you might want to start tracing before you create an Agent.\n\nTracing also requires enabling OpenTelemetry support. One way to do this is to set the `AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE` environment variable value to `true`.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2162531642",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50119,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/README.md",
"discussion_id": "2162531642",
"commented_code": "@@ -866,6 +869,51 @@ Assert.AreEqual(\n run.LastError?.Message);\n ```\n `\n+\n+#### Tracing\n+\n+You can add an Application Insights Azure resource to your Azure AI Foundry project. See the Tracing tab in your AI Foundry project. If one was enabled, you use the Application Insights connection string, configure your Agents, and observe the full execution path through Azure Monitor. Typically, you might want to start tracing before you create an Agent.\n+\n+Tracing also requires enabling OpenTelemetry support. One way to do this is to set the `AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE` environment variable value to `true`.",
"comment_created_at": "2025-06-23T21:05:31+00:00",
"comment_author": "lmolkova",
"comment_body": "do we also support `AppContext.SetSwitch(\"Azure.Experimental.EnableActivitySource\", true);` ?\r\n\r\nsee https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/ai/Azure.AI.Inference/samples/Sample8_ChatCompletionsWithOpenTelemetry.md for an example of it being documented and https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Diagnostics.md to reference for more details",
"pr_file_module": null
},
{
"comment_id": "2162589182",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50119,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/README.md",
"discussion_id": "2162531642",
"commented_code": "@@ -866,6 +869,51 @@ Assert.AreEqual(\n run.LastError?.Message);\n ```\n `\n+\n+#### Tracing\n+\n+You can add an Application Insights Azure resource to your Azure AI Foundry project. See the Tracing tab in your AI Foundry project. If one was enabled, you use the Application Insights connection string, configure your Agents, and observe the full execution path through Azure Monitor. Typically, you might want to start tracing before you create an Agent.\n+\n+Tracing also requires enabling OpenTelemetry support. One way to do this is to set the `AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE` environment variable value to `true`.",
"comment_created_at": "2025-06-23T21:52:19+00:00",
"comment_author": "M-Hietala",
"comment_body": "added",
"pr_file_module": null
}
]
},
{
"discussion_id": "2162538146",
"pr_number": 50119,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/README.md",
"created_at": "2025-06-23T21:09:54+00:00",
"commented_code": "run.LastError?.Message);\n```\n`\n\n#### Tracing\n\nYou can add an Application Insights Azure resource to your Azure AI Foundry project. See the Tracing tab in your AI Foundry project. If one was enabled, you use the Application Insights connection string, configure your Agents, and observe the full execution path through Azure Monitor. Typically, you might want to start tracing before you create an Agent.\n\nTracing also requires enabling OpenTelemetry support. One way to do this is to set the `AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE` environment variable value to `true`.\n\nTo enabled content recording, set the `AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED` environment variable to `true`. Content in this context refers to chat message content, function call tool related function names, function parameter names and values.\n\n##### Tracing to Azure Montior\n\nFor tracing to Azure Monitor from your application, install the Azure.Monitor.OpenTelemetry.Exporter with [NuGet](https://www.nuget.org/ ):\n\n```dotnetcli\ndotnet add package Azure.Monitor.OpenTelemetry.Exporter",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2162538146",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50119,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/README.md",
"discussion_id": "2162538146",
"commented_code": "@@ -866,6 +869,51 @@ Assert.AreEqual(\n run.LastError?.Message);\n ```\n `\n+\n+#### Tracing\n+\n+You can add an Application Insights Azure resource to your Azure AI Foundry project. See the Tracing tab in your AI Foundry project. If one was enabled, you use the Application Insights connection string, configure your Agents, and observe the full execution path through Azure Monitor. Typically, you might want to start tracing before you create an Agent.\n+\n+Tracing also requires enabling OpenTelemetry support. One way to do this is to set the `AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE` environment variable value to `true`.\n+\n+To enabled content recording, set the `AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED` environment variable to `true`. Content in this context refers to chat message content, function call tool related function names, function parameter names and values.\n+\n+##### Tracing to Azure Montior\n+\n+For tracing to Azure Monitor from your application, install the Azure.Monitor.OpenTelemetry.Exporter with [NuGet](https://www.nuget.org/ ):\n+\n+```dotnetcli\n+dotnet add package Azure.Monitor.OpenTelemetry.Exporter",
"comment_created_at": "2025-06-23T21:09:54+00:00",
"comment_author": "lmolkova",
"comment_body": "it's actually way easier and better to use az.mon.ote.aspnetcore package - https://github.com/Azure/azure-sdk-for-net/blob/0c2a40252d1c7ed86188153bd4aa8ddeed868371/sdk/monitor/Azure.Monitor.OpenTelemetry.AspNetCore/README.md, consider switching to it or at least note that it should be the first choice.\r\n\r\nDirect usage of the exporter is very low-level and not expected in AzMon use cases\r\n\r\n",
"pr_file_module": null
},
{
"comment_id": "2162589001",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50119,
"pr_file": "sdk/ai/Azure.AI.Agents.Persistent/README.md",
"discussion_id": "2162538146",
"commented_code": "@@ -866,6 +869,51 @@ Assert.AreEqual(\n run.LastError?.Message);\n ```\n `\n+\n+#### Tracing\n+\n+You can add an Application Insights Azure resource to your Azure AI Foundry project. See the Tracing tab in your AI Foundry project. If one was enabled, you use the Application Insights connection string, configure your Agents, and observe the full execution path through Azure Monitor. Typically, you might want to start tracing before you create an Agent.\n+\n+Tracing also requires enabling OpenTelemetry support. One way to do this is to set the `AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE` environment variable value to `true`.\n+\n+To enabled content recording, set the `AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED` environment variable to `true`. Content in this context refers to chat message content, function call tool related function names, function parameter names and values.\n+\n+##### Tracing to Azure Montior\n+\n+For tracing to Azure Monitor from your application, install the Azure.Monitor.OpenTelemetry.Exporter with [NuGet](https://www.nuget.org/ ):\n+\n+```dotnetcli\n+dotnet add package Azure.Monitor.OpenTelemetry.Exporter",
"comment_created_at": "2025-06-23T21:52:09+00:00",
"comment_author": "M-Hietala",
"comment_body": "added",
"pr_file_module": null
}
]
}
]

View File

@@ -29,75 +29,3 @@ AppContext.SetSwitch("Azure.Experimental.EnableActivitySource", true);
```
Using a higher-level telemetry package such as Azure.Monitor.OpenTelemetry.AspNetCore simplifies setup, provides better defaults, and follows Azure SDK best practices for observability. Direct use of the low-level exporter adds complexity to monitoring code and is rarely needed for Azure Monitor scenarios.
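
As a concrete illustration, here is a minimal sketch of the higher-level approach, assuming an ASP.NET Core application that references the Azure.Monitor.OpenTelemetry.AspNetCore distro linked in the discussion, with the Application Insights connection string supplied through the APPLICATIONINSIGHTS_CONNECTION_STRING environment variable:
```C#
// Program.cs - a sketch of wiring up the Azure Monitor OpenTelemetry distro.
// The distro configures tracing, metrics, and logging with sensible defaults,
// so the low-level Azure.Monitor.OpenTelemetry.Exporter is never used directly.
using Azure.Monitor.OpenTelemetry.AspNetCore;

// Opt in to the experimental Azure SDK ActivitySource before any clients are created.
AppContext.SetSwitch("Azure.Experimental.EnableActivitySource", true);

var builder = WebApplication.CreateBuilder(args);

// UseAzureMonitor reads the connection string from the
// APPLICATIONINSIGHTS_CONNECTION_STRING environment variable by default.
builder.Services.AddOpenTelemetry().UseAzureMonitor();

var app = builder.Build();
app.Run();
```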

View File

@@ -0,0 +1,150 @@
[
{
"discussion_id": "2159569308",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"created_at": "2025-06-20T19:38:26+00:00",
"commented_code": "## 1.15.0-beta.1 (Unreleased)\n\n### Features Added\n- Added support for the `AZURE_TOKEN_CREDENTIALS` environment variable to `DefaultAzureCredential`, which allows for additional credential selection options. The valid values now include any of the credential names available in the default chain (VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential.) **Note:** BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2159569308",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"discussion_id": "2159569308",
"commented_code": "@@ -3,6 +3,7 @@\n ## 1.15.0-beta.1 (Unreleased)\n \n ### Features Added\n+- Added support for the `AZURE_TOKEN_CREDENTIALS` environment variable to `DefaultAzureCredential`, which allows for additional credential selection options. The valid values now include any of the credential names available in the default chain (VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential.) **Note:** BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker.",
"comment_created_at": "2025-06-20T19:38:26+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- Expanded the set of acceptable values for environment variable `AZURE_TOKEN_CREDENTIALS` to allow for selection of a specific credential in the `DefaultAzureCredential` chain. The valid values now include any of the credential names available in the default chain (`VisualStudioCredential`, `VisualStudioCodeCredential`, `AzureCliCredential`, `AzurePowerShellCredential`, `AzureDeveloperCliCredential`, `EnvironmentCredential`, `WorkloadIdentityCredential`, `ManagedIdentityCredential`, `InteractiveBrowserCredential`, or `BrokerAuthenticationCredential`.) **Note:** BrokerAuthenticationCredential requires that the project include a reference to package Azure.Identity.Broker.\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2159570531",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/TROUBLESHOOTING.md",
"created_at": "2025-06-20T19:39:46+00:00",
"commented_code": "|---|---|---|\n|`CredentialUnavailableException` raised with message. \"DefaultAzureCredential failed to retrieve a token from the included credentials.\"|All credentials in the `DefaultAzureCredential` chain failed to retrieve a token, each throwing a `CredentialUnavailableException`.|<ul><li>[Enable logging](#enable-and-configure-logging) to verify the credentials being tried, and get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[WorkloadIdentityCredential](#troubleshoot-workloadidentitycredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[VisualStudioCredential](#troubleshoot-visualstudiocredential-authentication-issues)</li><li>[AzureCliCredential](#troubleshoot-azureclicredential-authentication-issues)</li><li>[AzurePowerShellCredential](#troubleshoot-azurepowershellcredential-authentication-issues)</li></ul>|\n|`RequestFailedException` raised from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Authenticate) or 403 (Forbidden) status code. This error can often be caused by the `DefaultAzureCredential` authenticating an account other than the intended or that the intended account doesn't have the correct permissions or roles assigned.|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>In the case a credential other than the expected is returning a token, bypass this by either signing out of the corresponding development tool, or excluding the credential with the ExcludeXXXCredential property in the `DefaultAzureCredentialOptions`</li><li>Ensure that the correct role is assigned to the account being used. For example, a service specific role rather than the subscription Owner role.</li></ul>|\n|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential. **Note:** VisualStudioCodeCredential and BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker. |",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2159570531",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/TROUBLESHOOTING.md",
"discussion_id": "2159570531",
"commented_code": "@@ -125,6 +125,7 @@ DefaultAzureCredentialOptions options = new\n |---|---|---|\n |`CredentialUnavailableException` raised with message. \"DefaultAzureCredential failed to retrieve a token from the included credentials.\"|All credentials in the `DefaultAzureCredential` chain failed to retrieve a token, each throwing a `CredentialUnavailableException`.|<ul><li>[Enable logging](#enable-and-configure-logging) to verify the credentials being tried, and get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[WorkloadIdentityCredential](#troubleshoot-workloadidentitycredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[VisualStudioCredential](#troubleshoot-visualstudiocredential-authentication-issues)</li><li>[AzureCliCredential](#troubleshoot-azureclicredential-authentication-issues)</li><li>[AzurePowerShellCredential](#troubleshoot-azurepowershellcredential-authentication-issues)</li></ul>|\n |`RequestFailedException` raised from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Authenticate) or 403 (Forbidden) status code. This error can often be caused by the `DefaultAzureCredential` authenticating an account other than the intended or that the intended account doesn't have the correct permissions or roles assigned.|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>In the case a credential other than the expected is returning a token, bypass this by either signing out of the corresponding development tool, or excluding the credential with the ExcludeXXXCredential property in the `DefaultAzureCredentialOptions`</li><li>Ensure that the correct role is assigned to the account being used. For example, a service specific role rather than the subscription Owner role.</li></ul>|\n+|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential. **Note:** VisualStudioCodeCredential and BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker. |",
"comment_created_at": "2025-06-20T19:39:46+00:00",
"comment_author": "scottaddie",
"comment_body": "I noticed the CHANGELOG entry doesn't mention anything about VS Code credential in the last sentence. Should it? Note that credential is mentioned in the last sentence here.\r\n```suggestion\r\n|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, `VisualStudioCredential`, `VisualStudioCodeCredential`, `AzureCliCredential`, `AzurePowerShellCredential`, `AzureDeveloperCliCredential`, `EnvironmentCredential`, `WorkloadIdentityCredential`, `ManagedIdentityCredential`, `InteractiveBrowserCredential`, or `BrokerAuthenticationCredential`. **Note:** `VisualStudioCodeCredential` and `BrokerAuthenticationCredential` require that the project include a reference to package Azure.Identity.Broker. |\r\n```",
"pr_file_module": null
},
{
"comment_id": "2160166052",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/TROUBLESHOOTING.md",
"discussion_id": "2159570531",
"commented_code": "@@ -125,6 +125,7 @@ DefaultAzureCredentialOptions options = new\n |---|---|---|\n |`CredentialUnavailableException` raised with message. \"DefaultAzureCredential failed to retrieve a token from the included credentials.\"|All credentials in the `DefaultAzureCredential` chain failed to retrieve a token, each throwing a `CredentialUnavailableException`.|<ul><li>[Enable logging](#enable-and-configure-logging) to verify the credentials being tried, and get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[WorkloadIdentityCredential](#troubleshoot-workloadidentitycredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[VisualStudioCredential](#troubleshoot-visualstudiocredential-authentication-issues)</li><li>[AzureCliCredential](#troubleshoot-azureclicredential-authentication-issues)</li><li>[AzurePowerShellCredential](#troubleshoot-azurepowershellcredential-authentication-issues)</li></ul>|\n |`RequestFailedException` raised from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Authenticate) or 403 (Forbidden) status code. This error can often be caused by the `DefaultAzureCredential` authenticating an account other than the intended or that the intended account doesn't have the correct permissions or roles assigned.|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>In the case a credential other than the expected is returning a token, bypass this by either signing out of the corresponding development tool, or excluding the credential with the ExcludeXXXCredential property in the `DefaultAzureCredentialOptions`</li><li>Ensure that the correct role is assigned to the account being used. For example, a service specific role rather than the subscription Owner role.</li></ul>|\n+|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential. **Note:** VisualStudioCodeCredential and BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker. |",
"comment_created_at": "2025-06-21T21:58:31+00:00",
"comment_author": "christothes",
"comment_body": "I meant to exclude it until the next PR.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2138876254",
"pr_number": 50531,
"pr_file": "README.md",
"created_at": "2025-06-10T22:40:31+00:00",
"commented_code": "Telemetry collection is on by default.\n\nTo opt out, you can disable telemetry at client construction. Creating a [synchronous policy](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Pipeline.md#implementing-a-synchronous-policy) in your application gives you access to the headers sent with each request, allowing you to remove the `User-Agent` header containing client telemetry. To use the policy, you will add it to the options for your specific client as part of client creation. This will disable Azure SDK telemetry for all methods in the client. You will need to register the policy with each client created.\nTo opt out, you can disable telemetry by setting the `IsTelemetryEnabled` property to `false` in the client's `DiagnosticsOptions`. This will prevent the Azure SDK from adding telemetry information to the `User-Agent` header for all requests made by the client.\n\n> NOTE: `HttpClient` may set default user agent headers as part of the .NET platform behavior. This value does not contain any Azure SDK telemetry information.\n\nAn example policy implementation looks like:\n```C# Snippet:RemoveUserAgentPolicy\npublic class RemoveUserAgentPolicy : HttpPipelineSynchronousPolicy\n{\n public override void OnSendingRequest(HttpMessage message)\n {\n message.Request.Headers.Remove(HttpHeader.Names.UserAgent);\n }\n}\n```\n\nTo use it with a client, you would register it to run for every retry attempt as part of your client options:\n```C# Snippet:RemoveUserAgentPolicyUse\nYou can disable telemetry when creating a client by setting the `IsTelemetryEnabled` property in the diagnostics options:\n```C#\nUri serviceEndpoint = new Uri(\"https://example.contoso.com\");\nTokenCredential credential = new DefaultAzureCredential();\n \nSampleClientOptions clientOptions = new SampleClientOptions();\nclientOptions.AddPolicy(new RemoveUserAgentPolicy(), HttpPipelinePosition.PerRetry);\n\nSampleClientOptions clientOptions = new SampleClientOptions()\n{\n Diagnostics = { IsTelemetryEnabled = false }\n};\n\nSampleClient client = new SampleClient(serviceEndpoint, credential, clientOptions);\n```\n\nAlternatively, you can disable telemetry globally by setting the `AZURE_TELEMETRY_DISABLED` environment variable to `true` before creating any clients.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2138876254",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50531,
"pr_file": "README.md",
"discussion_id": "2138876254",
"commented_code": "@@ -60,32 +60,25 @@ The software may collect information about you and your use of the software and\n \n Telemetry collection is on by default.\n \n-To opt out, you can disable telemetry at client construction. Creating a [synchronous policy](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Pipeline.md#implementing-a-synchronous-policy) in your application gives you access to the headers sent with each request, allowing you to remove the `User-Agent` header containing client telemetry. To use the policy, you will add it to the options for your specific client as part of client creation. This will disable Azure SDK telemetry for all methods in the client. You will need to register the policy with each client created.\n+To opt out, you can disable telemetry by setting the `IsTelemetryEnabled` property to `false` in the client's `DiagnosticsOptions`. This will prevent the Azure SDK from adding telemetry information to the `User-Agent` header for all requests made by the client.\n \n > NOTE: `HttpClient` may set default user agent headers as part of the .NET platform behavior. This value does not contain any Azure SDK telemetry information.\n \n-An example policy implementation looks like:\n-```C# Snippet:RemoveUserAgentPolicy\n-public class RemoveUserAgentPolicy : HttpPipelineSynchronousPolicy\n-{\n- public override void OnSendingRequest(HttpMessage message)\n- {\n- message.Request.Headers.Remove(HttpHeader.Names.UserAgent);\n- }\n-}\n-```\n-\n-To use it with a client, you would register it to run for every retry attempt as part of your client options:\n-```C# Snippet:RemoveUserAgentPolicyUse\n+You can disable telemetry when creating a client by setting the `IsTelemetryEnabled` property in the diagnostics options:\n+```C#\n Uri serviceEndpoint = new Uri(\"https://example.contoso.com\");\n TokenCredential credential = new DefaultAzureCredential();\n- \n-SampleClientOptions clientOptions = new SampleClientOptions();\n-clientOptions.AddPolicy(new RemoveUserAgentPolicy(), HttpPipelinePosition.PerRetry);\n+\n+SampleClientOptions clientOptions = new SampleClientOptions()\n+{\n+ Diagnostics = { IsTelemetryEnabled = false }\n+};\n \n SampleClient client = new SampleClient(serviceEndpoint, credential, clientOptions);\n ```\n \n+Alternatively, you can disable telemetry globally by setting the `AZURE_TELEMETRY_DISABLED` environment variable to `true` before creating any clients.",
"comment_created_at": "2025-06-10T22:40:31+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "is that really true? @copilot, where did you find out about this?",
"pr_file_module": null
},
{
"comment_id": "2138883720",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50531,
"pr_file": "README.md",
"discussion_id": "2138876254",
"commented_code": "@@ -60,32 +60,25 @@ The software may collect information about you and your use of the software and\n \n Telemetry collection is on by default.\n \n-To opt out, you can disable telemetry at client construction. Creating a [synchronous policy](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Pipeline.md#implementing-a-synchronous-policy) in your application gives you access to the headers sent with each request, allowing you to remove the `User-Agent` header containing client telemetry. To use the policy, you will add it to the options for your specific client as part of client creation. This will disable Azure SDK telemetry for all methods in the client. You will need to register the policy with each client created.\n+To opt out, you can disable telemetry by setting the `IsTelemetryEnabled` property to `false` in the client's `DiagnosticsOptions`. This will prevent the Azure SDK from adding telemetry information to the `User-Agent` header for all requests made by the client.\n \n > NOTE: `HttpClient` may set default user agent headers as part of the .NET platform behavior. This value does not contain any Azure SDK telemetry information.\n \n-An example policy implementation looks like:\n-```C# Snippet:RemoveUserAgentPolicy\n-public class RemoveUserAgentPolicy : HttpPipelineSynchronousPolicy\n-{\n- public override void OnSendingRequest(HttpMessage message)\n- {\n- message.Request.Headers.Remove(HttpHeader.Names.UserAgent);\n- }\n-}\n-```\n-\n-To use it with a client, you would register it to run for every retry attempt as part of your client options:\n-```C# Snippet:RemoveUserAgentPolicyUse\n+You can disable telemetry when creating a client by setting the `IsTelemetryEnabled` property in the diagnostics options:\n+```C#\n Uri serviceEndpoint = new Uri(\"https://example.contoso.com\");\n TokenCredential credential = new DefaultAzureCredential();\n- \n-SampleClientOptions clientOptions = new SampleClientOptions();\n-clientOptions.AddPolicy(new RemoveUserAgentPolicy(), HttpPipelinePosition.PerRetry);\n+\n+SampleClientOptions clientOptions = new SampleClientOptions()\n+{\n+ Diagnostics = { IsTelemetryEnabled = false }\n+};\n \n SampleClient client = new SampleClient(serviceEndpoint, credential, clientOptions);\n ```\n \n+Alternatively, you can disable telemetry globally by setting the `AZURE_TELEMETRY_DISABLED` environment variable to `true` before creating any clients.",
"comment_created_at": "2025-06-10T22:45:13+00:00",
"comment_author": "Copilot",
"comment_body": "Yes, this is accurate. The `AZURE_TELEMETRY_DISABLED` environment variable support is implemented in the Azure.Core source code:\n\n1. **DiagnosticsOptions.cs:75** - `IsTelemetryEnabled = !EnvironmentVariableToBool(Environment.GetEnvironmentVariable(\"AZURE_TELEMETRY_DISABLED\")) ?? true;`\n2. **DiagnosticsOptions.cs:93-95** - XML documentation explicitly states: \"The default value can be controlled process wide by setting `AZURE_TELEMETRY_DISABLED` to `true`, `false`, `1` or `0`.\"\n3. **ClientOptionsTests.cs** - Contains unit test `CanDisableTelemetryWithEnvironmentVariable` that verifies this functionality\n\nThe environment variable is read during DiagnosticsOptions initialization and affects the default value of `IsTelemetryEnabled` for all clients.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2127558101",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"created_at": "2025-06-04T22:46:49+00:00",
"commented_code": "### Features Added\n\n- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n\n - `tenantId` : The tenant where the target resource was created\n - `clientId` : The client identifier for the application, which must be granted access on the target resource\n - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2127558101",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-04T22:46:49+00:00",
"comment_author": "christothes",
"comment_body": "Should we consider making this value something like `AudienceCloud` and accept values like `global` (default), `usgov`, `china` so that users don't need to remember the magic audience strings?",
"pr_file_module": null
},
{
"comment_id": "2127560619",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-04T22:49:01+00:00",
"comment_author": "jsquire",
"comment_body": "I'm good with that, but its @scottaddie's call. I followed the pattern that he asked for and I'm not sure if there are any wider considerations.",
"pr_file_module": null
},
{
"comment_id": "2130246074",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-05T20:30:31+00:00",
"comment_author": "scottaddie",
"comment_body": "+1 to Chris' suggestion. Most customers won't know this magic audience string.",
"pr_file_module": null
},
{
"comment_id": "2132973364",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-06T22:27:09+00:00",
"comment_author": "jsquire",
"comment_body": "Changed to `azureCloud`",
"pr_file_module": null
}
]
}
]

View File

@@ -36,155 +36,3 @@ clientOptions.Diagnostics = { IsTelemetryEnabled = false };
```
For environment variables, document all accepted values and include examples showing both the variable name and its possible values.
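As one hedged illustration of this guideline (the helper name and case-insensitive handling are invented; the accepted values `true`, `false`, `1`, and `0` are the ones the Azure.Core documentation quotes for `AZURE_TELEMETRY_DISABLED` in the discussion below):
```C#
using System;

internal static class TelemetrySettings
{
    // Maps the documented values of AZURE_TELEMETRY_DISABLED to behavior:
    // "true"/"1" disable telemetry, "false"/"0" enable it, and anything
    // else (including unset) falls back to the default of enabled.
    public static bool IsTelemetryEnabled()
    {
        string value = Environment.GetEnvironmentVariable("AZURE_TELEMETRY_DISABLED");
        switch (value?.ToLowerInvariant())
        {
            case "true":
            case "1":
                return false; // telemetry explicitly disabled
            case "false":
            case "0":
                return true;  // telemetry explicitly enabled
            default:
                return true;  // unset or unrecognized: keep the default
        }
    }
}
```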
[
{
"discussion_id": "2159569308",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"created_at": "2025-06-20T19:38:26+00:00",
"commented_code": "## 1.15.0-beta.1 (Unreleased)\n\n### Features Added\n- Added support for the `AZURE_TOKEN_CREDENTIALS` environment variable to `DefaultAzureCredential`, which allows for additional credential selection options. The valid values now include any of the credential names available in the default chain (VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential.) **Note:** BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2159569308",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/CHANGELOG.md",
"discussion_id": "2159569308",
"commented_code": "@@ -3,6 +3,7 @@\n ## 1.15.0-beta.1 (Unreleased)\n \n ### Features Added\n+- Added support for the `AZURE_TOKEN_CREDENTIALS` environment variable to `DefaultAzureCredential`, which allows for additional credential selection options. The valid values now include any of the credential names available in the default chain (VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential.) **Note:** BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker.",
"comment_created_at": "2025-06-20T19:38:26+00:00",
"comment_author": "scottaddie",
"comment_body": "```suggestion\r\n- Expanded the set of acceptable values for environment variable `AZURE_TOKEN_CREDENTIALS` to allow for selection of a specific credential in the `DefaultAzureCredential` chain. The valid values now include any of the credential names available in the default chain (`VisualStudioCredential`, `VisualStudioCodeCredential`, `AzureCliCredential`, `AzurePowerShellCredential`, `AzureDeveloperCliCredential`, `EnvironmentCredential`, `WorkloadIdentityCredential`, `ManagedIdentityCredential`, `InteractiveBrowserCredential`, or `BrokerAuthenticationCredential`.) **Note:** BrokerAuthenticationCredential requires that the project include a reference to package Azure.Identity.Broker.\r\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2159570531",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/TROUBLESHOOTING.md",
"created_at": "2025-06-20T19:39:46+00:00",
"commented_code": "|---|---|---|\n|`CredentialUnavailableException` raised with message. \"DefaultAzureCredential failed to retrieve a token from the included credentials.\"|All credentials in the `DefaultAzureCredential` chain failed to retrieve a token, each throwing a `CredentialUnavailableException`.|<ul><li>[Enable logging](#enable-and-configure-logging) to verify the credentials being tried, and get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[WorkloadIdentityCredential](#troubleshoot-workloadidentitycredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[VisualStudioCredential](#troubleshoot-visualstudiocredential-authentication-issues)</li><li>[AzureCliCredential](#troubleshoot-azureclicredential-authentication-issues)</li><li>[AzurePowerShellCredential](#troubleshoot-azurepowershellcredential-authentication-issues)</li></ul>|\n|`RequestFailedException` raised from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Authenticate) or 403 (Forbidden) status code. This error can often be caused by the `DefaultAzureCredential` authenticating an account other than the intended or that the intended account doesn't have the correct permissions or roles assigned.|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>In the case a credential other than the expected is returning a token, bypass this by either signing out of the corresponding development tool, or excluding the credential with the ExcludeXXXCredential property in the `DefaultAzureCredentialOptions`</li><li>Ensure that the correct role is assigned to the account being used. For example, a service specific role rather than the subscription Owner role.</li></ul>|\n|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential. **Note:** VisualStudioCodeCredential and BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker. |",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2159570531",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/TROUBLESHOOTING.md",
"discussion_id": "2159570531",
"commented_code": "@@ -125,6 +125,7 @@ DefaultAzureCredentialOptions options = new\n |---|---|---|\n |`CredentialUnavailableException` raised with message. \"DefaultAzureCredential failed to retrieve a token from the included credentials.\"|All credentials in the `DefaultAzureCredential` chain failed to retrieve a token, each throwing a `CredentialUnavailableException`.|<ul><li>[Enable logging](#enable-and-configure-logging) to verify the credentials being tried, and get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[WorkloadIdentityCredential](#troubleshoot-workloadidentitycredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[VisualStudioCredential](#troubleshoot-visualstudiocredential-authentication-issues)</li><li>[AzureCliCredential](#troubleshoot-azureclicredential-authentication-issues)</li><li>[AzurePowerShellCredential](#troubleshoot-azurepowershellcredential-authentication-issues)</li></ul>|\n |`RequestFailedException` raised from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Authenticate) or 403 (Forbidden) status code. This error can often be caused by the `DefaultAzureCredential` authenticating an account other than the intended or that the intended account doesn't have the correct permissions or roles assigned.|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>In the case a credential other than the expected is returning a token, bypass this by either signing out of the corresponding development tool, or excluding the credential with the ExcludeXXXCredential property in the `DefaultAzureCredentialOptions`</li><li>Ensure that the correct role is assigned to the account being used. For example, a service specific role rather than the subscription Owner role.</li></ul>|\n+|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential. **Note:** VisualStudioCodeCredential and BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker. |",
"comment_created_at": "2025-06-20T19:39:46+00:00",
"comment_author": "scottaddie",
"comment_body": "I noticed the CHANGELOG entry doesn't mention anything about VS Code credential in the last sentence. Should it? Note that credential is mentioned in the last sentence here.\r\n```suggestion\r\n|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, `VisualStudioCredential`, `VisualStudioCodeCredential`, `AzureCliCredential`, `AzurePowerShellCredential`, `AzureDeveloperCliCredential`, `EnvironmentCredential`, `WorkloadIdentityCredential`, `ManagedIdentityCredential`, `InteractiveBrowserCredential`, or `BrokerAuthenticationCredential`. **Note:** `VisualStudioCodeCredential` and `BrokerAuthenticationCredential` require that the project include a reference to package Azure.Identity.Broker. |\r\n```",
"pr_file_module": null
},
{
"comment_id": "2160166052",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50749,
"pr_file": "sdk/identity/Azure.Identity/TROUBLESHOOTING.md",
"discussion_id": "2159570531",
"commented_code": "@@ -125,6 +125,7 @@ DefaultAzureCredentialOptions options = new\n |---|---|---|\n |`CredentialUnavailableException` raised with message. \"DefaultAzureCredential failed to retrieve a token from the included credentials.\"|All credentials in the `DefaultAzureCredential` chain failed to retrieve a token, each throwing a `CredentialUnavailableException`.|<ul><li>[Enable logging](#enable-and-configure-logging) to verify the credentials being tried, and get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[WorkloadIdentityCredential](#troubleshoot-workloadidentitycredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[VisualStudioCredential](#troubleshoot-visualstudiocredential-authentication-issues)</li><li>[AzureCliCredential](#troubleshoot-azureclicredential-authentication-issues)</li><li>[AzurePowerShellCredential](#troubleshoot-azurepowershellcredential-authentication-issues)</li></ul>|\n |`RequestFailedException` raised from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Authenticate) or 403 (Forbidden) status code. This error can often be caused by the `DefaultAzureCredential` authenticating an account other than the intended or that the intended account doesn't have the correct permissions or roles assigned.|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>In the case a credential other than the expected is returning a token, bypass this by either signing out of the corresponding development tool, or excluding the credential with the ExcludeXXXCredential property in the `DefaultAzureCredentialOptions`</li><li>Ensure that the correct role is assigned to the account being used. For example, a service specific role rather than the subscription Owner role.</li></ul>|\n+|`InvalidOperationException` raised with message: \"Invalid value for environment variable AZURE_TOKEN_CREDENTIALS ...\" | An invalid value was set for the AZURE_TOKEN_CREDENTIALS environment variable | Set the environment variable to one of the following values: dev, prod, VisualStudioCredential, VisualStudioCodeCredential, AzureCliCredential, AzurePowerShellCredential, AzureDeveloperCliCredential, EnvironmentCredential, WorkloadIdentityCredential, ManagedIdentityCredential, InteractiveBrowserCredential, or BrokerAuthenticationCredential. **Note:** VisualStudioCodeCredential and BrokerAuthenticationCredential require that the project include a reference to Azure.Identity.Broker. |",
"comment_created_at": "2025-06-21T21:58:31+00:00",
"comment_author": "christothes",
"comment_body": "I meant to exclude it until the next PR.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2138876254",
"pr_number": 50531,
"pr_file": "README.md",
"created_at": "2025-06-10T22:40:31+00:00",
"commented_code": "Telemetry collection is on by default.\n\nTo opt out, you can disable telemetry at client construction. Creating a [synchronous policy](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Pipeline.md#implementing-a-synchronous-policy) in your application gives you access to the headers sent with each request, allowing you to remove the `User-Agent` header containing client telemetry. To use the policy, you will add it to the options for your specific client as part of client creation. This will disable Azure SDK telemetry for all methods in the client. You will need to register the policy with each client created.\nTo opt out, you can disable telemetry by setting the `IsTelemetryEnabled` property to `false` in the client's `DiagnosticsOptions`. This will prevent the Azure SDK from adding telemetry information to the `User-Agent` header for all requests made by the client.\n\n> NOTE: `HttpClient` may set default user agent headers as part of the .NET platform behavior. This value does not contain any Azure SDK telemetry information.\n\nAn example policy implementation looks like:\n```C# Snippet:RemoveUserAgentPolicy\npublic class RemoveUserAgentPolicy : HttpPipelineSynchronousPolicy\n{\n public override void OnSendingRequest(HttpMessage message)\n {\n message.Request.Headers.Remove(HttpHeader.Names.UserAgent);\n }\n}\n```\n\nTo use it with a client, you would register it to run for every retry attempt as part of your client options:\n```C# Snippet:RemoveUserAgentPolicyUse\nYou can disable telemetry when creating a client by setting the `IsTelemetryEnabled` property in the diagnostics options:\n```C#\nUri serviceEndpoint = new Uri(\"https://example.contoso.com\");\nTokenCredential credential = new DefaultAzureCredential();\n \nSampleClientOptions clientOptions = new SampleClientOptions();\nclientOptions.AddPolicy(new RemoveUserAgentPolicy(), HttpPipelinePosition.PerRetry);\n\nSampleClientOptions clientOptions = new SampleClientOptions()\n{\n Diagnostics = { IsTelemetryEnabled = false }\n};\n\nSampleClient client = new SampleClient(serviceEndpoint, credential, clientOptions);\n```\n\nAlternatively, you can disable telemetry globally by setting the `AZURE_TELEMETRY_DISABLED` environment variable to `true` before creating any clients.",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2138876254",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50531,
"pr_file": "README.md",
"discussion_id": "2138876254",
"commented_code": "@@ -60,32 +60,25 @@ The software may collect information about you and your use of the software and\n \n Telemetry collection is on by default.\n \n-To opt out, you can disable telemetry at client construction. Creating a [synchronous policy](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Pipeline.md#implementing-a-synchronous-policy) in your application gives you access to the headers sent with each request, allowing you to remove the `User-Agent` header containing client telemetry. To use the policy, you will add it to the options for your specific client as part of client creation. This will disable Azure SDK telemetry for all methods in the client. You will need to register the policy with each client created.\n+To opt out, you can disable telemetry by setting the `IsTelemetryEnabled` property to `false` in the client's `DiagnosticsOptions`. This will prevent the Azure SDK from adding telemetry information to the `User-Agent` header for all requests made by the client.\n \n > NOTE: `HttpClient` may set default user agent headers as part of the .NET platform behavior. This value does not contain any Azure SDK telemetry information.\n \n-An example policy implementation looks like:\n-```C# Snippet:RemoveUserAgentPolicy\n-public class RemoveUserAgentPolicy : HttpPipelineSynchronousPolicy\n-{\n- public override void OnSendingRequest(HttpMessage message)\n- {\n- message.Request.Headers.Remove(HttpHeader.Names.UserAgent);\n- }\n-}\n-```\n-\n-To use it with a client, you would register it to run for every retry attempt as part of your client options:\n-```C# Snippet:RemoveUserAgentPolicyUse\n+You can disable telemetry when creating a client by setting the `IsTelemetryEnabled` property in the diagnostics options:\n+```C#\n Uri serviceEndpoint = new Uri(\"https://example.contoso.com\");\n TokenCredential credential = new DefaultAzureCredential();\n- \n-SampleClientOptions clientOptions = new SampleClientOptions();\n-clientOptions.AddPolicy(new RemoveUserAgentPolicy(), HttpPipelinePosition.PerRetry);\n+\n+SampleClientOptions clientOptions = new SampleClientOptions()\n+{\n+ Diagnostics = { IsTelemetryEnabled = false }\n+};\n \n SampleClient client = new SampleClient(serviceEndpoint, credential, clientOptions);\n ```\n \n+Alternatively, you can disable telemetry globally by setting the `AZURE_TELEMETRY_DISABLED` environment variable to `true` before creating any clients.",
"comment_created_at": "2025-06-10T22:40:31+00:00",
"comment_author": "KrzysztofCwalina",
"comment_body": "is that really true? @copilot, where did you find out about this?",
"pr_file_module": null
},
{
"comment_id": "2138883720",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50531,
"pr_file": "README.md",
"discussion_id": "2138876254",
"commented_code": "@@ -60,32 +60,25 @@ The software may collect information about you and your use of the software and\n \n Telemetry collection is on by default.\n \n-To opt out, you can disable telemetry at client construction. Creating a [synchronous policy](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Pipeline.md#implementing-a-synchronous-policy) in your application gives you access to the headers sent with each request, allowing you to remove the `User-Agent` header containing client telemetry. To use the policy, you will add it to the options for your specific client as part of client creation. This will disable Azure SDK telemetry for all methods in the client. You will need to register the policy with each client created.\n+To opt out, you can disable telemetry by setting the `IsTelemetryEnabled` property to `false` in the client's `DiagnosticsOptions`. This will prevent the Azure SDK from adding telemetry information to the `User-Agent` header for all requests made by the client.\n \n > NOTE: `HttpClient` may set default user agent headers as part of the .NET platform behavior. This value does not contain any Azure SDK telemetry information.\n \n-An example policy implementation looks like:\n-```C# Snippet:RemoveUserAgentPolicy\n-public class RemoveUserAgentPolicy : HttpPipelineSynchronousPolicy\n-{\n- public override void OnSendingRequest(HttpMessage message)\n- {\n- message.Request.Headers.Remove(HttpHeader.Names.UserAgent);\n- }\n-}\n-```\n-\n-To use it with a client, you would register it to run for every retry attempt as part of your client options:\n-```C# Snippet:RemoveUserAgentPolicyUse\n+You can disable telemetry when creating a client by setting the `IsTelemetryEnabled` property in the diagnostics options:\n+```C#\n Uri serviceEndpoint = new Uri(\"https://example.contoso.com\");\n TokenCredential credential = new DefaultAzureCredential();\n- \n-SampleClientOptions clientOptions = new SampleClientOptions();\n-clientOptions.AddPolicy(new RemoveUserAgentPolicy(), HttpPipelinePosition.PerRetry);\n+\n+SampleClientOptions clientOptions = new SampleClientOptions()\n+{\n+ Diagnostics = { IsTelemetryEnabled = false }\n+};\n \n SampleClient client = new SampleClient(serviceEndpoint, credential, clientOptions);\n ```\n \n+Alternatively, you can disable telemetry globally by setting the `AZURE_TELEMETRY_DISABLED` environment variable to `true` before creating any clients.",
"comment_created_at": "2025-06-10T22:45:13+00:00",
"comment_author": "Copilot",
"comment_body": "Yes, this is accurate. The `AZURE_TELEMETRY_DISABLED` environment variable support is implemented in the Azure.Core source code:\n\n1. **DiagnosticsOptions.cs:75** - `IsTelemetryEnabled = !EnvironmentVariableToBool(Environment.GetEnvironmentVariable(\"AZURE_TELEMETRY_DISABLED\")) ?? true;`\n2. **DiagnosticsOptions.cs:93-95** - XML documentation explicitly states: \"The default value can be controlled process wide by setting `AZURE_TELEMETRY_DISABLED` to `true`, `false`, `1` or `0`.\"\n3. **ClientOptionsTests.cs** - Contains unit test `CanDisableTelemetryWithEnvironmentVariable` that verifies this functionality\n\nThe environment variable is read during DiagnosticsOptions initialization and affects the default value of `IsTelemetryEnabled` for all clients.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2127558101",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"created_at": "2025-06-04T22:46:49+00:00",
"commented_code": "### Features Added\n\n- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n\n - `tenantId` : The tenant where the target resource was created\n - `clientId` : The client identifier for the application, which must be granted access on the target resource\n - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2127558101",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-04T22:46:49+00:00",
"comment_author": "christothes",
"comment_body": "Should we consider making this value something like `AudienceCloud` and accept values like `global` (default), `usgov`, `china` so that users don't need to remember the magic audience strings?",
"pr_file_module": null
},
{
"comment_id": "2127560619",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-04T22:49:01+00:00",
"comment_author": "jsquire",
"comment_body": "I'm good with that, but its @scottaddie's call. I followed the pattern that he asked for and I'm not sure if there are any wider considerations.",
"pr_file_module": null
},
{
"comment_id": "2130246074",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-05T20:30:31+00:00",
"comment_author": "scottaddie",
"comment_body": "+1 to Chris' suggestion. Most customers won't know this magic audience string.",
"pr_file_module": null
},
{
"comment_id": "2132973364",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50436,
"pr_file": "sdk/extensions/Microsoft.Extensions.Azure/CHANGELOG.md",
"discussion_id": "2127558101",
"commented_code": "@@ -4,6 +4,16 @@\n \n ### Features Added\n \n+- Added support for [federated managed identity](https://learn.microsoft.com/entra/workload-id/workload-identity-federation-config-app-trust-managed-identity?tabs=microsoft-entra-admin-center#azureidentity) support in the client factory by specifying configuration item `credential` as \"managedidentityasfederatedidentity\" and providing the following named configuration items:\n+\n+ - `tenantId` : The tenant where the target resource was created\n+ - `clientId` : The client identifier for the application, which must be granted access on the target resource\n+ - `managedIdentityClientId` : The managed identity which you configured as a Federated Identity Credential (FIC)\n+ - `federatedAudience`: One of the following token audiences, specific to the cloud that you're running in:",
"comment_created_at": "2025-06-06T22:27:09+00:00",
"comment_author": "jsquire",
"comment_body": "Changed to `azureCloud`",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,102 @@
[
{
"discussion_id": "2155220117",
"pr_number": 50534,
"pr_file": "sdk/core/Azure.Core/Benchmarks.Nuget/PipelineScenario.cs",
"created_at": "2025-06-18T18:14:23+00:00",
"commented_code": "using System;\nusing System.Net.Http;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core;\nusing Azure.Core.Pipeline;\n\nnamespace Benchmarks.Nuget\n{\n public class PipelineScenario\n {\n public readonly HttpPipeline _pipeline;\n\n public PipelineScenario()\n {\n var options = new BenchmarkClientOptions\n {\n Transport = new HttpClientTransport(new HttpClient())\n };\n _pipeline = HttpPipelineBuilder.Build(options);\n }\n\n public async Task<Azure.Response> SendAsync()\n {\n var message = _pipeline.CreateMessage();\n message.Request.Uri.Reset(new Uri(\"https://www.example.com\"));\n await _pipeline.SendAsync(message, CancellationToken.None);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2155220117",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50534,
"pr_file": "sdk/core/Azure.Core/Benchmarks.Nuget/PipelineScenario.cs",
"discussion_id": "2155220117",
"commented_code": "@@ -0,0 +1,33 @@\n+using System;\n+using System.Net.Http;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core;\n+using Azure.Core.Pipeline;\n+\n+namespace Benchmarks.Nuget\n+{\n+ public class PipelineScenario\n+ {\n+ public readonly HttpPipeline _pipeline;\n+\n+ public PipelineScenario()\n+ {\n+ var options = new BenchmarkClientOptions\n+ {\n+ Transport = new HttpClientTransport(new HttpClient())\n+ };\n+ _pipeline = HttpPipelineBuilder.Build(options);\n+ }\n+\n+ public async Task<Azure.Response> SendAsync()\n+ {\n+ var message = _pipeline.CreateMessage();\n+ message.Request.Uri.Reset(new Uri(\"https://www.example.com\"));\n+ await _pipeline.SendAsync(message, CancellationToken.None);",
"comment_created_at": "2025-06-18T18:14:23+00:00",
"comment_author": "m-redding",
"comment_body": "When you're running this, does this work with the example.com uri?\r\n\r\nYou could consider a few alternatives since we want to avoid network calls to isolate client code:\r\n- mocking the transport - it looks like PipelineBenchmark used to use a mock HTTP handler implementation https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/perf/PipelineBenchmark.cs#L66\r\n- using a TestServer. This is an example of using a test server https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/System.ClientModel/tests/Pipeline/ClientPipelineFunctionalTests.cs#L40-L46\r\n- integrating with the test proxy https://github.com/Azure/azure-sdk-tools/blob/main/tools/test-proxy/Azure.Sdk.Tools.TestProxy/README.md",
"pr_file_module": null
},
{
"comment_id": "2157684780",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50534,
"pr_file": "sdk/core/Azure.Core/Benchmarks.Nuget/PipelineScenario.cs",
"discussion_id": "2155220117",
"commented_code": "@@ -0,0 +1,33 @@\n+using System;\n+using System.Net.Http;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core;\n+using Azure.Core.Pipeline;\n+\n+namespace Benchmarks.Nuget\n+{\n+ public class PipelineScenario\n+ {\n+ public readonly HttpPipeline _pipeline;\n+\n+ public PipelineScenario()\n+ {\n+ var options = new BenchmarkClientOptions\n+ {\n+ Transport = new HttpClientTransport(new HttpClient())\n+ };\n+ _pipeline = HttpPipelineBuilder.Build(options);\n+ }\n+\n+ public async Task<Azure.Response> SendAsync()\n+ {\n+ var message = _pipeline.CreateMessage();\n+ message.Request.Uri.Reset(new Uri(\"https://www.example.com\"));\n+ await _pipeline.SendAsync(message, CancellationToken.None);",
"comment_created_at": "2025-06-19T21:37:22+00:00",
"comment_author": "sa7936",
"comment_body": "I put back the mocking of transport calls ",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124976957",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/tests/Primitives/ServiceBusRetryPolicyTests.cs",
"created_at": "2025-06-03T21:40:01+00:00",
"commented_code": "return null;\n }\n }\n\n private class CustomServerBusyMockRetryPolicy : ServiceBusRetryPolicy\n {\n public const int MaxRetries = 3;\n\n private int _retryCount = 0;\n\n public int CalculateRetryDelayCallCount { get; private set; }\n\n public void SetServerBusyForTest()\n {\n // Set the private _serverBusyState to ServerBusyState (1)\n var field = typeof(ServiceBusRetryPolicy).GetField(\"_serverBusyState\", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124976957",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/tests/Primitives/ServiceBusRetryPolicyTests.cs",
"discussion_id": "2124976957",
"commented_code": "@@ -114,5 +175,81 @@ public override TimeSpan CalculateTryTimeout(int attemptCount)\n return null;\n }\n }\n+\n+ private class CustomServerBusyMockRetryPolicy : ServiceBusRetryPolicy\n+ {\n+ public const int MaxRetries = 3;\n+\n+ private int _retryCount = 0;\n+\n+ public int CalculateRetryDelayCallCount { get; private set; }\n+\n+ public void SetServerBusyForTest()\n+ {\n+ // Set the private _serverBusyState to ServerBusyState (1)\n+ var field = typeof(ServiceBusRetryPolicy).GetField(\"_serverBusyState\", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);",
"comment_created_at": "2025-06-03T21:40:01+00:00",
"comment_author": "Copilot",
"comment_body": "Consider exposing a protected setter or an internal method in ServiceBusRetryPolicy to modify the server busy state instead of using reflection in test helper classes. This will reduce brittleness in tests if the underlying field name changes.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2112263732",
"pr_number": 50304,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/tests/ShareFileStartTransferDownloadTests.cs",
"created_at": "2025-05-28T16:04:05+00:00",
"commented_code": "using Stream sourceStream = await hardlinkClient.OpenReadAsync();\n using Stream destinationStream = File.OpenRead(destinationResource.Uri.LocalPath);\n Assert.AreEqual(sourceStream.Length, destinationStream.Length);\n Assert.AreEqual(sourceStream, destinationStream);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2112263732",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50304,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/tests/ShareFileStartTransferDownloadTests.cs",
"discussion_id": "2112263732",
"commented_code": "@@ -132,6 +132,7 @@ await TestTransferWithTimeout.WaitForCompletionAsync(\n using Stream sourceStream = await hardlinkClient.OpenReadAsync();\n using Stream destinationStream = File.OpenRead(destinationResource.Uri.LocalPath);\n Assert.AreEqual(sourceStream.Length, destinationStream.Length);\n+ Assert.AreEqual(sourceStream, destinationStream);",
"comment_created_at": "2025-05-28T16:04:05+00:00",
"comment_author": "Copilot",
"comment_body": "Comparing two Stream objects with Assert.AreEqual checks reference equality rather than content. Consider reading both streams into byte arrays and comparing their sequences, or using a helper method to verify that the contents match.\n```suggestion\n byte[] sourceBytes;\n byte[] destinationBytes;\n\n using (MemoryStream sourceMemoryStream = new MemoryStream())\n {\n await sourceStream.CopyToAsync(sourceMemoryStream);\n sourceBytes = sourceMemoryStream.ToArray();\n }\n\n using (MemoryStream destinationMemoryStream = new MemoryStream())\n {\n await destinationStream.CopyToAsync(destinationMemoryStream);\n destinationBytes = destinationMemoryStream.ToArray();\n }\n\n Assert.AreEqual(sourceBytes.Length, destinationBytes.Length, \"Stream lengths do not match.\");\n Assert.AreEqual(sourceBytes, destinationBytes, \"Stream contents do not match.\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2103231256",
"pr_number": 50222,
"pr_file": "sdk/communication/Azure.Communication.CallAutomation/tests/CallRecordings/CallRecordingAutomatedLiveTests.cs",
"created_at": "2025-05-22T19:06:18+00:00",
"commented_code": "Assert.AreEqual(StatusCodes.Status200OK, startRecordingResponse.GetRawResponse().Status);\n Assert.NotNull(startRecordingResponse.Value.RecordingId);\n\n // Update the property name from 'PlaySourceId' to 'PlaySourceCacheId' as per the provided type signature.\n var playSource = new FileSource(new Uri(TestEnvironment.FileSourceUrl)) { PlaySourceCacheId = \"test-audio\" };\n var playResponse = await response.CallConnection.GetCallMedia().PlayToAllAsync(playSource);\n Assert.NotNull(playResponse);\n Assert.AreEqual(202, playResponse.GetRawResponse().Status);\n\n await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);\n\n // try stop recording\n var stopRecordingResponse = await client.GetCallRecording().StopAsync(startRecordingResponse.Value.RecordingId);\n Assert.AreEqual(StatusCodes.Status204NoContent, stopRecordingResponse.Status);\n\n await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2103231256",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50222,
"pr_file": "sdk/communication/Azure.Communication.CallAutomation/tests/CallRecordings/CallRecordingAutomatedLiveTests.cs",
"discussion_id": "2103231256",
"commented_code": "@@ -472,10 +472,20 @@ public async Task GetRecordingTest()\n Assert.AreEqual(StatusCodes.Status200OK, startRecordingResponse.GetRawResponse().Status);\n Assert.NotNull(startRecordingResponse.Value.RecordingId);\n \n+ // Update the property name from 'PlaySourceId' to 'PlaySourceCacheId' as per the provided type signature.\n+ var playSource = new FileSource(new Uri(TestEnvironment.FileSourceUrl)) { PlaySourceCacheId = \"test-audio\" };\n+ var playResponse = await response.CallConnection.GetCallMedia().PlayToAllAsync(playSource);\n+ Assert.NotNull(playResponse);\n+ Assert.AreEqual(202, playResponse.GetRawResponse().Status);\n+\n+ await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);\n+\n // try stop recording\n var stopRecordingResponse = await client.GetCallRecording().StopAsync(startRecordingResponse.Value.RecordingId);\n Assert.AreEqual(StatusCodes.Status204NoContent, stopRecordingResponse.Status);\n \n+ await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);",
"comment_created_at": "2025-05-22T19:06:18+00:00",
"comment_author": "Copilot",
"comment_body": "Using fixed delays can lead to flaky tests; consider polling the recording status with a timeout or exponential backoff instead of a hardcoded Task.Delay.\n```suggestion\n await WaitForConditionAsync(\n async () =>\n {\n var recordingState = await client.GetCallRecording().GetRecordingStateAsync(startRecordingResponse.Value.RecordingId).ConfigureAwait(false);\n return recordingState.Value == RecordingState.Active;\n },\n TimeSpan.FromSeconds(10),\n TimeSpan.FromMilliseconds(500)\n ).ConfigureAwait(false);\n\n // try stop recording\n var stopRecordingResponse = await client.GetCallRecording().StopAsync(startRecordingResponse.Value.RecordingId);\n Assert.AreEqual(StatusCodes.Status204NoContent, stopRecordingResponse.Status);\n\n await WaitForConditionAsync(\n async () =>\n {\n var recordingState = await client.GetCallRecording().GetRecordingStateAsync(startRecordingResponse.Value.RecordingId).ConfigureAwait(false);\n return recordingState.Value == RecordingState.Stopped;\n },\n TimeSpan.FromSeconds(10),\n TimeSpan.FromMilliseconds(500)\n ).ConfigureAwait(false);\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -68,107 +68,3 @@ Tests should be deterministic, reliable, and isolated from external dependencies
```
These practices ensure tests remain stable across environments and over time, reducing maintenance costs and improving developer productivity.
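One hedged sketch of the mock-transport approach raised in the first discussion below — the handler name is invented, and `BenchmarkClientOptions` is the options type from the code under review:
```C#
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

// Returns a canned 200 response for every request, so pipeline
// benchmarks and tests never make a real network call.
internal class MockHttpHandler : HttpMessageHandler
{
    protected override Task<HttpResponseMessage> SendAsync(
        HttpRequestMessage request, CancellationToken cancellationToken)
        => Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK));
}

// Wiring it into the pipeline from the reviewed scenario:
//   var options = new BenchmarkClientOptions
//   {
//       Transport = new HttpClientTransport(new HttpClient(new MockHttpHandler()))
//   };
//   var pipeline = HttpPipelineBuilder.Build(options);
```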
[
{
"discussion_id": "2155220117",
"pr_number": 50534,
"pr_file": "sdk/core/Azure.Core/Benchmarks.Nuget/PipelineScenario.cs",
"created_at": "2025-06-18T18:14:23+00:00",
"commented_code": "\ufeffusing System;\nusing System.Net.Http;\nusing System.Threading;\nusing System.Threading.Tasks;\nusing Azure.Core;\nusing Azure.Core.Pipeline;\n\nnamespace Benchmarks.Nuget\n{\n public class PipelineScenario\n {\n public readonly HttpPipeline _pipeline;\n\n public PipelineScenario()\n {\n var options = new BenchmarkClientOptions\n {\n Transport = new HttpClientTransport(new HttpClient())\n };\n _pipeline = HttpPipelineBuilder.Build(options);\n }\n\n public async Task<Azure.Response> SendAsync()\n {\n var message = _pipeline.CreateMessage();\n message.Request.Uri.Reset(new Uri(\"https://www.example.com\"));\n await _pipeline.SendAsync(message, CancellationToken.None);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2155220117",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50534,
"pr_file": "sdk/core/Azure.Core/Benchmarks.Nuget/PipelineScenario.cs",
"discussion_id": "2155220117",
"commented_code": "@@ -0,0 +1,33 @@\n+\ufeffusing System;\n+using System.Net.Http;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core;\n+using Azure.Core.Pipeline;\n+\n+namespace Benchmarks.Nuget\n+{\n+ public class PipelineScenario\n+ {\n+ public readonly HttpPipeline _pipeline;\n+\n+ public PipelineScenario()\n+ {\n+ var options = new BenchmarkClientOptions\n+ {\n+ Transport = new HttpClientTransport(new HttpClient())\n+ };\n+ _pipeline = HttpPipelineBuilder.Build(options);\n+ }\n+\n+ public async Task<Azure.Response> SendAsync()\n+ {\n+ var message = _pipeline.CreateMessage();\n+ message.Request.Uri.Reset(new Uri(\"https://www.example.com\"));\n+ await _pipeline.SendAsync(message, CancellationToken.None);",
"comment_created_at": "2025-06-18T18:14:23+00:00",
"comment_author": "m-redding",
"comment_body": "When you're running this, does this work with the example.com uri?\r\n\r\nYou could consider a few alternatives since we want to avoid network calls to isolate client code:\r\n- mocking the transport - it looks like PipelineBenchmark used to use a mock HTTP handler implementation https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/perf/PipelineBenchmark.cs#L66\r\n- using a TestServer. This is an example of using a test server https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/System.ClientModel/tests/Pipeline/ClientPipelineFunctionalTests.cs#L40-L46\r\n- integrating with the test proxy https://github.com/Azure/azure-sdk-tools/blob/main/tools/test-proxy/Azure.Sdk.Tools.TestProxy/README.md",
"pr_file_module": null
},
{
"comment_id": "2157684780",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50534,
"pr_file": "sdk/core/Azure.Core/Benchmarks.Nuget/PipelineScenario.cs",
"discussion_id": "2155220117",
"commented_code": "@@ -0,0 +1,33 @@\n+\ufeffusing System;\n+using System.Net.Http;\n+using System.Threading;\n+using System.Threading.Tasks;\n+using Azure.Core;\n+using Azure.Core.Pipeline;\n+\n+namespace Benchmarks.Nuget\n+{\n+ public class PipelineScenario\n+ {\n+ public readonly HttpPipeline _pipeline;\n+\n+ public PipelineScenario()\n+ {\n+ var options = new BenchmarkClientOptions\n+ {\n+ Transport = new HttpClientTransport(new HttpClient())\n+ };\n+ _pipeline = HttpPipelineBuilder.Build(options);\n+ }\n+\n+ public async Task<Azure.Response> SendAsync()\n+ {\n+ var message = _pipeline.CreateMessage();\n+ message.Request.Uri.Reset(new Uri(\"https://www.example.com\"));\n+ await _pipeline.SendAsync(message, CancellationToken.None);",
"comment_created_at": "2025-06-19T21:37:22+00:00",
"comment_author": "sa7936",
"comment_body": "I put back the mocking of transport calls ",
"pr_file_module": null
}
]
},
{
"discussion_id": "2124976957",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/tests/Primitives/ServiceBusRetryPolicyTests.cs",
"created_at": "2025-06-03T21:40:01+00:00",
"commented_code": "return null;\n }\n }\n\n private class CustomServerBusyMockRetryPolicy : ServiceBusRetryPolicy\n {\n public const int MaxRetries = 3;\n\n private int _retryCount = 0;\n\n public int CalculateRetryDelayCallCount { get; private set; }\n\n public void SetServerBusyForTest()\n {\n // Set the private _serverBusyState to ServerBusyState (1)\n var field = typeof(ServiceBusRetryPolicy).GetField(\"_serverBusyState\", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2124976957",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50394,
"pr_file": "sdk/servicebus/Azure.Messaging.ServiceBus/tests/Primitives/ServiceBusRetryPolicyTests.cs",
"discussion_id": "2124976957",
"commented_code": "@@ -114,5 +175,81 @@ public override TimeSpan CalculateTryTimeout(int attemptCount)\n return null;\n }\n }\n+\n+ private class CustomServerBusyMockRetryPolicy : ServiceBusRetryPolicy\n+ {\n+ public const int MaxRetries = 3;\n+\n+ private int _retryCount = 0;\n+\n+ public int CalculateRetryDelayCallCount { get; private set; }\n+\n+ public void SetServerBusyForTest()\n+ {\n+ // Set the private _serverBusyState to ServerBusyState (1)\n+ var field = typeof(ServiceBusRetryPolicy).GetField(\"_serverBusyState\", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);",
"comment_created_at": "2025-06-03T21:40:01+00:00",
"comment_author": "Copilot",
"comment_body": "Consider exposing a protected setter or an internal method in ServiceBusRetryPolicy to modify the server busy state instead of using reflection in test helper classes. This will reduce brittleness in tests if the underlying field name changes.",
"pr_file_module": null
}
]
},
{
"discussion_id": "2112263732",
"pr_number": 50304,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/tests/ShareFileStartTransferDownloadTests.cs",
"created_at": "2025-05-28T16:04:05+00:00",
"commented_code": "using Stream sourceStream = await hardlinkClient.OpenReadAsync();\n using Stream destinationStream = File.OpenRead(destinationResource.Uri.LocalPath);\n Assert.AreEqual(sourceStream.Length, destinationStream.Length);\n Assert.AreEqual(sourceStream, destinationStream);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2112263732",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50304,
"pr_file": "sdk/storage/Azure.Storage.DataMovement.Files.Shares/tests/ShareFileStartTransferDownloadTests.cs",
"discussion_id": "2112263732",
"commented_code": "@@ -132,6 +132,7 @@ await TestTransferWithTimeout.WaitForCompletionAsync(\n using Stream sourceStream = await hardlinkClient.OpenReadAsync();\n using Stream destinationStream = File.OpenRead(destinationResource.Uri.LocalPath);\n Assert.AreEqual(sourceStream.Length, destinationStream.Length);\n+ Assert.AreEqual(sourceStream, destinationStream);",
"comment_created_at": "2025-05-28T16:04:05+00:00",
"comment_author": "Copilot",
"comment_body": "Comparing two Stream objects with Assert.AreEqual checks reference equality rather than content. Consider reading both streams into byte arrays and comparing their sequences, or using a helper method to verify that the contents match.\n```suggestion\n byte[] sourceBytes;\n byte[] destinationBytes;\n\n using (MemoryStream sourceMemoryStream = new MemoryStream())\n {\n await sourceStream.CopyToAsync(sourceMemoryStream);\n sourceBytes = sourceMemoryStream.ToArray();\n }\n\n using (MemoryStream destinationMemoryStream = new MemoryStream())\n {\n await destinationStream.CopyToAsync(destinationMemoryStream);\n destinationBytes = destinationMemoryStream.ToArray();\n }\n\n Assert.AreEqual(sourceBytes.Length, destinationBytes.Length, \"Stream lengths do not match.\");\n Assert.AreEqual(sourceBytes, destinationBytes, \"Stream contents do not match.\");\n```",
"pr_file_module": null
}
]
},
{
"discussion_id": "2103231256",
"pr_number": 50222,
"pr_file": "sdk/communication/Azure.Communication.CallAutomation/tests/CallRecordings/CallRecordingAutomatedLiveTests.cs",
"created_at": "2025-05-22T19:06:18+00:00",
"commented_code": "Assert.AreEqual(StatusCodes.Status200OK, startRecordingResponse.GetRawResponse().Status);\n Assert.NotNull(startRecordingResponse.Value.RecordingId);\n\n // Update the property name from 'PlaySourceId' to 'PlaySourceCacheId' as per the provided type signature.\n var playSource = new FileSource(new Uri(TestEnvironment.FileSourceUrl)) { PlaySourceCacheId = \"test-audio\" };\n var playResponse = await response.CallConnection.GetCallMedia().PlayToAllAsync(playSource);\n Assert.NotNull(playResponse);\n Assert.AreEqual(202, playResponse.GetRawResponse().Status);\n\n await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);\n\n // try stop recording\n var stopRecordingResponse = await client.GetCallRecording().StopAsync(startRecordingResponse.Value.RecordingId);\n Assert.AreEqual(StatusCodes.Status204NoContent, stopRecordingResponse.Status);\n\n await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);",
"repo_full_name": "Azure/azure-sdk-for-net",
"discussion_comments": [
{
"comment_id": "2103231256",
"repo_full_name": "Azure/azure-sdk-for-net",
"pr_number": 50222,
"pr_file": "sdk/communication/Azure.Communication.CallAutomation/tests/CallRecordings/CallRecordingAutomatedLiveTests.cs",
"discussion_id": "2103231256",
"commented_code": "@@ -472,10 +472,20 @@ public async Task GetRecordingTest()\n Assert.AreEqual(StatusCodes.Status200OK, startRecordingResponse.GetRawResponse().Status);\n Assert.NotNull(startRecordingResponse.Value.RecordingId);\n \n+ // Update the property name from 'PlaySourceId' to 'PlaySourceCacheId' as per the provided type signature.\n+ var playSource = new FileSource(new Uri(TestEnvironment.FileSourceUrl)) { PlaySourceCacheId = \"test-audio\" };\n+ var playResponse = await response.CallConnection.GetCallMedia().PlayToAllAsync(playSource);\n+ Assert.NotNull(playResponse);\n+ Assert.AreEqual(202, playResponse.GetRawResponse().Status);\n+\n+ await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);\n+\n // try stop recording\n var stopRecordingResponse = await client.GetCallRecording().StopAsync(startRecordingResponse.Value.RecordingId);\n Assert.AreEqual(StatusCodes.Status204NoContent, stopRecordingResponse.Status);\n \n+ await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false);",
"comment_created_at": "2025-05-22T19:06:18+00:00",
"comment_author": "Copilot",
"comment_body": "Using fixed delays can lead to flaky tests; consider polling the recording status with a timeout or exponential backoff instead of a hardcoded Task.Delay.\n```suggestion\n await WaitForConditionAsync(\n async () =>\n {\n var recordingState = await client.GetCallRecording().GetRecordingStateAsync(startRecordingResponse.Value.RecordingId).ConfigureAwait(false);\n return recordingState.Value == RecordingState.Active;\n },\n TimeSpan.FromSeconds(10),\n TimeSpan.FromMilliseconds(500)\n ).ConfigureAwait(false);\n\n // try stop recording\n var stopRecordingResponse = await client.GetCallRecording().StopAsync(startRecordingResponse.Value.RecordingId);\n Assert.AreEqual(StatusCodes.Status204NoContent, stopRecordingResponse.Status);\n\n await WaitForConditionAsync(\n async () =>\n {\n var recordingState = await client.GetCallRecording().GetRecordingStateAsync(startRecordingResponse.Value.RecordingId).ConfigureAwait(false);\n return recordingState.Value == RecordingState.Stopped;\n },\n TimeSpan.FromSeconds(10),\n TimeSpan.FromMilliseconds(500)\n ).ConfigureAwait(false);\n```",
"pr_file_module": null
}
]
}
]

View File

@@ -0,0 +1,206 @@
[
{
"discussion_id": "2098810301",
"pr_number": 14794,
"pr_file": "lib/chef/resource/helpers/path_helpers.rb",
"created_at": "2025-05-20T20:24:51+00:00",
"commented_code": "require \"chef-utils/dist\" unless defined?(ChefUtils::Dist)\nclass Chef\n module ResourceHelpers\n # Helpers for path manipulation\n module PathHelpers\n extend self\n # The habitat binary path for Infra Client\n # @return [String]\n def chef_client_hab_binary_path\n # Find the most recent version by listing directories\n # This is heavy operation and should be avoided but currently habitat does not create a symlink by default\n # and binlink will be created only if `binlink` option is passed so we cannot assume binlink will be present.\n windows = RUBY_PLATFORM =~ /mswin|mingw|windows/ || defined?(ChefUtils) && ChefUtils.windows?\n base_path = \"/hab/pkgs/chef/#{ChefUtils::Dist::Infra::HABITAT_PKG}\"\n base_path = \"C:/#{base_path}\" if windows\n if File.directory?(base_path)\n # Get all version directories\n versions = Dir.glob(\"#{base_path}/*\").select { |d| File.directory?(d) }\n\n if versions.any?\n # Get the latest version (based on modification time)\n latest_version_dir = versions.max_by { |v| File.mtime(v) }",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "2098810301",
"repo_full_name": "chef/chef",
"pr_number": 14794,
"pr_file": "lib/chef/resource/helpers/path_helpers.rb",
"discussion_id": "2098810301",
"commented_code": "@@ -0,0 +1,41 @@\n+require \"chef-utils/dist\" unless defined?(ChefUtils::Dist)\n+class Chef\n+ module ResourceHelpers\n+ # Helpers for path manipulation\n+ module PathHelpers\n+ extend self\n+ # The habitat binary path for Infra Client\n+ # @return [String]\n+ def chef_client_hab_binary_path\n+ # Find the most recent version by listing directories\n+ # This is heavy operation and should be avoided but currently habitat does not create a symlink by default\n+ # and binlink will be created only if `binlink` option is passed so we cannot assume binlink will be present.\n+ windows = RUBY_PLATFORM =~ /mswin|mingw|windows/ || defined?(ChefUtils) && ChefUtils.windows?\n+ base_path = \"/hab/pkgs/chef/#{ChefUtils::Dist::Infra::HABITAT_PKG}\"\n+ base_path = \"C:/#{base_path}\" if windows\n+ if File.directory?(base_path)\n+ # Get all version directories\n+ versions = Dir.glob(\"#{base_path}/*\").select { |d| File.directory?(d) }\n+\n+ if versions.any?\n+ # Get the latest version (based on modification time)\n+ latest_version_dir = versions.max_by { |v| File.mtime(v) }",
"comment_created_at": "2025-05-20T20:24:51+00:00",
"comment_author": "tpowell-progress",
"comment_body": "This is potentially problematic and not necessarily deterministic in that it will likely pick the most recent install vs. probably the intended \"most recent version\" for this logic. Further, although unlikely, any mtime update of the base package directory will place that package at the front of the line for being chosen, so if you renamed the directory or something for a test, you might end up forcing that version to always be chosen?",
"pr_file_module": null
}
]
},
{
"discussion_id": "1688678713",
"pr_number": 14462,
"pr_file": "lib/chef/provider/package/chocolatey.rb",
"created_at": "2024-07-23T20:29:24+00:00",
"commented_code": "# Choco V2 uses 'Search' for remote repositories and 'List' for local packages\n def query_command\n return \"list\" if get_choco_version.match?(/^1/)\n return \"list\" if get_choco_version.match?(/^1/) && !get_choco_version.match?(/^1.4/)",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "1688678713",
"repo_full_name": "chef/chef",
"pr_number": 14462,
"pr_file": "lib/chef/provider/package/chocolatey.rb",
"discussion_id": "1688678713",
"commented_code": "@@ -153,7 +153,7 @@ def get_choco_version\n \n # Choco V2 uses 'Search' for remote repositories and 'List' for local packages\n def query_command\n- return \"list\" if get_choco_version.match?(/^1/)\n+ return \"list\" if get_choco_version.match?(/^1/) && !get_choco_version.match?(/^1.4/)",
"comment_created_at": "2024-07-23T20:29:24+00:00",
"comment_author": "jaymzh",
"comment_body": "This will break again when 1.5 comes out. We should be a bit more thorough here. Maybe something like...\r\n\r\n```suggestion\r\n v_bits = get_choco_version.split('.')\r\n # 1.4+ and 2+\r\n if v_bits[0].to_i > 1 || v_bits[1] > 3\r\n return \"search\"\r\n end\r\n \"list\"\r\n```",
"pr_file_module": null
},
{
"comment_id": "1693325682",
"repo_full_name": "chef/chef",
"pr_number": 14462,
"pr_file": "lib/chef/provider/package/chocolatey.rb",
"discussion_id": "1688678713",
"commented_code": "@@ -153,7 +153,7 @@ def get_choco_version\n \n # Choco V2 uses 'Search' for remote repositories and 'List' for local packages\n def query_command\n- return \"list\" if get_choco_version.match?(/^1/)\n+ return \"list\" if get_choco_version.match?(/^1/) && !get_choco_version.match?(/^1.4/)",
"comment_created_at": "2024-07-26T16:29:48+00:00",
"comment_author": "tpowell-progress",
"comment_body": "@jaymzh I'll raise you a `Gem::Dependency.new('', '< 1.4.0').match?('', get_choco_version)`",
"pr_file_module": null
}
]
},
{
"discussion_id": "1369665177",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"created_at": "2023-10-24T06:06:30+00:00",
"commented_code": "class Chef\n class Resource\n class ChocolateyInstaller < Chef::Resource\n provides :chocolatey_installer\n\n description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n introduced \"18.3\"\n examples <<~DOC\n **Install Chocolatey**\n\n ```ruby\n chocolatey_installer 'latest' do\n action :install\n end\n ```\n\n **Uninstall Chocolatey**\n\n ```ruby\n chocolatey_installer 'Some random verbiage' do\n action :uninstall\n end\n ```\n\n **Install Chocolatey with Parameters**\n\n ```ruby\n chocolatey_installer 'latest' do\n action :install\n download_url \"https://www.contoso.com/foo\"\n chocolatey_version '2.12.24'\n end\n ```\n\n **Upgrade Chocolatey with Parameters**\n\n ```ruby\n chocolatey_installer 'latest' do\n action :upgrade\n chocolatey_version '2.12.24'\n end\n ```\n DOC\n\n allowed_actions :install, :uninstall, :upgrade\n\n property :download_url, String,\n description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n\n property :chocolatey_version, String,\n description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n\n property :use_native_unzip, [TrueClass, FalseClass], default: false,\n description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n\n property :ignore_proxy, [TrueClass, FalseClass], default: false,\n description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n\n property :proxy_url, String,\n description: \"Specifies the proxy URL to use during the download.\"\n\n property :proxy_user, String,\n description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n\n property :proxy_password, String,\n description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n\n load_current_value do\n current_state = is_choco_installed?\n current_value_does_not_exist! if current_state == false\n current_state\n end\n\n def is_choco_installed?\n ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n end\n\n def get_choco_version\n powershell_exec(\"choco --version\").result\n end\n\n def existing_version",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "1369665177",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T06:06:30+00:00",
"comment_author": "jaymzh",
"comment_body": "Does it conform to gem versions? We have other version helpers if not. Gem versions are very strict.",
"pr_file_module": null
},
{
"comment_id": "1370147997",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T13:22:33+00:00",
"comment_author": "johnmccrae",
"comment_body": "I will convert that over then, I was just using that to have a consistent way of matching versions",
"pr_file_module": null
},
{
"comment_id": "1370235896",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T14:05:49+00:00",
"comment_author": "johnmccrae",
"comment_body": "BTW, got a pointer for me? Everything I read says to use Gem::Version",
"pr_file_module": null
},
{
"comment_id": "1370368983",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T15:06:29+00:00",
"comment_author": "tpowell-progress",
"comment_body": "If we're just comparing two *specific* gem versions, `Gem::Version` sounds good to me. If we're looking to have more general version specifiers (`~>`, etc...), it might still work, but we might want to consider how the user would expect things to work in context.",
"pr_file_module": null
},
{
"comment_id": "1370489394",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T16:23:19+00:00",
"comment_author": "johnmccrae",
"comment_body": "I'm a total dork, ignore me. I understand now. I was overthinking this.",
"pr_file_module": null
}
]
},
{
"discussion_id": "1092455427",
"pr_number": 13535,
"pr_file": "lib/chef/resource/apt_repository.rb",
"created_at": "2023-01-31T20:37:07+00:00",
"commented_code": "end.compact\n end\n\n # run the specified command and extract the public key ids\n # accepts the command so it can be used to extract both the current keys\n # and the new keys\n # @param [Array<String>] cmd the command to run\n #\n # @return [Array] an array of key ids\n def extract_public_keys_from_cmd(*cmd)\n so = shell_out(*cmd)\n # Sample output\n # pub:-:4096:1:D94AA3F0EFE21092:1336774248:::-:::scSC::::::23::0:\n so.stdout.split(/\\n/).map do |t|\n z = t.match(/(^pub:.?:\\d*:\\d*:\\w*:[\\d-]*):/)",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "1092455427",
"repo_full_name": "chef/chef",
"pr_number": 13535,
"pr_file": "lib/chef/resource/apt_repository.rb",
"discussion_id": "1092455427",
"commented_code": "@@ -187,6 +187,22 @@ def extract_fingerprints_from_cmd(*cmd)\n end.compact\n end\n \n+ # run the specified command and extract the public key ids\n+ # accepts the command so it can be used to extract both the current keys\n+ # and the new keys\n+ # @param [Array<String>] cmd the command to run\n+ #\n+ # @return [Array] an array of key ids\n+ def extract_public_keys_from_cmd(*cmd)\n+ so = shell_out(*cmd)\n+ # Sample output\n+ # pub:-:4096:1:D94AA3F0EFE21092:1336774248:::-:::scSC::::::23::0:\n+ so.stdout.split(/\\n/).map do |t|\n+ z = t.match(/(^pub:.?:\\d*:\\d*:\\w*:[\\d-]*):/)",
"comment_created_at": "2023-01-31T20:37:07+00:00",
"comment_author": "tpowell-progress",
"comment_body": "Suggest splitting on the `':'` and grabbing the first `n` fields rejoined with `':'` ... It's a little bit harder to sort out how many fields are being grabbed with the regex especially with most of this under the capture group, and the regex isn't performing significant validation, etc...",
"pr_file_module": null
}
]
},
{
"discussion_id": "930358767",
"pr_number": 13069,
"pr_file": "lib/chef/property.rb",
"created_at": "2022-07-26T19:56:06+00:00",
"commented_code": "#\n def required?(action = nil)\n if !action.nil? && options[:required].is_a?(Array)\n options[:required].include?(action)\n (options[:required] & Array(action)).any?",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "930358767",
"repo_full_name": "chef/chef",
"pr_number": 13069,
"pr_file": "lib/chef/property.rb",
"discussion_id": "930358767",
"commented_code": "@@ -305,7 +305,7 @@ def has_default?\n #\n def required?(action = nil)\n if !action.nil? && options[:required].is_a?(Array)\n- options[:required].include?(action)\n+ (options[:required] & Array(action)).any?",
"comment_created_at": "2022-07-26T19:56:06+00:00",
"comment_author": "marcparadise",
"comment_body": "How does this functionally differ from the original check? When running through some scenarios, I can't come up with one that returns a different result given pre-conditions of a non-nil action and :required having an Array. ",
"pr_file_module": null
},
{
"comment_id": "934692971",
"repo_full_name": "chef/chef",
"pr_number": 13069,
"pr_file": "lib/chef/property.rb",
"discussion_id": "930358767",
"commented_code": "@@ -305,7 +305,7 @@ def has_default?\n #\n def required?(action = nil)\n if !action.nil? && options[:required].is_a?(Array)\n- options[:required].include?(action)\n+ (options[:required] & Array(action)).any?",
"comment_created_at": "2022-08-01T16:05:49+00:00",
"comment_author": "sabat",
"comment_body": "> How does this functionally differ from the original check?\r\n\r\nThe original cannot handle a check for multiple actions. It's fine if it's checking for just :create for example, but if `action` is an array with both :create and :delete it would fail with a simple `include?` test. I wrote this over a year ago and the details of the context now escape me, but IIRC there are situations where this needs to test for multiple actions—where `action` is going to be an array of actions, and `include?` only tests for the presence of a single object.",
"pr_file_module": null
}
]
},
{
"discussion_id": "438615154",
"pr_number": 9976,
"pr_file": "lib/chef/provider/package/apt.rb",
"created_at": "2020-06-11T08:09:39+00:00",
"commented_code": "end\n\n def install_package(name, version)\n name = check_availability(name)",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "438615154",
"repo_full_name": "chef/chef",
"pr_number": 9976,
"pr_file": "lib/chef/provider/package/apt.rb",
"discussion_id": "438615154",
"commented_code": "@@ -89,6 +89,7 @@ def locked_packages\n end\n \n def install_package(name, version)\n+ name = check_availability(name)",
"comment_created_at": "2020-06-11T08:09:39+00:00",
"comment_author": "jaymzh",
"comment_body": "This only fixes... part of the bug.\r\n\r\nIt will ensure that if you try to purge `['a', 'b', 'c']` and `c` isn't a known package, that it won't try to purge `c`... which is good...\r\n\r\nBut the actual bug is that if `a` is installed, it'll automatically try to uninstall `b`, and `c` even if they are not installed.\r\n\r\nSo imagine:\r\n* a - installed\r\n* b - uninstalled, but available\r\n* c - uninstalled and unavailable\r\n\r\nCurrent behavior is Chef will try to purge `a`, `b` and `c`\r\n\r\nWith your PR it'll just try to purge `a` and `b`\r\n\r\nBut the *right* behavior is that it should only try to purge `a`.",
"pr_file_module": null
}
]
}
]

View File

@@ -43,211 +43,3 @@ options[:required].include?(action)
```
Remember that algorithm selection significantly impacts code reliability, maintainability, and performance. Choose algorithms that express the true intent of the operation rather than the first approach that happens to work.
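The Chocolatey version checks in the surrounding threads show why: matching a version string with a regex like `/^1.4/` encodes the real question ("is this older than 1.4?") only by accident, and silently breaks on `1.5` or `1.40`. Parsing and comparing versions states the intent directly. The sketch below is a hedged C# rendering of that idea; the code under review is Ruby, where `Gem::Version` (or the reviewers' `Gem::Dependency` trick) plays the same role.

```
using System;

public static class ChocoQuery
{
    // Hypothetical sketch; the provider under review is Ruby and would
    // use Gem::Version rather than System.Version.
    public static string QueryCommand(string chocoVersion)
    {
        // Parse once and compare numerically rather than pattern-matching
        // the string form, so "1.40" and "1.5" sort where they should.
        var parsed = Version.Parse(chocoVersion);
        return parsed < new Version(1, 4) ? "list" : "search";
    }
}
```

`Version.Parse` expects at least a major and minor component, which `choco --version` output such as `2.2.2` satisfies; the point is the shape of the check, not the specific type.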
[
{
"discussion_id": "2098810301",
"pr_number": 14794,
"pr_file": "lib/chef/resource/helpers/path_helpers.rb",
"created_at": "2025-05-20T20:24:51+00:00",
"commented_code": "require \"chef-utils/dist\" unless defined?(ChefUtils::Dist)\nclass Chef\n module ResourceHelpers\n # Helpers for path manipulation\n module PathHelpers\n extend self\n # The habitat binary path for Infra Client\n # @return [String]\n def chef_client_hab_binary_path\n # Find the most recent version by listing directories\n # This is heavy operation and should be avoided but currently habitat does not create a symlink by default\n # and binlink will be created only if `binlink` option is passed so we cannot assume binlink will be present.\n windows = RUBY_PLATFORM =~ /mswin|mingw|windows/ || defined?(ChefUtils) && ChefUtils.windows?\n base_path = \"/hab/pkgs/chef/#{ChefUtils::Dist::Infra::HABITAT_PKG}\"\n base_path = \"C:/#{base_path}\" if windows\n if File.directory?(base_path)\n # Get all version directories\n versions = Dir.glob(\"#{base_path}/*\").select { |d| File.directory?(d) }\n\n if versions.any?\n # Get the latest version (based on modification time)\n latest_version_dir = versions.max_by { |v| File.mtime(v) }",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "2098810301",
"repo_full_name": "chef/chef",
"pr_number": 14794,
"pr_file": "lib/chef/resource/helpers/path_helpers.rb",
"discussion_id": "2098810301",
"commented_code": "@@ -0,0 +1,41 @@\n+require \"chef-utils/dist\" unless defined?(ChefUtils::Dist)\n+class Chef\n+ module ResourceHelpers\n+ # Helpers for path manipulation\n+ module PathHelpers\n+ extend self\n+ # The habitat binary path for Infra Client\n+ # @return [String]\n+ def chef_client_hab_binary_path\n+ # Find the most recent version by listing directories\n+ # This is heavy operation and should be avoided but currently habitat does not create a symlink by default\n+ # and binlink will be created only if `binlink` option is passed so we cannot assume binlink will be present.\n+ windows = RUBY_PLATFORM =~ /mswin|mingw|windows/ || defined?(ChefUtils) && ChefUtils.windows?\n+ base_path = \"/hab/pkgs/chef/#{ChefUtils::Dist::Infra::HABITAT_PKG}\"\n+ base_path = \"C:/#{base_path}\" if windows\n+ if File.directory?(base_path)\n+ # Get all version directories\n+ versions = Dir.glob(\"#{base_path}/*\").select { |d| File.directory?(d) }\n+\n+ if versions.any?\n+ # Get the latest version (based on modification time)\n+ latest_version_dir = versions.max_by { |v| File.mtime(v) }",
"comment_created_at": "2025-05-20T20:24:51+00:00",
"comment_author": "tpowell-progress",
"comment_body": "This is potentially problematic and not necessarily deterministic in that it will likely pick the most recent install vs. probably the intended \"most recent version\" for this logic. Further, although unlikely, any mtime update of the base package directory will place that package at the front of the line for being chosen, so if you renamed the directory or something for a test, you might end up forcing that version to always be chosen?",
"pr_file_module": null
}
]
},
{
"discussion_id": "1688678713",
"pr_number": 14462,
"pr_file": "lib/chef/provider/package/chocolatey.rb",
"created_at": "2024-07-23T20:29:24+00:00",
"commented_code": "# Choco V2 uses 'Search' for remote repositories and 'List' for local packages\n def query_command\n return \"list\" if get_choco_version.match?(/^1/)\n return \"list\" if get_choco_version.match?(/^1/) && !get_choco_version.match?(/^1.4/)",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "1688678713",
"repo_full_name": "chef/chef",
"pr_number": 14462,
"pr_file": "lib/chef/provider/package/chocolatey.rb",
"discussion_id": "1688678713",
"commented_code": "@@ -153,7 +153,7 @@ def get_choco_version\n \n # Choco V2 uses 'Search' for remote repositories and 'List' for local packages\n def query_command\n- return \"list\" if get_choco_version.match?(/^1/)\n+ return \"list\" if get_choco_version.match?(/^1/) && !get_choco_version.match?(/^1.4/)",
"comment_created_at": "2024-07-23T20:29:24+00:00",
"comment_author": "jaymzh",
"comment_body": "This will break again when 1.5 comes out. We should be a bit more thorough here. Maybe something like...\r\n\r\n```suggestion\r\n v_bits = get_choco_version.split('.')\r\n # 1.4+ and 2+\r\n if v_bits[0].to_i > 1 || v_bits[1] > 3\r\n return \"search\"\r\n end\r\n \"list\"\r\n```",
"pr_file_module": null
},
{
"comment_id": "1693325682",
"repo_full_name": "chef/chef",
"pr_number": 14462,
"pr_file": "lib/chef/provider/package/chocolatey.rb",
"discussion_id": "1688678713",
"commented_code": "@@ -153,7 +153,7 @@ def get_choco_version\n \n # Choco V2 uses 'Search' for remote repositories and 'List' for local packages\n def query_command\n- return \"list\" if get_choco_version.match?(/^1/)\n+ return \"list\" if get_choco_version.match?(/^1/) && !get_choco_version.match?(/^1.4/)",
"comment_created_at": "2024-07-26T16:29:48+00:00",
"comment_author": "tpowell-progress",
"comment_body": "@jaymzh I'll raise you a `Gem::Dependency.new('', '< 1.4.0').match?('', get_choco_version)`",
"pr_file_module": null
}
]
},
{
"discussion_id": "1369665177",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"created_at": "2023-10-24T06:06:30+00:00",
"commented_code": "class Chef\n class Resource\n class ChocolateyInstaller < Chef::Resource\n provides :chocolatey_installer\n\n description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n introduced \"18.3\"\n examples <<~DOC\n **Install Chocolatey**\n\n ```ruby\n chocolatey_installer 'latest' do\n action :install\n end\n ```\n\n **Uninstall Chocolatey**\n\n ```ruby\n chocolatey_installer 'Some random verbiage' do\n action :uninstall\n end\n ```\n\n **Install Chocolatey with Parameters**\n\n ```ruby\n chocolatey_installer 'latest' do\n action :install\n download_url \"https://www.contoso.com/foo\"\n chocolatey_version '2.12.24'\n end\n ```\n\n **Upgrade Chocolatey with Parameters**\n\n ```ruby\n chocolatey_installer 'latest' do\n action :upgrade\n chocolatey_version '2.12.24'\n end\n ```\n DOC\n\n allowed_actions :install, :uninstall, :upgrade\n\n property :download_url, String,\n description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n\n property :chocolatey_version, String,\n description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n\n property :use_native_unzip, [TrueClass, FalseClass], default: false,\n description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n\n property :ignore_proxy, [TrueClass, FalseClass], default: false,\n description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n\n property :proxy_url, String,\n description: \"Specifies the proxy URL to use during the download.\"\n\n property :proxy_user, String,\n description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n\n property :proxy_password, String,\n description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n\n load_current_value do\n current_state = is_choco_installed?\n current_value_does_not_exist! if current_state == false\n current_state\n end\n\n def is_choco_installed?\n ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n end\n\n def get_choco_version\n powershell_exec(\"choco --version\").result\n end\n\n def existing_version",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "1369665177",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T06:06:30+00:00",
"comment_author": "jaymzh",
"comment_body": "Does it conform to gem versions? We have other version helpers if not. Gem versions are very strict.",
"pr_file_module": null
},
{
"comment_id": "1370147997",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T13:22:33+00:00",
"comment_author": "johnmccrae",
"comment_body": "I will convert that over then, I was just using that to have a consistent way of matching versions",
"pr_file_module": null
},
{
"comment_id": "1370235896",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T14:05:49+00:00",
"comment_author": "johnmccrae",
"comment_body": "BTW, got a pointer for me? Everything I read says to use Gem::Version",
"pr_file_module": null
},
{
"comment_id": "1370368983",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T15:06:29+00:00",
"comment_author": "tpowell-progress",
"comment_body": "If we're just comparing two *specific* gem versions, `Gem::Version` sounds good to me. If we're looking to have more general version specifiers (`~>`, etc...), it might still work, but we might want to consider how the user would expect things to work in context.",
"pr_file_module": null
},
{
"comment_id": "1370489394",
"repo_full_name": "chef/chef",
"pr_number": 14043,
"pr_file": "lib/chef/resource/chocolatey_installer.rb",
"discussion_id": "1369665177",
"commented_code": "@@ -0,0 +1,198 @@\n+class Chef\n+ class Resource\n+ class ChocolateyInstaller < Chef::Resource\n+ provides :chocolatey_installer\n+\n+ description \"Use the Chocolatey Installer resource to ensure that Choco is installed to your specification. Use the Chocolatey Feature resource to customize your install\"\n+ introduced \"18.3\"\n+ examples <<~DOC\n+ **Install Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ end\n+ ```\n+\n+ **Uninstall Chocolatey**\n+\n+ ```ruby\n+ chocolatey_installer 'Some random verbiage' do\n+ action :uninstall\n+ end\n+ ```\n+\n+ **Install Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :install\n+ download_url \"https://www.contoso.com/foo\"\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+\n+ **Upgrade Chocolatey with Parameters**\n+\n+ ```ruby\n+ chocolatey_installer 'latest' do\n+ action :upgrade\n+ chocolatey_version '2.12.24'\n+ end\n+ ```\n+ DOC\n+\n+ allowed_actions :install, :uninstall, :upgrade\n+\n+ property :download_url, String,\n+ description: \"The URL to download Chocolatey from. This defaults to the value of $env:ChocolateyDownloadUrl, if it is set, and otherwise falls back to the official Chocolatey community repository to download the Chocolatey package. It can be used for offline installation by providing a path to a Chocolatey.nupkg.\"\n+\n+ property :chocolatey_version, String,\n+ description: \"Specifies a target version of Chocolatey to install. By default, the latest stable version is installed. This will use the value in $env:ChocolateyVersion by default, if that environment variable is present. This parameter is ignored if download_url is set.\"\n+\n+ property :use_native_unzip, [TrueClass, FalseClass], default: false,\n+ description: \"If set, uses built-in Windows decompression tools instead of 7zip when unpacking the downloaded nupkg. This will be set by default if use_native_unzip is set to a value other than 'false' or '0'. This parameter will be ignored in PS 5+ in favour of using the Expand-Archive built in PowerShell cmdlet directly.\"\n+\n+ property :ignore_proxy, [TrueClass, FalseClass], default: false,\n+ description: \"If set, ignores any configured proxy. This will override any proxy environment variables or parameters. This will be set by default if ignore_proxy is set to a value other than 'false' or '0'.\"\n+\n+ property :proxy_url, String,\n+ description: \"Specifies the proxy URL to use during the download.\"\n+\n+ property :proxy_user, String,\n+ description: \"The username to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_password are set\"\n+\n+ property :proxy_password, String,\n+ description: \"The password to use to build a proxy credential with. Will be consumed by the proxy_credential property if both this property and proxy_user are set\"\n+\n+ load_current_value do\n+ current_state = is_choco_installed?\n+ current_value_does_not_exist! if current_state == false\n+ current_state\n+ end\n+\n+ def is_choco_installed?\n+ ::File.exist?(\"#{ENV[\"ALLUSERSPROFILE\"]}\\\\chocolatey\\\\bin\\\\choco.exe\")\n+ end\n+\n+ def get_choco_version\n+ powershell_exec(\"choco --version\").result\n+ end\n+\n+ def existing_version",
"comment_created_at": "2023-10-24T16:23:19+00:00",
"comment_author": "johnmccrae",
"comment_body": "I'm a total dork, ignore me. I understand now. I was overthinking this.",
"pr_file_module": null
}
]
},
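The thread above settles on `Gem::Version` for comparing two specific versions. Below is a minimal sketch of what that looks like, including `Gem::Requirement` for the pessimistic specifiers tpowell-progress mentions; the version strings here are illustrative, not taken from the resource.

```ruby
require "rubygems" # implicit in modern Ruby, shown for completeness

# Strict, semantic comparison of two specific versions.
current = Gem::Version.new("2.12.24")
target  = Gem::Version.new("2.13.0")
puts current < target # => true

# If general specifiers such as "~>" are needed, Gem::Requirement covers them.
puts Gem::Requirement.new("~> 2.12").satisfied_by?(current) # => true
```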
{
"discussion_id": "1092455427",
"pr_number": 13535,
"pr_file": "lib/chef/resource/apt_repository.rb",
"created_at": "2023-01-31T20:37:07+00:00",
"commented_code": "end.compact\n end\n\n # run the specified command and extract the public key ids\n # accepts the command so it can be used to extract both the current keys\n # and the new keys\n # @param [Array<String>] cmd the command to run\n #\n # @return [Array] an array of key ids\n def extract_public_keys_from_cmd(*cmd)\n so = shell_out(*cmd)\n # Sample output\n # pub:-:4096:1:D94AA3F0EFE21092:1336774248:::-:::scSC::::::23::0:\n so.stdout.split(/\\n/).map do |t|\n z = t.match(/(^pub:.?:\\d*:\\d*:\\w*:[\\d-]*):/)",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "1092455427",
"repo_full_name": "chef/chef",
"pr_number": 13535,
"pr_file": "lib/chef/resource/apt_repository.rb",
"discussion_id": "1092455427",
"commented_code": "@@ -187,6 +187,22 @@ def extract_fingerprints_from_cmd(*cmd)\n end.compact\n end\n \n+ # run the specified command and extract the public key ids\n+ # accepts the command so it can be used to extract both the current keys\n+ # and the new keys\n+ # @param [Array<String>] cmd the command to run\n+ #\n+ # @return [Array] an array of key ids\n+ def extract_public_keys_from_cmd(*cmd)\n+ so = shell_out(*cmd)\n+ # Sample output\n+ # pub:-:4096:1:D94AA3F0EFE21092:1336774248:::-:::scSC::::::23::0:\n+ so.stdout.split(/\\n/).map do |t|\n+ z = t.match(/(^pub:.?:\\d*:\\d*:\\w*:[\\d-]*):/)",
"comment_created_at": "2023-01-31T20:37:07+00:00",
"comment_author": "tpowell-progress",
"comment_body": "Suggest splitting on the `':'` and grabbing the first `n` fields rejoined with `':'` ... It's a little bit harder to sort out how many fields are being grabbed with the regex especially with most of this under the capture group, and the regex isn't performing significant validation, etc...",
"pr_file_module": null
}
]
},
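A sketch of the split-and-rejoin approach the reviewer suggests, using the sample line quoted in the comment. Taking the first six fields reproduces the span the original capture group matched; that field count is an assumption here.

```ruby
# Sample gpg --with-colons output line from the review comment.
line = "pub:-:4096:1:D94AA3F0EFE21092:1336774248:::-:::scSC::::::23::0:"

# Split on ":" and rejoin the leading fields instead of using a regex;
# the number of fields being grabbed is now explicit.
key_record = line.split(":").first(6).join(":") if line.start_with?("pub:")
puts key_record # => "pub:-:4096:1:D94AA3F0EFE21092:1336774248"
```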
{
"discussion_id": "930358767",
"pr_number": 13069,
"pr_file": "lib/chef/property.rb",
"created_at": "2022-07-26T19:56:06+00:00",
"commented_code": "#\n def required?(action = nil)\n if !action.nil? && options[:required].is_a?(Array)\n options[:required].include?(action)\n (options[:required] & Array(action)).any?",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "930358767",
"repo_full_name": "chef/chef",
"pr_number": 13069,
"pr_file": "lib/chef/property.rb",
"discussion_id": "930358767",
"commented_code": "@@ -305,7 +305,7 @@ def has_default?\n #\n def required?(action = nil)\n if !action.nil? && options[:required].is_a?(Array)\n- options[:required].include?(action)\n+ (options[:required] & Array(action)).any?",
"comment_created_at": "2022-07-26T19:56:06+00:00",
"comment_author": "marcparadise",
"comment_body": "How does this functionally differ from the original check? When running through some scenarios, I can't come up with one that returns a different result given pre-conditions of a non-nil action and :required having an Array. ",
"pr_file_module": null
},
{
"comment_id": "934692971",
"repo_full_name": "chef/chef",
"pr_number": 13069,
"pr_file": "lib/chef/property.rb",
"discussion_id": "930358767",
"commented_code": "@@ -305,7 +305,7 @@ def has_default?\n #\n def required?(action = nil)\n if !action.nil? && options[:required].is_a?(Array)\n- options[:required].include?(action)\n+ (options[:required] & Array(action)).any?",
"comment_created_at": "2022-08-01T16:05:49+00:00",
"comment_author": "sabat",
"comment_body": "> How does this functionally differ from the original check?\r\n\r\nThe original cannot handle a check for multiple actions. It's fine if it's checking for just :create for example, but if `action` is an array with both :create and :delete it would fail with a simple `include?` test. I wrote this over a year ago and the details of the context now escape me, but IIRC there are situations where this needs to test for multiple actions\u2014where `action` is going to be an array of actions, and `include?` only tests for the presence of a single object.",
"pr_file_module": null
}
]
},
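A standalone sketch of the difference under discussion: `include?` tests for the presence of a single object, so it never matches when the caller passes an array of actions, while the intersection form accepts either shape.

```ruby
required = [:create, :delete]

# include? only matches a single object, so an array argument fails.
puts required.include?(:create)             # => true
puts required.include?([:create, :delete])  # => false

# Array() normalizes a symbol or an array, and & intersects the two lists.
puts (required & Array(:create)).any?            # => true
puts (required & Array([:create, :delete])).any? # => true
```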
{
"discussion_id": "438615154",
"pr_number": 9976,
"pr_file": "lib/chef/provider/package/apt.rb",
"created_at": "2020-06-11T08:09:39+00:00",
"commented_code": "end\n\n def install_package(name, version)\n name = check_availability(name)",
"repo_full_name": "chef/chef",
"discussion_comments": [
{
"comment_id": "438615154",
"repo_full_name": "chef/chef",
"pr_number": 9976,
"pr_file": "lib/chef/provider/package/apt.rb",
"discussion_id": "438615154",
"commented_code": "@@ -89,6 +89,7 @@ def locked_packages\n end\n \n def install_package(name, version)\n+ name = check_availability(name)",
"comment_created_at": "2020-06-11T08:09:39+00:00",
"comment_author": "jaymzh",
"comment_body": "This only fixes... part of the bug.\r\n\r\nIt will ensure that if you try to purge `['a', 'b', 'c']` and `c` isn't a known package, that it won't try to purge `c`... which is good...\r\n\r\nBut the actual bug is that if `a` is installed, it'll automatically try to uninstall `b`, and `c` even if they are not installed.\r\n\r\nSo imagine:\r\n* a - installed\r\n* b - uninstalled, but available\r\n* c - uninstalled and unavailable\r\n\r\nCurrent behavior is Chef will try to purge `a`, `b` and `c`\r\n\r\nWith your PR it'll just try to purge `a` and `b`\r\n\r\nBut the *right* behavior is that it should only try to purge `a`.",
"pr_file_module": null
}
]
}
]
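For the apt purge discussion above, a hypothetical sketch of the behavior jaymzh describes as correct: act only on packages that are actually installed, not on every name that is merely available. `installed?` is a stand-in for the provider's real installation check, not the actual API.

```ruby
# `installed?` is hypothetical; the real provider consults the package cache.
def packages_to_purge(names)
  names.select { |name| installed?(name) }
end

# With a installed, b available-but-uninstalled, c unavailable:
# packages_to_purge(%w{a b c}) # => ["a"]
```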
