
@azure/cognitiveservices-face

Package Overview
Dependencies: 25
Maintainers: 6
Versions: 8

Comparing version 4.1.0 to 4.2.0
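Both sides of the diff below are the UMD browser bundle of the generated SDK client. For reference, here is a minimal usage sketch of the `FaceClient` that bundle exports; the `ApiKeyCredentials` helper comes from the `@azure/ms-rest-js` dependency the bundle requires, and the subscription key and endpoint shown are placeholders, not values from this package.

```typescript
import { ApiKeyCredentials } from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

// Placeholder credentials and endpoint -- substitute a real Face resource.
const credentials = new ApiKeyCredentials({
  inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" },
});
const client = new FaceClient(
  credentials,
  "https://<resource-name>.cognitiveservices.azure.com"
);

async function main(): Promise<void> {
  // detectWithUrl resolves to an array of DetectedFace results.
  const faces = await client.face.detectWithUrl("https://example.com/photo.jpg", {
    returnFaceId: true,
    returnFaceAttributes: ["age", "glasses", "headPose"],
  });
  for (const face of faces) {
    console.log(face.faceId, face.faceRectangle, face.faceAttributes);
  }
}

main().catch(console.error);
```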


dist/cognitiveservices-face.min.js

@@ -1,1 +1,1 @@

!function(e,a){"object"==typeof exports&&"undefined"!=typeof module?a(exports,require("@azure/ms-rest-js")):"function"==typeof define&&define.amd?define(["exports","@azure/ms-rest-js"],a):a(((e=e||self).Azure=e.Azure||{},e.Azure.CognitiveservicesFace={}),e.msRest)}(this,function(e,o){"use strict";var t=function(e,a){return(t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var r in a)a.hasOwnProperty(r)&&(e[r]=a[r])})(e,a)};function a(e,a){function r(){this.constructor=e}t(e,a),e.prototype=null===a?Object.create(a):(r.prototype=a.prototype,new r)}var r=function(){return(r=Object.assign||function(e){for(var a,r=1,t=arguments.length;r<t;r++)for(var s in a=arguments[r])Object.prototype.hasOwnProperty.call(a,s)&&(e[s]=a[s]);return e}).apply(this,arguments)},s=Object.freeze({__proto__:null}),i={serializedName:"Error",type:{name:"Composite",className:"ErrorModel",modelProperties:{code:{serializedName:"code",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},p={serializedName:"APIError",type:{name:"Composite",className:"APIError",modelProperties:{error:{serializedName:"error",type:{name:"Composite",className:"ErrorModel"}}}}},n={serializedName:"FaceRectangle",type:{name:"Composite",className:"FaceRectangle",modelProperties:{width:{required:!0,serializedName:"width",type:{name:"Number"}},height:{required:!0,serializedName:"height",type:{name:"Number"}},left:{required:!0,serializedName:"left",type:{name:"Number"}},top:{required:!0,serializedName:"top",type:{name:"Number"}}}}},d={serializedName:"Coordinate",type:{name:"Composite",className:"Coordinate",modelProperties:{x:{required:!0,serializedName:"x",type:{name:"Number"}},y:{required:!0,serializedName:"y",type:{name:"Number"}}}}},l={serializedName:"FaceLandmarks",type:{name:"Composite",className:"FaceLandmarks",modelProperties:{pupilLeft:{serializedName:"pupilLeft",type:{name:"Composite",className:"Coordinate"}},pupilRight:{serializedName:"pupilRight",type:{name:"Composite",className:"Coordinate"}},noseTip:{serializedName:"noseTip",type:{name:"Composite",className:"Coordinate"}},mouthLeft:{serializedName:"mouthLeft",type:{name:"Composite",className:"Coordinate"}},mouthRight:{serializedName:"mouthRight",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftOuter:{serializedName:"eyebrowLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftInner:{serializedName:"eyebrowLeftInner",type:{name:"Composite",className:"Coordinate"}},eyeLeftOuter:{serializedName:"eyeLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyeLeftTop:{serializedName:"eyeLeftTop",type:{name:"Composite",className:"Coordinate"}},eyeLeftBottom:{serializedName:"eyeLeftBottom",type:{name:"Composite",className:"Coordinate"}},eyeLeftInner:{serializedName:"eyeLeftInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightInner:{serializedName:"eyebrowRightInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightOuter:{serializedName:"eyebrowRightOuter",type:{name:"Composite",className:"Coordinate"}},eyeRightInner:{serializedName:"eyeRightInner",type:{name:"Composite",className:"Coordinate"}},eyeRightTop:{serializedName:"eyeRightTop",type:{name:"Composite",className:"Coordinate"}},eyeRightBottom:{serializedName:"eyeRightBottom",type:{name:"Composite",className:"Coordinate"}},eyeRightOuter:{serializedName:"eyeRightOuter",type:{name:"Composite",className:"Coordinate"}},noseRootLeft:{serializedName:"noseRootLeft",type:{name:"Composite",className:"Coordinate"}},nose
RootRight:{serializedName:"noseRootRight",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarTop:{serializedName:"noseLeftAlarTop",type:{name:"Composite",className:"Coordinate"}},noseRightAlarTop:{serializedName:"noseRightAlarTop",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarOutTip:{serializedName:"noseLeftAlarOutTip",type:{name:"Composite",className:"Coordinate"}},noseRightAlarOutTip:{serializedName:"noseRightAlarOutTip",type:{name:"Composite",className:"Coordinate"}},upperLipTop:{serializedName:"upperLipTop",type:{name:"Composite",className:"Coordinate"}},upperLipBottom:{serializedName:"upperLipBottom",type:{name:"Composite",className:"Coordinate"}},underLipTop:{serializedName:"underLipTop",type:{name:"Composite",className:"Coordinate"}},underLipBottom:{serializedName:"underLipBottom",type:{name:"Composite",className:"Coordinate"}}}}},m={serializedName:"FacialHair",type:{name:"Composite",className:"FacialHair",modelProperties:{moustache:{nullable:!1,serializedName:"moustache",type:{name:"Number"}},beard:{nullable:!1,serializedName:"beard",type:{name:"Number"}},sideburns:{nullable:!1,serializedName:"sideburns",type:{name:"Number"}}}}},u={serializedName:"HeadPose",type:{name:"Composite",className:"HeadPose",modelProperties:{roll:{nullable:!1,serializedName:"roll",type:{name:"Number"}},yaw:{nullable:!1,serializedName:"yaw",type:{name:"Number"}},pitch:{nullable:!1,serializedName:"pitch",type:{name:"Number"}}}}},c={serializedName:"Emotion",type:{name:"Composite",className:"Emotion",modelProperties:{anger:{nullable:!1,serializedName:"anger",type:{name:"Number"}},contempt:{nullable:!1,serializedName:"contempt",type:{name:"Number"}},disgust:{nullable:!1,serializedName:"disgust",type:{name:"Number"}},fear:{nullable:!1,serializedName:"fear",type:{name:"Number"}},happiness:{nullable:!1,serializedName:"happiness",type:{name:"Number"}},neutral:{nullable:!1,serializedName:"neutral",type:{name:"Number"}},sadness:{nullable:!1,serializedName:"sadness",type:{name:"Number"}},surprise:{nullable:!1,serializedName:"surprise",type:{name:"Number"}}}}},y={serializedName:"HairColor",type:{name:"Composite",className:"HairColor",modelProperties:{color:{nullable:!1,serializedName:"color",type:{name:"Enum",allowedValues:["unknown","white","gray","blond","brown","red","black","other"]}},confidence:{nullable:!1,serializedName:"confidence",type:{name:"Number"}}}}},h={serializedName:"Hair",type:{name:"Composite",className:"Hair",modelProperties:{bald:{nullable:!1,serializedName:"bald",type:{name:"Number"}},invisible:{nullable:!1,serializedName:"invisible",type:{name:"Boolean"}},hairColor:{serializedName:"hairColor",type:{name:"Sequence",element:{type:{name:"Composite",className:"HairColor"}}}}}}},N={serializedName:"Makeup",type:{name:"Composite",className:"Makeup",modelProperties:{eyeMakeup:{nullable:!1,serializedName:"eyeMakeup",type:{name:"Boolean"}},lipMakeup:{nullable:!1,serializedName:"lipMakeup",type:{name:"Boolean"}}}}},f={serializedName:"Occlusion",type:{name:"Composite",className:"Occlusion",modelProperties:{foreheadOccluded:{nullable:!1,serializedName:"foreheadOccluded",type:{name:"Boolean"}},eyeOccluded:{nullable:!1,serializedName:"eyeOccluded",type:{name:"Boolean"}},mouthOccluded:{nullable:!1,serializedName:"mouthOccluded",type:{name:"Boolean"}}}}},g={serializedName:"Accessory",type:{name:"Composite",className:"Accessory",modelProperties:{type:{nullable:!1,serializedName:"type",type:{name:"Enum",allowedValues:["headWear","glasses","mask"]}},confidence:{nullable:!1,serializedName:"confid
ence",type:{name:"Number"}}}}},P={serializedName:"Blur",type:{name:"Composite",className:"Blur",modelProperties:{blurLevel:{nullable:!1,serializedName:"blurLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},z={serializedName:"Exposure",type:{name:"Composite",className:"Exposure",modelProperties:{exposureLevel:{nullable:!1,serializedName:"exposureLevel",type:{name:"Enum",allowedValues:["UnderExposure","GoodExposure","OverExposure"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},I={serializedName:"Noise",type:{name:"Composite",className:"Noise",modelProperties:{noiseLevel:{nullable:!1,serializedName:"noiseLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},q={serializedName:"FaceAttributes",type:{name:"Composite",className:"FaceAttributes",modelProperties:{age:{serializedName:"age",type:{name:"Number"}},gender:{serializedName:"gender",type:{name:"Enum",allowedValues:["male","female"]}},smile:{serializedName:"smile",type:{name:"Number"}},facialHair:{serializedName:"facialHair",type:{name:"Composite",className:"FacialHair"}},glasses:{serializedName:"glasses",type:{name:"Enum",allowedValues:["noGlasses","readingGlasses","sunglasses","swimmingGoggles"]}},headPose:{serializedName:"headPose",type:{name:"Composite",className:"HeadPose"}},emotion:{serializedName:"emotion",type:{name:"Composite",className:"Emotion"}},hair:{serializedName:"hair",type:{name:"Composite",className:"Hair"}},makeup:{serializedName:"makeup",type:{name:"Composite",className:"Makeup"}},occlusion:{serializedName:"occlusion",type:{name:"Composite",className:"Occlusion"}},accessories:{serializedName:"accessories",type:{name:"Sequence",element:{type:{name:"Composite",className:"Accessory"}}}},blur:{serializedName:"blur",type:{name:"Composite",className:"Blur"}},exposure:{serializedName:"exposure",type:{name:"Composite",className:"Exposure"}},noise:{serializedName:"noise",type:{name:"Composite",className:"Noise"}}}}},M={serializedName:"DetectedFace",type:{name:"Composite",className:"DetectedFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}},faceRectangle:{required:!0,serializedName:"faceRectangle",type:{name:"Composite",className:"FaceRectangle"}},faceLandmarks:{serializedName:"faceLandmarks",type:{name:"Composite",className:"FaceLandmarks"}},faceAttributes:{serializedName:"faceAttributes",type:{name:"Composite",className:"FaceAttributes"}}}}},b={serializedName:"FindSimilarRequest",type:{name:"Composite",className:"FindSimilarRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},faceListId:{serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largeFaceListId:{serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},faceIds:{serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:20,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"matchPerson",type:{name:"Enum",allowedValues:["matchPerson","matchFace"]}}}}},F={serializedName:"SimilarFace",type:{name:"Composite",className:"Si
milarFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},persistedFaceId:{serializedName:"persistedFaceId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},L={serializedName:"GroupRequest",type:{name:"Composite",className:"GroupRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},C={serializedName:"GroupResult",type:{name:"Composite",className:"GroupResult",modelProperties:{groups:{required:!0,serializedName:"groups",type:{name:"Sequence",element:{type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}},messyGroup:{serializedName:"messyGroup",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},R={serializedName:"IdentifyRequest",type:{name:"Composite",className:"IdentifyRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:10},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:1,constraints:{InclusiveMaximum:5,InclusiveMinimum:1},type:{name:"Number"}},confidenceThreshold:{serializedName:"confidenceThreshold",type:{name:"Number"}}}}},S={serializedName:"IdentifyCandidate",type:{name:"Composite",className:"IdentifyCandidate",modelProperties:{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},G={serializedName:"IdentifyResult",type:{name:"Composite",className:"IdentifyResult",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},candidates:{required:!0,serializedName:"candidates",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyCandidate"}}}}}}},O={serializedName:"VerifyFaceToPersonRequest",type:{name:"Composite",className:"VerifyFaceToPersonRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}}}}},T={serializedName:"VerifyFaceToFaceRequest",type:{name:"Composite",className:"VerifyFaceToFaceRequest",modelProperties:{faceId1:{required:!0,serializedName:"faceId1",type:{name:"Uuid"}},faceId2:{required:!0,serializedName:"faceId2",type:{name:"Uuid"}}}}},E={serializedName:"VerifyResult",type:{name:"Composite",className:"VerifyResult",modelProperties:{isIdentical:{required:!0,serializedName:"isIdentical",type:{name:"Boolean"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},D={serializedName:"PersistedFace",type:{name:"Composite",className:"PersistedFace",modelProperties:{persistedFaceId:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}},userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},A={serializedName:"NameAndUserDataContract",type:{name:"Composite",className:"NameAndUserDataContract",modelProperties:{name:{serializedName:"name",constraints:{MaxLength:128},type:{name:"String"}},userData:{se
rializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},_={serializedName:"MetaDataContract",type:{name:"Composite",className:"MetaDataContract",modelProperties:r(r({},A.type.modelProperties),{recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}})}},U={serializedName:"FaceList",type:{name:"Composite",className:"FaceList",modelProperties:r(r({},_.type.modelProperties),{faceListId:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},persistedFaces:{serializedName:"persistedFaces",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}})}},x={serializedName:"PersonGroup",type:{name:"Composite",className:"PersonGroup",modelProperties:r(r({},_.type.modelProperties),{personGroupId:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},w={serializedName:"Person",type:{name:"Composite",className:"Person",modelProperties:r(r({},A.type.modelProperties),{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},persistedFaceIds:{serializedName:"persistedFaceIds",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}})}},B={serializedName:"LargeFaceList",type:{name:"Composite",className:"LargeFaceList",modelProperties:r(r({},_.type.modelProperties),{largeFaceListId:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},v={serializedName:"LargePersonGroup",type:{name:"Composite",className:"LargePersonGroup",modelProperties:r(r({},_.type.modelProperties),{largePersonGroupId:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},H={serializedName:"UpdateFaceRequest",type:{name:"Composite",className:"UpdateFaceRequest",modelProperties:{userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},V={serializedName:"TrainingStatus",type:{name:"Composite",className:"TrainingStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["nonstarted","running","succeeded","failed"]}},created:{required:!0,serializedName:"createdDateTime",type:{name:"DateTime"}},lastAction:{serializedName:"lastActionDateTime",type:{name:"DateTime"}},lastSuccessfulTraining:{serializedName:"lastSuccessfulTrainingDateTime",type:{name:"DateTime"}},message:{serializedName:"message",type:{name:"String"}}}}},k={serializedName:"ApplySnapshotRequest",type:{name:"Composite",className:"ApplySnapshotRequest",modelProperties:{objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"CreateNew",type:{name:"Enum",allowedValues:["CreateNew"]}}}}},j={serializedName:"Snapshot",type:{name:"Composite",className:"Snapshot",modelProperties:{id:{required:!0,serializedName:"id",type:{name:"Uuid"}},account:{required:!0,serializedName:"account",type:{name:"String"}},type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastUpdateTime:{required:!0,
serializedName:"lastUpdateTime",type:{name:"DateTime"}}}}},$={serializedName:"TakeSnapshotRequest",type:{name:"Composite",className:"TakeSnapshotRequest",modelProperties:{type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}},objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},Q={serializedName:"UpdateSnapshotRequest",type:{name:"Composite",className:"UpdateSnapshotRequest",modelProperties:{applyScope:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},W={serializedName:"OperationStatus",type:{name:"Composite",className:"OperationStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["notstarted","running","succeeded","failed"]}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastActionTime:{serializedName:"lastActionTime",type:{name:"DateTime"}},resourceLocation:{serializedName:"resourceLocation",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},J={serializedName:"ImageUrl",type:{name:"Composite",className:"ImageUrl",modelProperties:{url:{required:!0,serializedName:"url",type:{name:"String"}}}}},K={serializedName:"snapshot-take-headers",type:{name:"Composite",className:"SnapshotTakeHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},X={serializedName:"snapshot-apply-headers",type:{name:"Composite",className:"SnapshotApplyHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},Y=Object.freeze({__proto__:null,ErrorModel:i,APIError:p,FaceRectangle:n,Coordinate:d,FaceLandmarks:l,FacialHair:m,HeadPose:u,Emotion:c,HairColor:y,Hair:h,Makeup:N,Occlusion:f,Accessory:g,Blur:P,Exposure:z,Noise:I,FaceAttributes:q,DetectedFace:M,FindSimilarRequest:b,SimilarFace:F,GroupRequest:L,GroupResult:C,IdentifyRequest:R,IdentifyCandidate:S,IdentifyResult:G,VerifyFaceToPersonRequest:O,VerifyFaceToFaceRequest:T,VerifyResult:E,PersistedFace:D,NameAndUserDataContract:A,MetaDataContract:_,FaceList:U,PersonGroup:x,Person:w,LargeFaceList:B,LargePersonGroup:v,UpdateFaceRequest:H,TrainingStatus:V,ApplySnapshotRequest:k,Snapshot:j,TakeSnapshotRequest:$,UpdateSnapshotRequest:Q,OperationStatus:W,ImageUrl:J,SnapshotTakeHeaders:K,SnapshotApplyHeaders:X}),Z=Object.freeze({__proto__:null,Accessory:g,APIError:p,Blur:P,Coordinate:d,DetectedFace:M,Emotion:c,ErrorModel:i,Exposure:z,FaceAttributes:q,FaceLandmarks:l,FaceRectangle:n,FacialHair:m,FindSimilarRequest:b,GroupRequest:L,GroupResult:C,Hair:h,HairColor:y,HeadPose:u,IdentifyCandidate:S,IdentifyRequest:R,IdentifyResult:G,ImageUrl:J,Makeup:N,Noise:I,Occlusion:f,SimilarFace:F,VerifyFaceToFaceRequest:T,VerifyFaceToPersonRequest:O,VerifyResult:E}),ee={parameterPath:["options","applyScope"],mapper:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},collectionFormat:o.QueryCollectionFormat.Csv},ae={parameterPath:["options","detectionModel"],mapper:{serializedName:"detectionModel",defaultValue:"detection_01",type:{name:"String"}}},re={parameterPath:"endpoint",mapper:{required:!0,serialized
Name:"Endpoint",defaultValue:"",type:{name:"String"}},skipEncoding:!0},te={parameterPath:"faceListId",mapper:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},se={parameterPath:"largeFaceListId",mapper:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},oe={parameterPath:"largePersonGroupId",mapper:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},ie={parameterPath:"persistedFaceId",mapper:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}}},pe={parameterPath:"personGroupId",mapper:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},ne={parameterPath:"personId",mapper:{required:!0,serializedName:"personId",type:{name:"Uuid"}}},de={parameterPath:["options","recognitionModel"],mapper:{serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}},le={parameterPath:["options","returnFaceAttributes"],mapper:{serializedName:"returnFaceAttributes",type:{name:"Sequence",element:{type:{name:"Enum",allowedValues:["age","gender","headPose","smile","facialHair","glasses","emotion","hair","makeup","occlusion","accessories","blur","exposure","noise"]}}}},collectionFormat:o.QueryCollectionFormat.Csv},me={parameterPath:["options","returnFaceId"],mapper:{serializedName:"returnFaceId",defaultValue:!0,type:{name:"Boolean"}}},ue={parameterPath:["options","returnFaceLandmarks"],mapper:{serializedName:"returnFaceLandmarks",defaultValue:!1,type:{name:"Boolean"}}},ce={parameterPath:["options","returnRecognitionModel"],mapper:{serializedName:"returnRecognitionModel",defaultValue:!1,type:{name:"Boolean"}}},ye={parameterPath:"snapshotId",mapper:{required:!0,serializedName:"snapshotId",type:{name:"Uuid"}}},he={parameterPath:["options","start"],mapper:{serializedName:"start",type:{name:"String"}}},Ne={parameterPath:["options","start"],mapper:{serializedName:"start",constraints:{MaxLength:64},type:{name:"String"}}},fe={parameterPath:["options","targetFace"],mapper:{serializedName:"targetFace",type:{name:"Sequence",element:{type:{name:"Number"}}}},collectionFormat:o.QueryCollectionFormat.Csv},ge={parameterPath:["options","top"],mapper:{serializedName:"top",constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},Pe={parameterPath:["options","top"],mapper:{serializedName:"top",defaultValue:1e3,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},ze={parameterPath:["options","userData"],mapper:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}},Ie=(qe.prototype.findSimilar=function(e,a,r){return this.client.sendOperationRequest({faceId:e,options:a},be,r)},qe.prototype.group=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},Fe,r)},qe.prototype.identify=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},Le,r)},qe.prototype.verifyFaceToFace=function(e,a,r,t){return this.client.sendOperationRequest({faceId1:e,faceId2:a,options:r},Ce,t)},qe.prototype.detectWithUrl=function(e,a,r){return this.client.sendOperationRequest({url:e,options:a},Re,r)},qe.prototype.verifyFaceToPerson=function(e,a,r,t){return this.client.sendOperationRequest({faceId:e,personId:a,options:r},Se,t)},qe.prototype.detectWithStream=function(e,a,r){return this.client.sendOperationRequest({image:e,options:a},Ge,r)},qe);function 
qe(e){this.client=e}var Me=new o.Serializer(Z),be={httpMethod:"POST",path:"findsimilars",urlParameters:[re],requestBody:{parameterPath:{faceId:"faceId",faceListId:["options","faceListId"],largeFaceListId:["options","largeFaceListId"],faceIds:["options","faceIds"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],mode:["options","mode"]},mapper:r(r({},b),{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"SimilarFace"}}}}},default:{bodyMapper:p}},serializer:Me},Fe={httpMethod:"POST",path:"group",urlParameters:[re],requestBody:{parameterPath:{faceIds:"faceIds"},mapper:r(r({},L),{required:!0})},responses:{200:{bodyMapper:C},default:{bodyMapper:p}},serializer:Me},Le={httpMethod:"POST",path:"identify",urlParameters:[re],requestBody:{parameterPath:{faceIds:"faceIds",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],confidenceThreshold:["options","confidenceThreshold"]},mapper:r(r({},R),{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyResult"}}}}},default:{bodyMapper:p}},serializer:Me},Ce={httpMethod:"POST",path:"verify",urlParameters:[re],requestBody:{parameterPath:{faceId1:"faceId1",faceId2:"faceId2"},mapper:r(r({},T),{required:!0})},responses:{200:{bodyMapper:E},default:{bodyMapper:p}},serializer:Me},Re={httpMethod:"POST",path:"detect",urlParameters:[re],queryParameters:[me,ue,le,de,ce,ae],requestBody:{parameterPath:{url:"url"},mapper:r(r({},J),{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:Me},Se={httpMethod:"POST",path:"verify",urlParameters:[re],requestBody:{parameterPath:{faceId:"faceId",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],personId:"personId"},mapper:r(r({},O),{required:!0})},responses:{200:{bodyMapper:E},default:{bodyMapper:p}},serializer:Me},Ge={httpMethod:"POST",path:"detect",urlParameters:[re],queryParameters:[me,ue,le,de,ce,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:Me},Oe=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:U,ImageUrl:J,LargeFaceList:B,LargePersonGroup:v,MetaDataContract:_,NameAndUserDataContract:A,PersistedFace:D,Person:w,PersonGroup:x,UpdateFaceRequest:H}),Te=(Ee.prototype.create=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ae,r)},Ee.prototype.list=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},_e,r)},Ee.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},Ue,t)},Ee.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},xe,t)},Ee.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},we,t)},Ee.prototype.deleteFace=function(e,a,r,t,s){return 
this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},Be,s)},Ee.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},ve,s)},Ee.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},He,s)},Ee.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,url:r,options:t},Ve,s)},Ee.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,image:r,options:t},ke,s)},Ee);function Ee(e){this.client=e}var De=new o.Serializer(Oe),Ae={httpMethod:"POST",path:"persongroups/{personGroupId}/persons",urlParameters:[re,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:De},_e={httpMethod:"GET",path:"persongroups/{personGroupId}/persons",urlParameters:[re,pe],queryParameters:[he,ge],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:De},Ue={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[re,pe,ne],responses:{200:{},default:{bodyMapper:p}},serializer:De},xe={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[re,pe,ne],responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:De},we={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[re,pe,ne],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:De},Be={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,pe,ne,ie],responses:{200:{},default:{bodyMapper:p}},serializer:De},ve={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,pe,ne,ie],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:De},He={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,pe,ne,ie],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r(r({},H),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:De},Ve={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,pe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r(r({},J),{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:De},ke={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,pe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:De},je=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:U,LargeFaceList:B,LargePersonGroup:v,MetaDataContract:_,NameAndUserDataContract:A,PersistedFace:D,Person:w,PersonGroup:x,TrainingStatus:V}),$e=(Qe.prototype.create=function(e,a,r){return 
this.client.sendOperationRequest({personGroupId:e,options:a},Je,r)},Qe.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ke,r)},Qe.prototype.get=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Xe,r)},Qe.prototype.update=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ye,r)},Qe.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ze,r)},Qe.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},ea,a)},Qe.prototype.train=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},aa,r)},Qe);function Qe(e){this.client=e}var We=new o.Serializer(je),Je={httpMethod:"PUT",path:"persongroups/{personGroupId}",urlParameters:[re,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:We},Ke={httpMethod:"DELETE",path:"persongroups/{personGroupId}",urlParameters:[re,pe],responses:{200:{},default:{bodyMapper:p}},serializer:We},Xe={httpMethod:"GET",path:"persongroups/{personGroupId}",urlParameters:[re,pe],queryParameters:[ce],responses:{200:{bodyMapper:x},default:{bodyMapper:p}},serializer:We},Ye={httpMethod:"PATCH",path:"persongroups/{personGroupId}",urlParameters:[re,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:We},Ze={httpMethod:"GET",path:"persongroups/{personGroupId}/training",urlParameters:[re,pe],responses:{200:{bodyMapper:V},default:{bodyMapper:p}},serializer:We},ea={httpMethod:"GET",path:"persongroups",urlParameters:[re],queryParameters:[Ne,Pe,ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersonGroup"}}}}},default:{bodyMapper:p}},serializer:We},aa={httpMethod:"POST",path:"persongroups/{personGroupId}/train",urlParameters:[re,pe],responses:{202:{},default:{bodyMapper:p}},serializer:We},ra=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:U,ImageUrl:J,LargeFaceList:B,LargePersonGroup:v,MetaDataContract:_,NameAndUserDataContract:A,PersistedFace:D,Person:w,PersonGroup:x}),ta=(sa.prototype.create=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},ia,r)},sa.prototype.get=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},pa,r)},sa.prototype.update=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},na,r)},sa.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},da,r)},sa.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},la,a)},sa.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,persistedFaceId:a,options:r},ma,t)},sa.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,url:a,options:r},ua,t)},sa.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,image:a,options:r},ca,t)},sa);function sa(e){this.client=e}var oa=new 
o.Serializer(ra),ia={httpMethod:"PUT",path:"facelists/{faceListId}",urlParameters:[re,te],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:oa},pa={httpMethod:"GET",path:"facelists/{faceListId}",urlParameters:[re,te],queryParameters:[ce],responses:{200:{bodyMapper:U},default:{bodyMapper:p}},serializer:oa},na={httpMethod:"PATCH",path:"facelists/{faceListId}",urlParameters:[re,te],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:oa},da={httpMethod:"DELETE",path:"facelists/{faceListId}",urlParameters:[re,te],responses:{200:{},default:{bodyMapper:p}},serializer:oa},la={httpMethod:"GET",path:"facelists",urlParameters:[re],queryParameters:[ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"FaceList"}}}}},default:{bodyMapper:p}},serializer:oa},ma={httpMethod:"DELETE",path:"facelists/{faceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,te,ie],responses:{200:{},default:{bodyMapper:p}},serializer:oa},ua={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[re,te],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r(r({},J),{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:oa},ca={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[re,te],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:oa},ya=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:U,ImageUrl:J,LargeFaceList:B,LargePersonGroup:v,MetaDataContract:_,NameAndUserDataContract:A,PersistedFace:D,Person:w,PersonGroup:x,UpdateFaceRequest:H}),ha=(Na.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},ga,r)},Na.prototype.list=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Pa,r)},Na.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},za,t)},Na.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},Ia,t)},Na.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},qa,t)},Na.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Ma,s)},Na.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},ba,s)},Na.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Fa,s)},Na.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,url:r,options:t},La,s)},Na.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,image:r,options:t},Ca,s)},Na);function Na(e){this.client=e}var fa=new 
o.Serializer(ya),ga={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[re,oe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:fa},Pa={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[re,oe],queryParameters:[he,ge],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:fa},za={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[re,oe,ne],responses:{200:{},default:{bodyMapper:p}},serializer:fa},Ia={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[re,oe,ne],responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:fa},qa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[re,oe,ne],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:fa},Ma={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,oe,ne,ie],responses:{200:{},default:{bodyMapper:p}},serializer:fa},ba={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,oe,ne,ie],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:fa},Fa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,oe,ne,ie],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r(r({},H),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:fa},La={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,oe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r(r({},J),{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:fa},Ca={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,oe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:fa},Ra=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:U,LargeFaceList:B,LargePersonGroup:v,MetaDataContract:_,NameAndUserDataContract:A,PersistedFace:D,Person:w,PersonGroup:x,TrainingStatus:V}),Sa=(Ga.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ta,r)},Ga.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ea,r)},Ga.prototype.get=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Da,r)},Ga.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Aa,r)},Ga.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},_a,r)},Ga.prototype.list=function(e,a){return 
this.client.sendOperationRequest({options:e},Ua,a)},Ga.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},xa,r)},Ga);function Ga(e){this.client=e}var Oa=new o.Serializer(Ra),Ta={httpMethod:"PUT",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Oa},Ea={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],responses:{200:{},default:{bodyMapper:p}},serializer:Oa},Da={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],queryParameters:[ce],responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:Oa},Aa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Oa},_a={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/training",urlParameters:[re,oe],responses:{200:{bodyMapper:V},default:{bodyMapper:p}},serializer:Oa},Ua={httpMethod:"GET",path:"largepersongroups",urlParameters:[re],queryParameters:[Ne,Pe,ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargePersonGroup"}}}}},default:{bodyMapper:p}},serializer:Oa},xa={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/train",urlParameters:[re,oe],responses:{202:{},default:{bodyMapper:p}},serializer:Oa},wa=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:U,ImageUrl:J,LargeFaceList:B,LargePersonGroup:v,MetaDataContract:_,NameAndUserDataContract:A,PersistedFace:D,Person:w,PersonGroup:x,TrainingStatus:V,UpdateFaceRequest:H}),Ba=(va.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Va,r)},va.prototype.get=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ka,r)},va.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ja,r)},va.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},$a,r)},va.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Qa,r)},va.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Wa,a)},va.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Ja,r)},va.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Ka,t)},va.prototype.getFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Xa,t)},va.prototype.updateFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Ya,t)},va.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,url:a,options:r},Za,t)},va.prototype.listFaces=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},er,r)},va.prototype.addFaceFromStream=function(e,a,r,t){return 
this.client.sendOperationRequest({largeFaceListId:e,image:a,options:r},ar,t)},va);function va(e){this.client=e}var Ha=new o.Serializer(wa),Va={httpMethod:"PUT",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Ha},ka={httpMethod:"GET",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],queryParameters:[ce],responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:Ha},ja={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},A),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Ha},$a={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],responses:{200:{},default:{bodyMapper:p}},serializer:Ha},Qa={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/training",urlParameters:[re,se],responses:{200:{bodyMapper:V},default:{bodyMapper:p}},serializer:Ha},Wa={httpMethod:"GET",path:"largefacelists",urlParameters:[re],queryParameters:[ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargeFaceList"}}}}},default:{bodyMapper:p}},serializer:Ha},Ja={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/train",urlParameters:[re,se],responses:{202:{},default:{bodyMapper:p}},serializer:Ha},Ka={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,se,ie],responses:{200:{},default:{bodyMapper:p}},serializer:Ha},Xa={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,se,ie],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Ha},Ya={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,se,ie],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r(r({},H),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Ha},Za={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[re,se],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r(r({},J),{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Ha},er={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[re,se],queryParameters:[he,ge],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}},default:{bodyMapper:p}},serializer:Ha},ar={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[re,se],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Ha},rr=Object.freeze({__proto__:null,APIError:p,ApplySnapshotRequest:k,ErrorModel:i,OperationStatus:W,Snapshot:j,SnapshotApplyHeaders:X,SnapshotTakeHeaders:K,TakeSnapshotRequest:$,UpdateSnapshotRequest:Q}),tr=(sr.prototype.take=function(e,a,r,t,s){return this.client.sendOperationRequest({type:e,objectId:a,applyScope:r,options:t},pr,s)},sr.prototype.list=function(e,a){return 
this.client.sendOperationRequest({options:e},nr,a)},sr.prototype.get=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},dr,r)},sr.prototype.update=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},lr,r)},sr.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},mr,r)},sr.prototype.apply=function(e,a,r,t){return this.client.sendOperationRequest({snapshotId:e,objectId:a,options:r},ur,t)},sr.prototype.getOperationStatus=function(e,a,r){return this.client.sendOperationRequest({operationId:e,options:a},cr,r)},sr);function sr(e){this.client=e}var or,ir=new o.Serializer(rr),pr={httpMethod:"POST",path:"snapshots",urlParameters:[re],requestBody:{parameterPath:{type:"type",objectId:"objectId",applyScope:"applyScope",userData:["options","userData"]},mapper:r(r({},$),{required:!0})},responses:{202:{headersMapper:K},default:{bodyMapper:p}},serializer:ir},nr={httpMethod:"GET",path:"snapshots",urlParameters:[re],queryParameters:[{parameterPath:["options","type"],mapper:{serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}}},ee],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Snapshot"}}}}},default:{bodyMapper:p}},serializer:ir},dr={httpMethod:"GET",path:"snapshots/{snapshotId}",urlParameters:[re,ye],responses:{200:{bodyMapper:j},default:{bodyMapper:p}},serializer:ir},lr={httpMethod:"PATCH",path:"snapshots/{snapshotId}",urlParameters:[re,ye],requestBody:{parameterPath:{applyScope:["options","applyScope"],userData:["options","userData"]},mapper:r(r({},Q),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ir},mr={httpMethod:"DELETE",path:"snapshots/{snapshotId}",urlParameters:[re,ye],responses:{200:{},default:{bodyMapper:p}},serializer:ir},ur={httpMethod:"POST",path:"snapshots/{snapshotId}/apply",urlParameters:[re,ye],requestBody:{parameterPath:{objectId:"objectId",mode:["options","mode"]},mapper:r(r({},k),{required:!0})},responses:{202:{headersMapper:X},default:{bodyMapper:p}},serializer:ir},cr={httpMethod:"GET",path:"operations/{operationId}",urlParameters:[re,{parameterPath:"operationId",mapper:{required:!0,serializedName:"operationId",type:{name:"Uuid"}}}],responses:{200:{bodyMapper:W},default:{bodyMapper:p}},serializer:ir},yr=(a(hr,or=o.ServiceClient),hr);function hr(e,a,r){var t,s=this;if(null==a)throw new Error("'endpoint' cannot be null.");if(null==e)throw new Error("'credentials' cannot be null.");return(r=r||{}).userAgent||(t=o.getDefaultUserAgentValue(),r.userAgent="@azure/cognitiveservices-face/4.1.0 "+t),(s=or.call(this,e,r)||this).baseUri="{Endpoint}/face/v1.0",s.requestContentType="application/json; charset=utf-8",s.endpoint=a,s.credentials=e,s}var Nr,fr=(a(gr,Nr=yr),gr);function gr(e,a,r){var t=Nr.call(this,e,a,r)||this;return t.face=new Ie(t),t.personGroupPerson=new Te(t),t.personGroup=new $e(t),t.faceList=new ta(t),t.largePersonGroupPerson=new ha(t),t.largePersonGroup=new Sa(t),t.largeFaceList=new Ba(t),t.snapshot=new tr(t),t}e.Face=Ie,e.FaceClient=fr,e.FaceClientContext=yr,e.FaceListOperations=ta,e.FaceMappers=Y,e.FaceModels=s,e.LargeFaceListOperations=Ba,e.LargePersonGroupOperations=Sa,e.LargePersonGroupPerson=ha,e.PersonGroupOperations=$e,e.PersonGroupPerson=Te,e.SnapshotOperations=tr,Object.defineProperty(e,"__esModule",{value:!0})});
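The 4.2.0 bundle that follows adds a `Mask` attribute mapper (`type`: noMask / faceMask / otherMaskOrOcclusion / uncertain, plus `noseAndMouthCovered`) and wires it into `FaceAttributes` as `mask`. Below is a hedged sketch of reading that attribute once 4.2.0 is installed; that the 4.2.0 typings accept `"mask"` in `returnFaceAttributes`, and that the `detection_03` model is the right choice for it, are assumptions not visible in the truncated bundle shown here.

```typescript
import { FaceClient } from "@azure/cognitiveservices-face";

// Assumes the 4.2.0 package, where FaceAttributes gains an optional `mask`
// property matching the Mask mapper added in the bundle below.
async function reportMasks(client: FaceClient, imageUrl: string): Promise<void> {
  const faces = await client.face.detectWithUrl(imageUrl, {
    detectionModel: "detection_03",   // assumption: mask detection uses the newer detection model
    returnFaceAttributes: ["mask"],   // assumption: "mask" is a valid attribute name in 4.2.0
  });
  for (const face of faces) {
    const mask = face.faceAttributes?.mask;
    console.log(face.faceRectangle, mask?.type, mask?.noseAndMouthCovered);
  }
}
```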
!function(e,a){"object"==typeof exports&&"undefined"!=typeof module?a(exports,require("@azure/ms-rest-js")):"function"==typeof define&&define.amd?define(["exports","@azure/ms-rest-js"],a):a(((e=e||self).Azure=e.Azure||{},e.Azure.CognitiveservicesFace={}),e.msRest)}(this,function(e,o){"use strict";var t=function(e,a){return(t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var r in a)a.hasOwnProperty(r)&&(e[r]=a[r])})(e,a)};function a(e,a){function r(){this.constructor=e}t(e,a),e.prototype=null===a?Object.create(a):(r.prototype=a.prototype,new r)}var r=function(){return(r=Object.assign||function(e){for(var a,r=1,t=arguments.length;r<t;r++)for(var s in a=arguments[r])Object.prototype.hasOwnProperty.call(a,s)&&(e[s]=a[s]);return e}).apply(this,arguments)},s=Object.freeze({__proto__:null}),i={serializedName:"Error",type:{name:"Composite",className:"ErrorModel",modelProperties:{code:{serializedName:"code",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},p={serializedName:"APIError",type:{name:"Composite",className:"APIError",modelProperties:{error:{serializedName:"error",type:{name:"Composite",className:"ErrorModel"}}}}},n={serializedName:"FaceRectangle",type:{name:"Composite",className:"FaceRectangle",modelProperties:{width:{required:!0,serializedName:"width",type:{name:"Number"}},height:{required:!0,serializedName:"height",type:{name:"Number"}},left:{required:!0,serializedName:"left",type:{name:"Number"}},top:{required:!0,serializedName:"top",type:{name:"Number"}}}}},d={serializedName:"Coordinate",type:{name:"Composite",className:"Coordinate",modelProperties:{x:{required:!0,serializedName:"x",type:{name:"Number"}},y:{required:!0,serializedName:"y",type:{name:"Number"}}}}},l={serializedName:"FaceLandmarks",type:{name:"Composite",className:"FaceLandmarks",modelProperties:{pupilLeft:{serializedName:"pupilLeft",type:{name:"Composite",className:"Coordinate"}},pupilRight:{serializedName:"pupilRight",type:{name:"Composite",className:"Coordinate"}},noseTip:{serializedName:"noseTip",type:{name:"Composite",className:"Coordinate"}},mouthLeft:{serializedName:"mouthLeft",type:{name:"Composite",className:"Coordinate"}},mouthRight:{serializedName:"mouthRight",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftOuter:{serializedName:"eyebrowLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftInner:{serializedName:"eyebrowLeftInner",type:{name:"Composite",className:"Coordinate"}},eyeLeftOuter:{serializedName:"eyeLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyeLeftTop:{serializedName:"eyeLeftTop",type:{name:"Composite",className:"Coordinate"}},eyeLeftBottom:{serializedName:"eyeLeftBottom",type:{name:"Composite",className:"Coordinate"}},eyeLeftInner:{serializedName:"eyeLeftInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightInner:{serializedName:"eyebrowRightInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightOuter:{serializedName:"eyebrowRightOuter",type:{name:"Composite",className:"Coordinate"}},eyeRightInner:{serializedName:"eyeRightInner",type:{name:"Composite",className:"Coordinate"}},eyeRightTop:{serializedName:"eyeRightTop",type:{name:"Composite",className:"Coordinate"}},eyeRightBottom:{serializedName:"eyeRightBottom",type:{name:"Composite",className:"Coordinate"}},eyeRightOuter:{serializedName:"eyeRightOuter",type:{name:"Composite",className:"Coordinate"}},noseRootLeft:{serializedName:"noseRootLeft",type:{name:"Composite",className:"Coordinate"}},nose
RootRight:{serializedName:"noseRootRight",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarTop:{serializedName:"noseLeftAlarTop",type:{name:"Composite",className:"Coordinate"}},noseRightAlarTop:{serializedName:"noseRightAlarTop",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarOutTip:{serializedName:"noseLeftAlarOutTip",type:{name:"Composite",className:"Coordinate"}},noseRightAlarOutTip:{serializedName:"noseRightAlarOutTip",type:{name:"Composite",className:"Coordinate"}},upperLipTop:{serializedName:"upperLipTop",type:{name:"Composite",className:"Coordinate"}},upperLipBottom:{serializedName:"upperLipBottom",type:{name:"Composite",className:"Coordinate"}},underLipTop:{serializedName:"underLipTop",type:{name:"Composite",className:"Coordinate"}},underLipBottom:{serializedName:"underLipBottom",type:{name:"Composite",className:"Coordinate"}}}}},m={serializedName:"FacialHair",type:{name:"Composite",className:"FacialHair",modelProperties:{moustache:{nullable:!1,serializedName:"moustache",type:{name:"Number"}},beard:{nullable:!1,serializedName:"beard",type:{name:"Number"}},sideburns:{nullable:!1,serializedName:"sideburns",type:{name:"Number"}}}}},u={serializedName:"HeadPose",type:{name:"Composite",className:"HeadPose",modelProperties:{roll:{nullable:!1,serializedName:"roll",type:{name:"Number"}},yaw:{nullable:!1,serializedName:"yaw",type:{name:"Number"}},pitch:{nullable:!1,serializedName:"pitch",type:{name:"Number"}}}}},c={serializedName:"Emotion",type:{name:"Composite",className:"Emotion",modelProperties:{anger:{nullable:!1,serializedName:"anger",type:{name:"Number"}},contempt:{nullable:!1,serializedName:"contempt",type:{name:"Number"}},disgust:{nullable:!1,serializedName:"disgust",type:{name:"Number"}},fear:{nullable:!1,serializedName:"fear",type:{name:"Number"}},happiness:{nullable:!1,serializedName:"happiness",type:{name:"Number"}},neutral:{nullable:!1,serializedName:"neutral",type:{name:"Number"}},sadness:{nullable:!1,serializedName:"sadness",type:{name:"Number"}},surprise:{nullable:!1,serializedName:"surprise",type:{name:"Number"}}}}},y={serializedName:"HairColor",type:{name:"Composite",className:"HairColor",modelProperties:{color:{nullable:!1,serializedName:"color",type:{name:"Enum",allowedValues:["unknown","white","gray","blond","brown","red","black","other"]}},confidence:{nullable:!1,serializedName:"confidence",type:{name:"Number"}}}}},h={serializedName:"Hair",type:{name:"Composite",className:"Hair",modelProperties:{bald:{nullable:!1,serializedName:"bald",type:{name:"Number"}},invisible:{nullable:!1,serializedName:"invisible",type:{name:"Boolean"}},hairColor:{serializedName:"hairColor",type:{name:"Sequence",element:{type:{name:"Composite",className:"HairColor"}}}}}}},N={serializedName:"Makeup",type:{name:"Composite",className:"Makeup",modelProperties:{eyeMakeup:{nullable:!1,serializedName:"eyeMakeup",type:{name:"Boolean"}},lipMakeup:{nullable:!1,serializedName:"lipMakeup",type:{name:"Boolean"}}}}},f={serializedName:"Occlusion",type:{name:"Composite",className:"Occlusion",modelProperties:{foreheadOccluded:{nullable:!1,serializedName:"foreheadOccluded",type:{name:"Boolean"}},eyeOccluded:{nullable:!1,serializedName:"eyeOccluded",type:{name:"Boolean"}},mouthOccluded:{nullable:!1,serializedName:"mouthOccluded",type:{name:"Boolean"}}}}},P={serializedName:"Accessory",type:{name:"Composite",className:"Accessory",modelProperties:{type:{nullable:!1,serializedName:"type",type:{name:"Enum",allowedValues:["headWear","glasses","mask"]}},confidence:{nullable:!1,serializedName:"confid
ence",type:{name:"Number"}}}}},g={serializedName:"Blur",type:{name:"Composite",className:"Blur",modelProperties:{blurLevel:{nullable:!1,serializedName:"blurLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},z={serializedName:"Exposure",type:{name:"Composite",className:"Exposure",modelProperties:{exposureLevel:{nullable:!1,serializedName:"exposureLevel",type:{name:"Enum",allowedValues:["UnderExposure","GoodExposure","OverExposure"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},I={serializedName:"Noise",type:{name:"Composite",className:"Noise",modelProperties:{noiseLevel:{nullable:!1,serializedName:"noiseLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},M={serializedName:"Mask",type:{name:"Composite",className:"Mask",modelProperties:{type:{nullable:!1,serializedName:"type",type:{name:"Enum",allowedValues:["noMask","faceMask","otherMaskOrOcclusion","uncertain"]}},noseAndMouthCovered:{nullable:!1,serializedName:"noseAndMouthCovered",type:{name:"Boolean"}}}}},q={serializedName:"FaceAttributes",type:{name:"Composite",className:"FaceAttributes",modelProperties:{age:{serializedName:"age",type:{name:"Number"}},gender:{serializedName:"gender",type:{name:"Enum",allowedValues:["male","female"]}},smile:{serializedName:"smile",type:{name:"Number"}},facialHair:{serializedName:"facialHair",type:{name:"Composite",className:"FacialHair"}},glasses:{serializedName:"glasses",type:{name:"Enum",allowedValues:["noGlasses","readingGlasses","sunglasses","swimmingGoggles"]}},headPose:{serializedName:"headPose",type:{name:"Composite",className:"HeadPose"}},emotion:{serializedName:"emotion",type:{name:"Composite",className:"Emotion"}},hair:{serializedName:"hair",type:{name:"Composite",className:"Hair"}},makeup:{serializedName:"makeup",type:{name:"Composite",className:"Makeup"}},occlusion:{serializedName:"occlusion",type:{name:"Composite",className:"Occlusion"}},accessories:{serializedName:"accessories",type:{name:"Sequence",element:{type:{name:"Composite",className:"Accessory"}}}},blur:{serializedName:"blur",type:{name:"Composite",className:"Blur"}},exposure:{serializedName:"exposure",type:{name:"Composite",className:"Exposure"}},noise:{serializedName:"noise",type:{name:"Composite",className:"Noise"}},mask:{serializedName:"mask",type:{name:"Composite",className:"Mask"}}}}},b={serializedName:"DetectedFace",type:{name:"Composite",className:"DetectedFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}},faceRectangle:{required:!0,serializedName:"faceRectangle",type:{name:"Composite",className:"FaceRectangle"}},faceLandmarks:{serializedName:"faceLandmarks",type:{name:"Composite",className:"FaceLandmarks"}},faceAttributes:{serializedName:"faceAttributes",type:{name:"Composite",className:"FaceAttributes"}}}}},F={serializedName:"FindSimilarRequest",type:{name:"Composite",className:"FindSimilarRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},faceListId:{serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largeFaceListId:{serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},faceIds:{serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{n
ame:"Uuid"}}}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:20,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"matchPerson",type:{name:"Enum",allowedValues:["matchPerson","matchFace"]}}}}},L={serializedName:"SimilarFace",type:{name:"Composite",className:"SimilarFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},persistedFaceId:{serializedName:"persistedFaceId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},C={serializedName:"GroupRequest",type:{name:"Composite",className:"GroupRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},R={serializedName:"GroupResult",type:{name:"Composite",className:"GroupResult",modelProperties:{groups:{required:!0,serializedName:"groups",type:{name:"Sequence",element:{type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}},messyGroup:{serializedName:"messyGroup",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},S={serializedName:"IdentifyRequest",type:{name:"Composite",className:"IdentifyRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:10},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:1,constraints:{InclusiveMaximum:5,InclusiveMinimum:1},type:{name:"Number"}},confidenceThreshold:{serializedName:"confidenceThreshold",type:{name:"Number"}}}}},G={serializedName:"IdentifyCandidate",type:{name:"Composite",className:"IdentifyCandidate",modelProperties:{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},O={serializedName:"IdentifyResult",type:{name:"Composite",className:"IdentifyResult",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},candidates:{required:!0,serializedName:"candidates",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyCandidate"}}}}}}},T={serializedName:"VerifyFaceToPersonRequest",type:{name:"Composite",className:"VerifyFaceToPersonRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}}}}},E={serializedName:"VerifyFaceToFaceRequest",type:{name:"Composite",className:"VerifyFaceToFaceRequest",modelProperties:{faceId1:{required:!0,serializedName:"faceId1",type:{name:"Uuid"}},faceId2:{required:!0,serializedName:"faceId2",type:{name:"Uuid"}}}}},D={serializedName:"VerifyResult",type:{name:"Composite",className:"VerifyResult",modelProperties:{isIdentical:{required:!0,serializedName:"isIdentical",type:{name:"Boolean"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},A={serializedName:"PersistedFace",type:{name:"Composite",className:"PersistedFace",modelProperties:{per
sistedFaceId:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}},userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},_={serializedName:"NameAndUserDataContract",type:{name:"Composite",className:"NameAndUserDataContract",modelProperties:{name:{serializedName:"name",constraints:{MaxLength:128},type:{name:"String"}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},U={serializedName:"MetaDataContract",type:{name:"Composite",className:"MetaDataContract",modelProperties:r(r({},_.type.modelProperties),{recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}})}},x={serializedName:"FaceList",type:{name:"Composite",className:"FaceList",modelProperties:r(r({},U.type.modelProperties),{faceListId:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},persistedFaces:{serializedName:"persistedFaces",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}})}},w={serializedName:"PersonGroup",type:{name:"Composite",className:"PersonGroup",modelProperties:r(r({},U.type.modelProperties),{personGroupId:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},v={serializedName:"Person",type:{name:"Composite",className:"Person",modelProperties:r(r({},_.type.modelProperties),{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},persistedFaceIds:{serializedName:"persistedFaceIds",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}})}},B={serializedName:"LargeFaceList",type:{name:"Composite",className:"LargeFaceList",modelProperties:r(r({},U.type.modelProperties),{largeFaceListId:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},k={serializedName:"LargePersonGroup",type:{name:"Composite",className:"LargePersonGroup",modelProperties:r(r({},U.type.modelProperties),{largePersonGroupId:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},V={serializedName:"UpdateFaceRequest",type:{name:"Composite",className:"UpdateFaceRequest",modelProperties:{userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},H={serializedName:"TrainingStatus",type:{name:"Composite",className:"TrainingStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["nonstarted","running","succeeded","failed"]}},created:{required:!0,serializedName:"createdDateTime",type:{name:"DateTime"}},lastAction:{serializedName:"lastActionDateTime",type:{name:"DateTime"}},lastSuccessfulTraining:{serializedName:"lastSuccessfulTrainingDateTime",type:{name:"DateTime"}},message:{serializedName:"message",type:{name:"String"}}}}},j={serializedName:"ApplySnapshotRequest",type:{name:"Composite",className:"ApplySnapshotRequest",modelProperties:{objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"CreateNew",type:{name:"Enum",allowedValues:["CreateNew"]}}}}},$={serializedName:"Snapshot",type:{name:"Composite",className:"Snapshot",modelProperties:{id:{required:!0,serializedName:"id",type:{name:"Uuid"}},account:{required:!0,serializedName:"account",type:{name:"String"}},type:{required:!0,serializedName:"type",type:{name:"Enum
",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastUpdateTime:{required:!0,serializedName:"lastUpdateTime",type:{name:"DateTime"}}}}},Q={serializedName:"TakeSnapshotRequest",type:{name:"Composite",className:"TakeSnapshotRequest",modelProperties:{type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}},objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},W={serializedName:"UpdateSnapshotRequest",type:{name:"Composite",className:"UpdateSnapshotRequest",modelProperties:{applyScope:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},J={serializedName:"OperationStatus",type:{name:"Composite",className:"OperationStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["notstarted","running","succeeded","failed"]}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastActionTime:{serializedName:"lastActionTime",type:{name:"DateTime"}},resourceLocation:{serializedName:"resourceLocation",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},K={serializedName:"ImageUrl",type:{name:"Composite",className:"ImageUrl",modelProperties:{url:{required:!0,serializedName:"url",type:{name:"String"}}}}},X={serializedName:"snapshot-take-headers",type:{name:"Composite",className:"SnapshotTakeHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},Y={serializedName:"snapshot-apply-headers",type:{name:"Composite",className:"SnapshotApplyHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},Z=Object.freeze({__proto__:null,ErrorModel:i,APIError:p,FaceRectangle:n,Coordinate:d,FaceLandmarks:l,FacialHair:m,HeadPose:u,Emotion:c,HairColor:y,Hair:h,Makeup:N,Occlusion:f,Accessory:P,Blur:g,Exposure:z,Noise:I,Mask:M,FaceAttributes:q,DetectedFace:b,FindSimilarRequest:F,SimilarFace:L,GroupRequest:C,GroupResult:R,IdentifyRequest:S,IdentifyCandidate:G,IdentifyResult:O,VerifyFaceToPersonRequest:T,VerifyFaceToFaceRequest:E,VerifyResult:D,PersistedFace:A,NameAndUserDataContract:_,MetaDataContract:U,FaceList:x,PersonGroup:w,Person:v,LargeFaceList:B,LargePersonGroup:k,UpdateFaceRequest:V,TrainingStatus:H,ApplySnapshotRequest:j,Snapshot:$,TakeSnapshotRequest:Q,UpdateSnapshotRequest:W,OperationStatus:J,ImageUrl:K,SnapshotTakeHeaders:X,SnapshotApplyHeaders:Y}),ee=Object.freeze({__proto__:null,Accessory:P,APIError:p,Blur:g,Coordinate:d,DetectedFace:b,Emotion:c,ErrorModel:i,Exposure:z,FaceAttributes:q,FaceLandmarks:l,FaceRectangle:n,FacialHair:m,FindSimilarRequest:F,GroupRequest:C,GroupResult:R,Hair:h,HairColor:y,HeadPose:u,IdentifyCandidate:G,IdentifyRequest:S,IdentifyResult:O,ImageUrl:K,Makeup:N,Mask:M,Noise:I,Occlusion:f,SimilarFace:L,VerifyFaceToFaceRequest:E,VerifyFaceToPersonRequest:
T,VerifyResult:D}),ae={parameterPath:["options","applyScope"],mapper:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},collectionFormat:o.QueryCollectionFormat.Csv},re={parameterPath:["options","detectionModel"],mapper:{serializedName:"detectionModel",defaultValue:"detection_01",type:{name:"String"}}},P={parameterPath:"endpoint",mapper:{required:!0,serializedName:"Endpoint",defaultValue:"",type:{name:"String"}},skipEncoding:!0},g={parameterPath:["options","faceIdTimeToLive"],mapper:{serializedName:"faceIdTimeToLive",defaultValue:86400,constraints:{InclusiveMaximum:86400,InclusiveMinimum:60},type:{name:"Number"}}},d={parameterPath:"faceListId",mapper:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},b={parameterPath:"largeFaceListId",mapper:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},c={parameterPath:"largePersonGroupId",mapper:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},z={parameterPath:"persistedFaceId",mapper:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}}},q={parameterPath:"personGroupId",mapper:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},l={parameterPath:"personId",mapper:{required:!0,serializedName:"personId",type:{name:"Uuid"}}},n={parameterPath:["options","recognitionModel"],mapper:{serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}},m={parameterPath:["options","returnFaceAttributes"],mapper:{serializedName:"returnFaceAttributes",type:{name:"Sequence",element:{type:{name:"Enum",allowedValues:["age","gender","headPose","smile","facialHair","glasses","emotion","hair","makeup","occlusion","accessories","blur","exposure","noise","mask"]}}}},collectionFormat:o.QueryCollectionFormat.Csv},h={parameterPath:["options","returnFaceId"],mapper:{serializedName:"returnFaceId",defaultValue:!0,type:{name:"Boolean"}}},y={parameterPath:["options","returnFaceLandmarks"],mapper:{serializedName:"returnFaceLandmarks",defaultValue:!1,type:{name:"Boolean"}}},u={parameterPath:["options","returnRecognitionModel"],mapper:{serializedName:"returnRecognitionModel",defaultValue:!1,type:{name:"Boolean"}}},G={parameterPath:"snapshotId",mapper:{required:!0,serializedName:"snapshotId",type:{name:"Uuid"}}},O={parameterPath:["options","start"],mapper:{serializedName:"start",type:{name:"String"}}},N={parameterPath:["options","start"],mapper:{serializedName:"start",constraints:{MaxLength:64},type:{name:"String"}}},M={parameterPath:["options","targetFace"],mapper:{serializedName:"targetFace",type:{name:"Sequence",element:{type:{name:"Number"}}}},collectionFormat:o.QueryCollectionFormat.Csv},I={parameterPath:["options","top"],mapper:{serializedName:"top",constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},f={parameterPath:["options","top"],mapper:{serializedName:"top",defaultValue:1e3,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},L={parameterPath:["options","userData"],mapper:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}},te=(se.prototype.findSimilar=function(e,a,r){return this.client.sendOperationRequest({faceId:e,options:a},oe,r)},se.prototype.group=function(e,a,r){return 
this.client.sendOperationRequest({faceIds:e,options:a},ie,r)},se.prototype.identify=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},pe,r)},se.prototype.verifyFaceToFace=function(e,a,r,t){return this.client.sendOperationRequest({faceId1:e,faceId2:a,options:r},ne,t)},se.prototype.detectWithUrl=function(e,a,r){return this.client.sendOperationRequest({url:e,options:a},de,r)},se.prototype.verifyFaceToPerson=function(e,a,r,t){return this.client.sendOperationRequest({faceId:e,personId:a,options:r},le,t)},se.prototype.detectWithStream=function(e,a,r){return this.client.sendOperationRequest({image:e,options:a},me,r)},se);function se(e){this.client=e}var ee=new o.Serializer(ee),oe={httpMethod:"POST",path:"findsimilars",urlParameters:[P],requestBody:{parameterPath:{faceId:"faceId",faceListId:["options","faceListId"],largeFaceListId:["options","largeFaceListId"],faceIds:["options","faceIds"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],mode:["options","mode"]},mapper:r(r({},F),{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"SimilarFace"}}}}},default:{bodyMapper:p}},serializer:ee},ie={httpMethod:"POST",path:"group",urlParameters:[P],requestBody:{parameterPath:{faceIds:"faceIds"},mapper:r(r({},C),{required:!0})},responses:{200:{bodyMapper:R},default:{bodyMapper:p}},serializer:ee},pe={httpMethod:"POST",path:"identify",urlParameters:[P],requestBody:{parameterPath:{faceIds:"faceIds",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],confidenceThreshold:["options","confidenceThreshold"]},mapper:r(r({},S),{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyResult"}}}}},default:{bodyMapper:p}},serializer:ee},ne={httpMethod:"POST",path:"verify",urlParameters:[P],requestBody:{parameterPath:{faceId1:"faceId1",faceId2:"faceId2"},mapper:r(r({},E),{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ee},de={httpMethod:"POST",path:"detect",urlParameters:[P],queryParameters:[h,y,m,n,u,re,g],requestBody:{parameterPath:{url:"url"},mapper:r(r({},K),{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:ee},le={httpMethod:"POST",path:"verify",urlParameters:[P],requestBody:{parameterPath:{faceId:"faceId",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],personId:"personId"},mapper:r(r({},T),{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ee},me={httpMethod:"POST",path:"detect",urlParameters:[P],queryParameters:[h,y,m,n,u,re,g],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:ee},ee=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:x,ImageUrl:K,LargeFaceList:B,LargePersonGroup:k,MetaDataContract:U,NameAndUserDataContract:_,PersistedFace:A,Person:v,PersonGroup:w,UpdateFaceRequest:V}),ue=(ce.prototype.create=function(e,a,r){return 
this.client.sendOperationRequest({personGroupId:e,options:a},ye,r)},ce.prototype.list=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},he,r)},ce.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},Ne,t)},ce.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},fe,t)},ce.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},Pe,t)},ce.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},ge,s)},ce.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},ze,s)},ce.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},Ie,s)},ce.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,url:r,options:t},Me,s)},ce.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,image:r,options:t},qe,s)},ce);function ce(e){this.client=e}var ee=new o.Serializer(ee),ye={httpMethod:"POST",path:"persongroups/{personGroupId}/persons",urlParameters:[P,q],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:ee},he={httpMethod:"GET",path:"persongroups/{personGroupId}/persons",urlParameters:[P,q],queryParameters:[O,I],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:ee},Ne={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[P,q,l],responses:{200:{},default:{bodyMapper:p}},serializer:ee},fe={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[P,q,l],responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:ee},Pe={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[P,q,l],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},ge={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[P,q,l,z],responses:{200:{},default:{bodyMapper:p}},serializer:ee},ze={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[P,q,l,z],responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},Ie={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[P,q,l,z],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r(r({},V),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},Me={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[P,q,l],queryParameters:[L,M,re],requestBody:{parameterPath:{url:"url"},mapper:r(r({},K),{required:!0})},responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},qe={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlPar
ameters:[P,q,l],queryParameters:[L,M,re],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},ee=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:x,LargeFaceList:B,LargePersonGroup:k,MetaDataContract:U,NameAndUserDataContract:_,PersistedFace:A,Person:v,PersonGroup:w,TrainingStatus:H}),be=(Fe.prototype.create=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Le,r)},Fe.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ce,r)},Fe.prototype.get=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Re,r)},Fe.prototype.update=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Se,r)},Fe.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ge,r)},Fe.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Oe,a)},Fe.prototype.train=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Te,r)},Fe);function Fe(e){this.client=e}var ee=new o.Serializer(ee),Le={httpMethod:"PUT",path:"persongroups/{personGroupId}",urlParameters:[P,q],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},U),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},Ce={httpMethod:"DELETE",path:"persongroups/{personGroupId}",urlParameters:[P,q],responses:{200:{},default:{bodyMapper:p}},serializer:ee},Re={httpMethod:"GET",path:"persongroups/{personGroupId}",urlParameters:[P,q],queryParameters:[u],responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:ee},Se={httpMethod:"PATCH",path:"persongroups/{personGroupId}",urlParameters:[P,q],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},Ge={httpMethod:"GET",path:"persongroups/{personGroupId}/training",urlParameters:[P,q],responses:{200:{bodyMapper:H},default:{bodyMapper:p}},serializer:ee},Oe={httpMethod:"GET",path:"persongroups",urlParameters:[P],queryParameters:[N,f,u],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersonGroup"}}}}},default:{bodyMapper:p}},serializer:ee},Te={httpMethod:"POST",path:"persongroups/{personGroupId}/train",urlParameters:[P,q],responses:{202:{},default:{bodyMapper:p}},serializer:ee},ee=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:x,ImageUrl:K,LargeFaceList:B,LargePersonGroup:k,MetaDataContract:U,NameAndUserDataContract:_,PersistedFace:A,Person:v,PersonGroup:w}),Ee=(De.prototype.create=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},Ae,r)},De.prototype.get=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},_e,r)},De.prototype.update=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},Ue,r)},De.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},xe,r)},De.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},we,a)},De.prototype.deleteFace=function(e,a,r,t){return 
this.client.sendOperationRequest({faceListId:e,persistedFaceId:a,options:r},ve,t)},De.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,url:a,options:r},Be,t)},De.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,image:a,options:r},ke,t)},De);function De(e){this.client=e}var ee=new o.Serializer(ee),Ae={httpMethod:"PUT",path:"facelists/{faceListId}",urlParameters:[P,d],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},U),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},_e={httpMethod:"GET",path:"facelists/{faceListId}",urlParameters:[P,d],queryParameters:[u],responses:{200:{bodyMapper:x},default:{bodyMapper:p}},serializer:ee},Ue={httpMethod:"PATCH",path:"facelists/{faceListId}",urlParameters:[P,d],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},xe={httpMethod:"DELETE",path:"facelists/{faceListId}",urlParameters:[P,d],responses:{200:{},default:{bodyMapper:p}},serializer:ee},we={httpMethod:"GET",path:"facelists",urlParameters:[P],queryParameters:[u],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"FaceList"}}}}},default:{bodyMapper:p}},serializer:ee},ve={httpMethod:"DELETE",path:"facelists/{faceListId}/persistedfaces/{persistedFaceId}",urlParameters:[P,d,z],responses:{200:{},default:{bodyMapper:p}},serializer:ee},Be={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[P,d],queryParameters:[L,M,re],requestBody:{parameterPath:{url:"url"},mapper:r(r({},K),{required:!0})},responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},ke={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[P,d],queryParameters:[L,M,re],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},ee=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:x,ImageUrl:K,LargeFaceList:B,LargePersonGroup:k,MetaDataContract:U,NameAndUserDataContract:_,PersistedFace:A,Person:v,PersonGroup:w,UpdateFaceRequest:V}),Ve=(He.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},je,r)},He.prototype.list=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},$e,r)},He.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},Qe,t)},He.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},We,t)},He.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},Je,t)},He.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Ke,s)},He.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Xe,s)},He.prototype.updateFace=function(e,a,r,t,s){return 
this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Ye,s)},He.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,url:r,options:t},Ze,s)},He.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,image:r,options:t},ea,s)},He);function He(e){this.client=e}var ee=new o.Serializer(ee),je={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[P,c],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:ee},$e={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[P,c],queryParameters:[O,I],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:ee},Qe={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[P,c,l],responses:{200:{},default:{bodyMapper:p}},serializer:ee},We={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[P,c,l],responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:ee},Je={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[P,c,l],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},Ke={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[P,c,l,z],responses:{200:{},default:{bodyMapper:p}},serializer:ee},Xe={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[P,c,l,z],responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},Ye={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[P,c,l,z],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r(r({},V),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},Ze={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[P,c,l],queryParameters:[L,M,re],requestBody:{parameterPath:{url:"url"},mapper:r(r({},K),{required:!0})},responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},ea={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[P,c,l],queryParameters:[L,M,re],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:ee},ee=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:x,LargeFaceList:B,LargePersonGroup:k,MetaDataContract:U,NameAndUserDataContract:_,PersistedFace:A,Person:v,PersonGroup:w,TrainingStatus:H}),aa=(ra.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},ta,r)},ra.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},sa,r)},ra.prototype.get=function(e,a,r){return 
this.client.sendOperationRequest({largePersonGroupId:e,options:a},oa,r)},ra.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},ia,r)},ra.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},pa,r)},ra.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},na,a)},ra.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},da,r)},ra);function ra(e){this.client=e}var ee=new o.Serializer(ee),ta={httpMethod:"PUT",path:"largepersongroups/{largePersonGroupId}",urlParameters:[P,c],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},U),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},sa={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}",urlParameters:[P,c],responses:{200:{},default:{bodyMapper:p}},serializer:ee},oa={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}",urlParameters:[P,c],queryParameters:[u],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:ee},ia={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}",urlParameters:[P,c],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ee},pa={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/training",urlParameters:[P,c],responses:{200:{bodyMapper:H},default:{bodyMapper:p}},serializer:ee},na={httpMethod:"GET",path:"largepersongroups",urlParameters:[P],queryParameters:[N,f,u],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargePersonGroup"}}}}},default:{bodyMapper:p}},serializer:ee},da={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/train",urlParameters:[P,c],responses:{202:{},default:{bodyMapper:p}},serializer:ee},w=Object.freeze({__proto__:null,APIError:p,ErrorModel:i,FaceList:x,ImageUrl:K,LargeFaceList:B,LargePersonGroup:k,MetaDataContract:U,NameAndUserDataContract:_,PersistedFace:A,Person:v,PersonGroup:w,TrainingStatus:H,UpdateFaceRequest:V}),la=(ma.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ua,r)},ma.prototype.get=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ca,r)},ma.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ya,r)},ma.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ha,r)},ma.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Na,r)},ma.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},fa,a)},ma.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Pa,r)},ma.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},ga,t)},ma.prototype.getFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},za,t)},ma.prototype.updateFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Ia,t)},ma.prototype.addFaceFromUrl=function(e,a,r,t){return 
this.client.sendOperationRequest({largeFaceListId:e,url:a,options:r},Ma,t)},ma.prototype.listFaces=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},qa,r)},ma.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,image:a,options:r},ba,t)},ma);function ma(e){this.client=e}var w=new o.Serializer(w),ua={httpMethod:"PUT",path:"largefacelists/{largeFaceListId}",urlParameters:[P,b],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r(r({},U),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:w},ca={httpMethod:"GET",path:"largefacelists/{largeFaceListId}",urlParameters:[P,b],queryParameters:[u],responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:w},ya={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}",urlParameters:[P,b],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r(r({},_),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:w},ha={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}",urlParameters:[P,b],responses:{200:{},default:{bodyMapper:p}},serializer:w},Na={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/training",urlParameters:[P,b],responses:{200:{bodyMapper:H},default:{bodyMapper:p}},serializer:w},fa={httpMethod:"GET",path:"largefacelists",urlParameters:[P],queryParameters:[u],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargeFaceList"}}}}},default:{bodyMapper:p}},serializer:w},Pa={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/train",urlParameters:[P,b],responses:{202:{},default:{bodyMapper:p}},serializer:w},ga={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[P,b,z],responses:{200:{},default:{bodyMapper:p}},serializer:w},za={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[P,b,z],responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:w},Ia={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[P,b,z],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r(r({},V),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:w},Ma={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[P,b],queryParameters:[L,M,re],requestBody:{parameterPath:{url:"url"},mapper:r(r({},K),{required:!0})},responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:w},qa={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[P,b],queryParameters:[O,I],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}},default:{bodyMapper:p}},serializer:w},ba={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[P,b],queryParameters:[L,M,re],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:A},default:{bodyMapper:p}},serializer:w},i=Object.freeze({__proto__:null,APIError:p,ApplySnapshotRequest:j,ErrorModel:i,OperationStatus:J,Snapshot:$,SnapshotApplyHeaders:Y,SnapshotTakeHeaders:X,TakeSnapshotRequest:Q,UpdateSnapshotRequest:W}),Fa=(La.prototy
pe.take=function(e,a,r,t,s){return this.client.sendOperationRequest({type:e,objectId:a,applyScope:r,options:t},Ra,s)},La.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Sa,a)},La.prototype.get=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},Ga,r)},La.prototype.update=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},Oa,r)},La.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},Ta,r)},La.prototype.apply=function(e,a,r,t){return this.client.sendOperationRequest({snapshotId:e,objectId:a,options:r},Ea,t)},La.prototype.getOperationStatus=function(e,a,r){return this.client.sendOperationRequest({operationId:e,options:a},Da,r)},La);function La(e){this.client=e}var Ca,i=new o.Serializer(i),Ra={httpMethod:"POST",path:"snapshots",urlParameters:[P],requestBody:{parameterPath:{type:"type",objectId:"objectId",applyScope:"applyScope",userData:["options","userData"]},mapper:r(r({},Q),{required:!0})},responses:{202:{headersMapper:X},default:{bodyMapper:p}},serializer:i},Sa={httpMethod:"GET",path:"snapshots",urlParameters:[P],queryParameters:[{parameterPath:["options","type"],mapper:{serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}}},ae],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Snapshot"}}}}},default:{bodyMapper:p}},serializer:i},Ga={httpMethod:"GET",path:"snapshots/{snapshotId}",urlParameters:[P,G],responses:{200:{bodyMapper:$},default:{bodyMapper:p}},serializer:i},Oa={httpMethod:"PATCH",path:"snapshots/{snapshotId}",urlParameters:[P,G],requestBody:{parameterPath:{applyScope:["options","applyScope"],userData:["options","userData"]},mapper:r(r({},W),{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:i},Ta={httpMethod:"DELETE",path:"snapshots/{snapshotId}",urlParameters:[P,G],responses:{200:{},default:{bodyMapper:p}},serializer:i},Ea={httpMethod:"POST",path:"snapshots/{snapshotId}/apply",urlParameters:[P,G],requestBody:{parameterPath:{objectId:"objectId",mode:["options","mode"]},mapper:r(r({},j),{required:!0})},responses:{202:{headersMapper:Y},default:{bodyMapper:p}},serializer:i},Da={httpMethod:"GET",path:"operations/{operationId}",urlParameters:[P,{parameterPath:"operationId",mapper:{required:!0,serializedName:"operationId",type:{name:"Uuid"}}}],responses:{200:{bodyMapper:J},default:{bodyMapper:p}},serializer:i},p=(a(Aa,Ca=o.ServiceClient),Aa);function Aa(e,a,r){var t,s=this;if(null==a)throw new Error("'endpoint' cannot be null.");if(null==e)throw new Error("'credentials' cannot be null.");return(r=r||{}).userAgent||(t=o.getDefaultUserAgentValue(),r.userAgent="@azure/cognitiveservices-face/4.2.0 "+t),(s=Ca.call(this,e,r)||this).baseUri="{Endpoint}/face/v1.0",s.requestContentType="application/json; charset=utf-8",s.endpoint=a,s.credentials=e,s}var _a,i=(a(Ua,_a=p),Ua);function Ua(e,a,r){r=_a.call(this,e,a,r)||this;return r.face=new te(r),r.personGroupPerson=new ue(r),r.personGroup=new be(r),r.faceList=new Ee(r),r.largePersonGroupPerson=new Ve(r),r.largePersonGroup=new aa(r),r.largeFaceList=new la(r),r.snapshot=new 
Fa(r),r}e.Face=te,e.FaceClient=i,e.FaceClientContext=p,e.FaceListOperations=Ee,e.FaceMappers=Z,e.FaceModels=s,e.LargeFaceListOperations=la,e.LargePersonGroupOperations=aa,e.LargePersonGroupPerson=Ve,e.PersonGroupOperations=be,e.PersonGroupPerson=ue,e.SnapshotOperations=Fa,Object.defineProperty(e,"__esModule",{value:!0})});

@@ -13,3 +13,3 @@ /*

var packageName = "@azure/cognitiveservices-face";
var packageVersion = "4.1.0";
var packageVersion = "4.2.0";
var FaceClientContext = /** @class */ (function (_super) {

@@ -16,0 +16,0 @@ __extends(FaceClientContext, _super);

@@ -1,2 +0,2 @@

export { Accessory, APIError, Blur, Coordinate, DetectedFace, Emotion, ErrorModel, Exposure, FaceAttributes, FaceLandmarks, FaceRectangle, FacialHair, FindSimilarRequest, GroupRequest, GroupResult, Hair, HairColor, HeadPose, IdentifyCandidate, IdentifyRequest, IdentifyResult, ImageUrl, Makeup, Noise, Occlusion, SimilarFace, VerifyFaceToFaceRequest, VerifyFaceToPersonRequest, VerifyResult } from "../models/mappers";
export { Accessory, APIError, Blur, Coordinate, DetectedFace, Emotion, ErrorModel, Exposure, FaceAttributes, FaceLandmarks, FaceRectangle, FacialHair, FindSimilarRequest, GroupRequest, GroupResult, Hair, HairColor, HeadPose, IdentifyCandidate, IdentifyRequest, IdentifyResult, ImageUrl, Makeup, Mask, Noise, Occlusion, SimilarFace, VerifyFaceToFaceRequest, VerifyFaceToPersonRequest, VerifyResult } from "../models/mappers";
//# sourceMappingURL=faceMappers.d.ts.map

@@ -8,3 +8,3 @@ /*

*/
export { Accessory, APIError, Blur, Coordinate, DetectedFace, Emotion, ErrorModel, Exposure, FaceAttributes, FaceLandmarks, FaceRectangle, FacialHair, FindSimilarRequest, GroupRequest, GroupResult, Hair, HairColor, HeadPose, IdentifyCandidate, IdentifyRequest, IdentifyResult, ImageUrl, Makeup, Noise, Occlusion, SimilarFace, VerifyFaceToFaceRequest, VerifyFaceToPersonRequest, VerifyResult } from "../models/mappers";
export { Accessory, APIError, Blur, Coordinate, DetectedFace, Emotion, ErrorModel, Exposure, FaceAttributes, FaceLandmarks, FaceRectangle, FacialHair, FindSimilarRequest, GroupRequest, GroupResult, Hair, HairColor, HeadPose, IdentifyCandidate, IdentifyRequest, IdentifyResult, ImageUrl, Makeup, Mask, Noise, Occlusion, SimilarFace, VerifyFaceToFaceRequest, VerifyFaceToPersonRequest, VerifyResult } from "../models/mappers";
//# sourceMappingURL=faceMappers.js.map

@@ -18,2 +18,3 @@ import * as msRest from "@azure/ms-rest-js";

export declare const Noise: msRest.CompositeMapper;
export declare const Mask: msRest.CompositeMapper;
export declare const FaceAttributes: msRest.CompositeMapper;

@@ -20,0 +21,0 @@ export declare const DetectedFace: msRest.CompositeMapper;

@@ -662,2 +662,31 @@ /*

};
export var Mask = {
serializedName: "Mask",
type: {
name: "Composite",
className: "Mask",
modelProperties: {
type: {
nullable: false,
serializedName: "type",
type: {
name: "Enum",
allowedValues: [
"noMask",
"faceMask",
"otherMaskOrOcclusion",
"uncertain"
]
}
},
noseAndMouthCovered: {
nullable: false,
serializedName: "noseAndMouthCovered",
type: {
name: "Boolean"
}
}
}
}
};
export var FaceAttributes = {

@@ -777,2 +806,9 @@ serializedName: "FaceAttributes",

}
},
mask: {
serializedName: "mask",
type: {
name: "Composite",
className: "Mask"
}
}

@@ -779,0 +815,0 @@ }
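The two hunks above are what add mask detection in this release: a new `Mask` composite mapper and a matching `mask` property on `FaceAttributes`. As a rough sketch of how a consumer might request it (the endpoint, key, and image URL are placeholders, and `checkMask` is a hypothetical helper, not part of the SDK):

```typescript
import { ApiKeyCredentials } from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

// Placeholders: substitute your own Face resource endpoint and key.
const endpoint = "https://<your-resource>.cognitiveservices.azure.com";
const credentials = new ApiKeyCredentials({
  inHeader: { "Ocp-Apim-Subscription-Key": "<your-key>" }
});
const client = new FaceClient(credentials, endpoint);

// Hypothetical helper: detect faces in an image and log the new mask attribute.
async function checkMask(imageUrl: string): Promise<void> {
  const faces = await client.face.detectWithUrl(imageUrl, {
    returnFaceId: false,
    // "mask" is newly accepted in returnFaceAttributes as of 4.2.0; the service
    // may require a newer detection model for this attribute, so check the
    // detection-model how-to if it comes back empty.
    returnFaceAttributes: ["mask"]
  });
  for (const face of faces) {
    const mask = face.faceAttributes?.mask;
    // mask.type is "noMask" | "faceMask" | "otherMaskOrOcclusion" | "uncertain"
    console.log(mask?.type, "nose and mouth covered:", mask?.noseAndMouthCovered);
  }
}
```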

@@ -5,2 +5,3 @@ import * as msRest from "@azure/ms-rest-js";

export declare const endpoint: msRest.OperationURLParameter;
export declare const faceIdTimeToLive: msRest.OperationQueryParameter;
export declare const faceListId: msRest.OperationURLParameter;

@@ -7,0 +8,0 @@ export declare const largeFaceListId: msRest.OperationURLParameter;

@@ -54,2 +54,19 @@ /*

};
export var faceIdTimeToLive = {
parameterPath: [
"options",
"faceIdTimeToLive"
],
mapper: {
serializedName: "faceIdTimeToLive",
defaultValue: 86400,
constraints: {
InclusiveMaximum: 86400,
InclusiveMinimum: 60
},
type: {
name: "Number"
}
}
};
export var faceListId = {
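The new `faceIdTimeToLive` query parameter above lets callers control how long returned faceIds are kept; the mapper constrains it to 60–86400 seconds with a default of 86400. A minimal sketch, assuming an already-constructed `FaceClient`; `detectShortLived` is a hypothetical helper:

```typescript
import { FaceClient } from "@azure/cognitiveservices-face";

// Hypothetical helper: keep the returned faceIds for 10 minutes instead of the
// default 86400 seconds (24 h). The mapper allows 60-86400 seconds.
async function detectShortLived(client: FaceClient, imageUrl: string) {
  const faces = await client.face.detectWithUrl(imageUrl, {
    returnFaceId: true,
    faceIdTimeToLive: 600
  });
  return faces.map(f => f.faceId);
}
```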

@@ -180,3 +197,4 @@ parameterPath: "faceListId",

"exposure",
"noise"
"noise",
"mask"
]

@@ -183,0 +201,0 @@ }

@@ -14,10 +14,11 @@ import * as msRest from "@azure/ms-rest-js";

* Given query face's faceId, to search the similar-looking faces from a faceId array, a face list
* or a large face list. faceId array contains the faces created by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), which
* will expire 24 hours after creation. A "faceListId" is created by [FaceList -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/create) containing
* persistedFaceIds that will not expire. And a "largeFaceListId" is created by [LargeFaceList -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/create)
* containing persistedFaceIds that will also not expire. Depending on the input the returned
* similar faces list contains faceIds or persistedFaceIds ranked by similarity.
* or a large face list. faceId array contains the faces created by [Face - Detect With
* Url](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl) or [Face - Detect With
* Stream](https://docs.microsoft.com/rest/api/faceapi/face/detectwithstream), which will expire at
* the time specified by faceIdTimeToLive after creation. A "faceListId" is created by [FaceList -
* Create](https://docs.microsoft.com/rest/api/faceapi/facelist/create) containing persistedFaceIds
* that will not expire. And a "largeFaceListId" is created by [LargeFaceList -
* Create](https://docs.microsoft.com/rest/api/faceapi/largefacelist/create) containing
* persistedFaceIds that will also not expire. Depending on the input the returned similar faces
* list contains faceIds or persistedFaceIds ranked by similarity.
* <br/>Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the

@@ -32,3 +33,4 @@ * default mode that it tries to find faces of the same person as possible by using internal

* @param faceId FaceId of the query face. User needs to call Face - Detect first to get a valid
* faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call
* faceId. Note that this faceId is not persisted and will expire at the time specified by
* faceIdTimeToLive after the detection call
* @param [options] The optional parameters

@@ -40,3 +42,4 @@ * @returns Promise<Models.FaceFindSimilarResponse>

* @param faceId FaceId of the query face. User needs to call Face - Detect first to get a valid
* faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call
* faceId. Note that this faceId is not persisted and will expire at the time specified by
* faceIdTimeToLive after the detection call
* @param callback The callback

@@ -47,3 +50,4 @@ */

* @param faceId FaceId of the query face. User needs to call Face - Detect first to get a valid
* faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call
* faceId. Note that this faceId is not persisted and will expire at the time specified by
* faceIdTimeToLive after the detection call
* @param options The optional parameters
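For reference, a sketch of the Find Similar call these comments describe, assuming an already-constructed `FaceClient`, a query faceId from a prior detect call, and a hypothetical large face list id (`my-large-face-list`) that has already been created and trained:

```typescript
import { FaceClient } from "@azure/cognitiveservices-face";

// Hypothetical helper: search a large face list for faces similar to a query faceId.
async function findLookalikes(client: FaceClient, queryFaceId: string) {
  return client.face.findSimilar(queryFaceId, {
    largeFaceListId: "my-large-face-list",
    maxNumOfCandidatesReturned: 10, // 1-1000, default 20
    mode: "matchFace"               // or "matchPerson" (the default)
  });
}
```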

@@ -63,4 +67,4 @@ * @param callback The callback

* * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface) when
* you only have 2 candidate faces.
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface) when you only have 2
* candidate faces.
* * The 'recognitionModel' associated with the query faces' faceIds should be the same.
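A sketch of the Group call described here, assuming an existing `FaceClient` and 2–1000 faceIds from prior detect calls; `groupFaces` is a hypothetical wrapper:

```typescript
import { FaceClient } from "@azure/cognitiveservices-face";

// Hypothetical helper: cluster candidate faceIds; faces that do not fit any
// group are returned in messyGroup.
async function groupFaces(client: FaceClient, faceIds: string[]) {
  const result = await client.face.group(faceIds);
  console.log(`groups: ${result.groups.length}, ungrouped: ${result.messyGroup?.length ?? 0}`);
  return result;
}
```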

@@ -91,5 +95,4 @@ * @param faceIds Array of candidate faceId created by Face - Detect. The maximum is 1000 faces

* identification. See more in [PersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/train) and
* [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/train).
* Train](https://docs.microsoft.com/rest/api/faceapi/persongroup/train) and [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/train).
* <br/>

@@ -106,5 +109,4 @@ *

* confidenceThreshold. If no person is identified, the returned candidates will be an empty array.
* * Try [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar) when you
* need to find similar faces from a face list/large face list instead of a person group/large
* * Try [Face - Find Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar) when
* you need to find similar faces from a face list/large face list instead of a person group/large
* person group.
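A sketch of the Identify call, assuming an existing `FaceClient` and a trained person group whose id (`my-person-group`) is a placeholder; `identifyFaces` is a hypothetical helper:

```typescript
import { FaceClient } from "@azure/cognitiveservices-face";

// Hypothetical helper: identify up to 10 detected faces against a trained person group.
async function identifyFaces(client: FaceClient, faceIds: string[]) {
  const results = await client.face.identify(faceIds, {
    personGroupId: "my-person-group",
    maxNumOfCandidatesReturned: 1, // 1-5, default 1
    confidenceThreshold: 0.7       // optional; otherwise a server-side default is used
  });
  for (const r of results) {
    const best = r.candidates[0];
    console.log(r.faceId, best ? `${best.personId} (${best.confidence})` : "no match");
  }
  return results;
}
```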

@@ -165,11 +167,11 @@ * * The 'recognitionModel' associated with the query faces' faceIds should be the same as the

* is an identifier of the face feature and will be used in [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). The
* stored face feature(s) will expire and be deleted 24 hours after the original detection call.
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar). The stored face
* feature(s) will expire and be deleted at the time specified by faceIdTimeToLive after the
* original detection call.
* * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender,
* headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur,
* exposure and noise. Some of the results returned for specific attributes may not be highly
* accurate.
* exposure, noise, and mask. Some of the results returned for specific attributes may not be
* highly accurate.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is

@@ -180,8 +182,7 @@ * from 1KB to 6MB.

* * For optimal results when querying [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar)
* ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of
* 200x200 pixels (100 pixels between eyes).
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar) ('returnFaceId' is true),
* please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels
* between eyes).
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.

@@ -192,12 +193,3 @@ * Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
*

@@ -209,12 +201,4 @@ * * Different 'recognitionModel' values are provided. If follow-up operations like Verify,

* faceIds will be associated with the specified recognition model. More details, please refer to
* [How to specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'recognition_01': | The default recognition model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). All
* those faceIds created before 2019 March are bonded with this recognition model. |
* | 'recognition_02': | Recognition model released in 2019 March. |
* | 'recognition_03': | Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. |
* [Specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
* @param url Publicly reachable URL of an image
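Illustrative sketch only: the doc comment above covers Face - Detect With Url, including the new faceIdTimeToLive behavior and the mask attribute added in 4.2.0. Assuming a FaceClient built with an ApiKeyCredentials (key, endpoint and image URL below are placeholders), a call using these options might look like:

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

// Placeholders: substitute your own Face resource key, endpoint and image URL.
const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function detectFromUrl(): Promise<void> {
  const faces = await client.face.detectWithUrl("https://example.com/photo.jpg", {
    returnFaceId: true,
    returnFaceAttributes: ["age", "headPose", "mask"], // 'mask' is among the attributes listed in this release
    recognitionModel: "recognition_03",
    returnRecognitionModel: true,
    faceIdTimeToLive: 3600 // keep the transient faceId for 1 hour instead of the default 24 hours
  });
  for (const face of faces) {
    // The service may require a newer detection model before it populates 'mask'.
    console.log(face.faceId, face.faceRectangle, face.faceAttributes?.mask?.type);
  }
}

detectFromUrl().catch(console.error);
```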

@@ -265,11 +249,11 @@ * @param [options] The optional parameters

* is an identifier of the face feature and will be used in [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). The
* stored face feature(s) will expire and be deleted 24 hours after the original detection call.
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar). The stored face
* feature(s) will expire and be deleted at the time specified by faceIdTimeToLive after the
* original detection call.
* * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender,
* headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur,
* exposure and noise. Some of the results returned for specific attributes may not be highly
* accurate.
* exposure, noise, and mask. Some of the results returned for specific attributes may not be
* highly accurate.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is

@@ -280,8 +264,7 @@ * from 1KB to 6MB.

* * For optimal results when querying [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar)
* ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of
* 200x200 pixels (100 pixels between eyes).
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar) ('returnFaceId' is true),
* please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels
* between eyes).
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.

@@ -293,12 +276,2 @@ * Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum

* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*
* * Different 'recognitionModel' values are provided. If follow-up operations like Verify,

@@ -309,12 +282,4 @@ * Identify, Find Similar are needed, please specify the recognition model with 'recognitionModel'

* faceIds will be associated with the specified recognition model. More details, please refer to
* [How to specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'recognition_01': | The default recognition model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). All
* those faceIds created before 2019 March are bonded with this recognition model. |
* | 'recognition_02': | Recognition model released in 2019 March. |
* | 'recognition_03': | Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. |
* [Specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
* @param image An image stream.
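A rough sketch of the stream variant documented above, assuming the image is read from disk into a Buffer (file path, key and endpoint are placeholders):

```typescript
import { readFileSync } from "fs";
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function detectFromFile(path: string): Promise<void> {
  // Read the image into a Buffer; the stream overload accepts binary request bodies.
  const imageBuffer = readFileSync(path);
  const faces = await client.face.detectWithStream(imageBuffer, {
    returnFaceId: true,
    faceIdTimeToLive: 600 // expire the transient faceId after 10 minutes
  });
  console.log(`Detected ${faces.length} face(s) in ${path}`);
}

detectFromFile("./people.jpg").catch(console.error);
```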

@@ -321,0 +286,0 @@ * @param [options] The optional parameters

@@ -231,3 +231,4 @@ /*

Parameters.returnRecognitionModel,
Parameters.detectionModel
Parameters.detectionModel,
Parameters.faceIdTimeToLive
],

@@ -304,3 +305,4 @@ requestBody: {

Parameters.returnRecognitionModel,
Parameters.detectionModel
Parameters.detectionModel,
Parameters.faceIdTimeToLive
],

@@ -307,0 +309,0 @@ requestBody: {

@@ -16,17 +16,15 @@ import * as msRest from "@azure/ms-rest-js";

* <br /> Face list is a list of faces, up to 1,000 faces, and used by [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar).
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar).
* <br /> After creation, user should use [FaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/addfacefromurl) to
* import the faces. No image will be stored. Only the extracted face features are stored on server
* until [FaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/delete) is called.
* Face](https://docs.microsoft.com/rest/api/faceapi/facelist/addfacefromurl) to import the faces.
* No image will be stored. Only the extracted face features are stored on server until [FaceList -
* Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is called.
* <br /> Find Similar is used for scenarios like finding celebrity-like faces, similar face
* filtering, or as a lightweight face identification. But if the actual use is to identify a person,
* please use [PersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup)
* /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup)
* and [Face - Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* please use [PersonGroup](https://docs.microsoft.com/rest/api/faceapi/persongroup) /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup) and [Face -
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br /> Please consider
* [LargeFaceList](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist) when
* the face number is large. It can support up to 1,000,000 faces.
* [LargeFaceList](https://docs.microsoft.com/rest/api/faceapi/largefacelist) when the face number
* is large. It can support up to 1,000,000 faces.
* <br />'recognitionModel' should be specified to associate with this face list. The default value

@@ -38,8 +36,4 @@ * for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly

* model.
* * 'recognition_01': The default recognition model for [FaceList-
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/create). All those
* face lists created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
* Please refer to [Specify a face recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
* @param faceListId Id referencing a particular face list.
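As a hedged sketch of the face list creation flow described above (list id, name and userData are arbitrary sample values, and the options shape follows the parameters listed in this diff):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function createList(): Promise<void> {
  // Pin the recognition model at creation time; existing list features cannot be migrated later.
  await client.faceList.create("my-face-list", {
    name: "My face list",
    userData: "created from the SDK",
    recognitionModel: "recognition_03"
  });
  const lists = await client.faceList.list({ returnRecognitionModel: true });
  console.log(lists.map((l) => `${l.faceListId} (${l.recognitionModel})`));
}

createList().catch(console.error);
```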

@@ -118,3 +112,3 @@ * @param [options] The optional parameters

* To get face information inside faceList use [FaceList -
* Get](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/get)
* Get](https://docs.microsoft.com/rest/api/faceapi/facelist/get)
* @param [options] The optional parameters

@@ -161,7 +155,6 @@ * @returns Promise<Models.FaceListListResponse>

* will be stored. Only the extracted face feature will be stored on server until [FaceList -
* Delete Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/deleteface) or
* [FaceList - Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/delete)
* is called.
* Delete Face](https://docs.microsoft.com/rest/api/faceapi/facelist/deleteface) or [FaceList -
* Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better detection and recognition precision. Please consider

@@ -174,4 +167,4 @@ * high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -186,12 +179,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [FaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param faceListId Id referencing a particular face list.
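A minimal sketch of adding a face to a face list with an explicit targetFace rectangle, as the doc above recommends when the image contains more than one face (the image URL and list id are placeholders):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function addFace(): Promise<void> {
  const imageUrl = "https://example.com/group-photo.jpg"; // placeholder URL
  // Detect first so we can pass the rectangle of the face we actually want to add.
  const [first] = await client.face.detectWithUrl(imageUrl, { returnFaceId: false });
  if (!first || !first.faceRectangle) {
    throw new Error("No face detected in the source image");
  }
  const { left, top, width, height } = first.faceRectangle;
  const added = await client.faceList.addFaceFromUrl("my-face-list", imageUrl, {
    targetFace: [left, top, width, height],
    userData: "front-door camera, 2021-03-01"
  });
  console.log("persistedFaceId:", added.persistedFaceId);
}

addFace().catch(console.error);
```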

@@ -221,7 +205,6 @@ * @param url Publicly reachable URL of an image

* will be stored. Only the extracted face feature will be stored on server until [FaceList -
* Delete Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/deleteface) or
* [FaceList - Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/delete)
* is called.
* Delete Face](https://docs.microsoft.com/rest/api/faceapi/facelist/deleteface) or [FaceList -
* Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better detection and recognition precision. Please consider

@@ -234,4 +217,4 @@ * high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -246,12 +229,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [FaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param faceListId Id referencing a particular face list.

@@ -258,0 +232,0 @@ * @param image An image stream.

@@ -16,18 +16,15 @@ import * as msRest from "@azure/ms-rest-js";

* <br /> Large face list is a list of faces, up to 1,000,000 faces, and used by [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar).
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar).
* <br /> After creation, user should use [LargeFaceList Face -
* Add](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/addfacefromurl) to
* import the faces and [LargeFaceList -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/train) to make
* it ready for [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). No image
* Add](https://docs.microsoft.com/rest/api/faceapi/largefacelist/addfacefromurl) to import the
* faces and [LargeFaceList -
* Train](https://docs.microsoft.com/rest/api/faceapi/largefacelist/train) to make it ready for
* [Face - Find Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar). No image
* will be stored. Only the extracted face features are stored on server until [LargeFaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/delete) is called.
* <br /> Find Similar is used for scenarios like finding celebrity-like faces, similar face
* filtering, or as a lightweight face identification. But if the actual use is to identify a person,
* please use [PersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup)
* /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup)
* and [Face - Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* please use [PersonGroup](https://docs.microsoft.com/rest/api/faceapi/persongroup) /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup) and [Face -
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br/>'recognitionModel' should be specified to associate with this large face list. The default

@@ -38,9 +35,4 @@ * value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly

* features in a large face list can't be updated to features extracted by another version of
* recognition model.
* * 'recognition_01': The default recognition model for [LargeFaceList-
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/create). All
* those large face lists created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
* recognition model. Please refer to [Specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
*
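Since a large face list must be trained before Find Similar can query it, a hedged end-to-end sketch might look like the following (list id and image URLs are placeholders; the polling loop simply waits for training to finish):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

async function buildLargeFaceList(): Promise<void> {
  const listId = "my-large-face-list"; // arbitrary sample id
  await client.largeFaceList.create(listId, { name: "Archive faces" });
  await client.largeFaceList.addFaceFromUrl(listId, "https://example.com/face1.jpg");
  await client.largeFaceList.addFaceFromUrl(listId, "https://example.com/face2.jpg");

  // Unlike a plain face list, a large face list must be trained before Find Similar can use it.
  await client.largeFaceList.train(listId);
  let status = await client.largeFaceList.getTrainingStatus(listId);
  while (status.status === "nonstarted" || status.status === "running") {
    await delay(1000);
    status = await client.largeFaceList.getTrainingStatus(listId);
  }
  console.log("training finished:", status.status);
}

buildLargeFaceList().catch(console.error);
```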

@@ -142,3 +134,3 @@ * Large face list quota:

* To get face information inside largeFaceList use [LargeFaceList Face -
* Get](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/getface)<br />
* Get](https://docs.microsoft.com/rest/api/faceapi/largefacelist/getface)<br />
* * Large face lists are stored in alphabetical order of largeFaceListId.

@@ -258,9 +250,7 @@ * * "start" parameter (string, optional) is a user-provided largeFaceListId value that returned

* will be stored. Only the extracted face feature will be stored on server until [LargeFaceList
* Face -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/deleteface) or
* [LargeFaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/delete) is
* Face - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/deleteface) or
* [LargeFaceList - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -272,4 +262,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -285,11 +275,2 @@ * will cause failures.

* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargeFaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*

@@ -342,9 +323,7 @@ * Quota:

* will be stored. Only the extracted face feature will be stored on server until [LargeFaceList
* Face -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/deleteface) or
* [LargeFaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/delete) is
* Face - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/deleteface) or
* [LargeFaceList - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -356,4 +335,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -368,12 +347,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargeFaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
*

@@ -380,0 +350,0 @@ * Quota:

@@ -19,13 +19,11 @@ import * as msRest from "@azure/ms-rest-js";

* <br /> After creation, use [LargePersonGroup Person -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/create)
* to add person into the group, and call [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/train) to get
* this group ready for [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* Create](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/create) to add person
* into the group, and call [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/train) to get this group
* ready for [Face - Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br /> No image will be stored. Only the person's extracted face features and userData will be
* stored on server until [LargePersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/delete)
* or [LargePersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/delete) or
* [LargePersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/delete)
* is called.
* <br/>'recognitionModel' should be specified to associate with this large person group. The

@@ -36,9 +34,4 @@ * default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please

* Existing face features in a large person group can't be updated to features extracted by another
* version of recognition model.
* * 'recognition_01': The default recognition model for [LargePersonGroup -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/create). All
* those large person groups created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
* version of recognition model. Please refer to [Specify a face recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
*
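A small sketch of the large person group workflow the doc above describes, assuming the operation names shown elsewhere in this diff (group id, person name and image URL are placeholders):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function buildGroup(): Promise<void> {
  const groupId = "employees"; // arbitrary sample id
  await client.largePersonGroup.create(groupId, {
    name: "Employees",
    recognitionModel: "recognition_03"
  });

  // Create a person, attach one or more faces, then train the group so Face - Identify can use it.
  const person = await client.largePersonGroupPerson.create(groupId, { name: "Anna" });
  await client.largePersonGroupPerson.addFaceFromUrl(
    groupId,
    person.personId,
    "https://example.com/anna.jpg" // placeholder URL
  );
  await client.largePersonGroup.train(groupId);
}

buildGroup().catch(console.error);
```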

@@ -86,5 +79,4 @@ * Large person group quota:

* recognitionModel. This API returns large person group information only, use [LargePersonGroup
* Person -
* List](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/list)
* instead to retrieve person information under the large person group.
* Person - List](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/list) instead
* to retrieve person information under the large person group.
* @param largePersonGroupId Id referencing a particular large person group.

@@ -91,0 +83,0 @@ * @param [options] The optional parameters

@@ -196,10 +196,9 @@ import * as msRest from "@azure/ms-rest-js";

* PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/deleteface),
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/deleteface),
* [LargePersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/delete)
* or [LargePersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/delete) or
* [LargePersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/delete)
* is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -212,4 +211,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -225,11 +224,2 @@ * will cause failures.

* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargePersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param largePersonGroupId Id referencing a particular large person group.

@@ -263,10 +253,9 @@ * @param personId Id referencing a particular person.

* PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/deleteface),
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/deleteface),
* [LargePersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/delete)
* or [LargePersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/delete) or
* [LargePersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/delete)
* is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -279,4 +268,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -291,12 +280,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargePersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param largePersonGroupId Id referencing a particular large person group.

@@ -303,0 +283,0 @@ * @param personId Id referencing a particular person.

@@ -18,13 +18,10 @@ import * as msRest from "@azure/ms-rest-js";

* <br /> After creation, use [PersonGroup Person -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/create) to
* add persons into the group, and then call [PersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/train) to get this
* group ready for [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* Create](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/create) to add persons
* into the group, and then call [PersonGroup -
* Train](https://docs.microsoft.com/rest/api/faceapi/persongroup/train) to get this group ready
* for [Face - Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br /> No image will be stored. Only the person's extracted face features and userData will be
* stored on server until [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is called.
* <br/>'recognitionModel' should be specified to associate with this person group. The default

@@ -36,8 +33,2 @@ * value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly

* recognition model.
* * 'recognition_01': The default recognition model for [PersonGroup -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/create). All
* those person groups created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
*

@@ -48,3 +39,3 @@ * Person group quota:

* * to handle larger scale face identification problem, please consider using
* [LargePersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup).
* [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup).
* @param personGroupId Id referencing a particular person group.

@@ -88,3 +79,3 @@ * @param [options] The optional parameters

* personGroup, use [PersonGroup Person -
* List](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/list).
* List](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/list).
* @param personGroupId Id referencing a particular person group.

@@ -91,0 +82,0 @@ * @param [options] The optional parameters

@@ -170,10 +170,8 @@ import * as msRest from "@azure/ms-rest-js";

* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/deleteface),
* [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/deleteface), [PersonGroup
* Person - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or
* [PersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -186,4 +184,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -220,10 +218,8 @@ * will cause failures.

* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/deleteface),
* [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/deleteface), [PersonGroup
* Person - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or
* [PersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -236,4 +232,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* an error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -248,12 +244,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [PersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param personGroupId Id referencing a particular person group.

@@ -286,10 +273,8 @@ * @param personId Id referencing a particular person.

* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/deleteface),
* [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/deleteface), [PersonGroup
* Person - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or
* [PersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -302,4 +287,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* an error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -314,12 +299,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [PersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param personGroupId Id referencing a particular person group.

@@ -326,0 +302,0 @@ * @param personId Id referencing a particular person.

The MIT License (MIT)
Copyright (c) 2020 Microsoft
Copyright (c) 2021 Microsoft

@@ -5,0 +5,0 @@ Permission is hereby granted, free of charge, to any person obtaining a copy

@@ -5,3 +5,3 @@ {

"description": "FaceClient Library with typescript type definitions for node.js and browser.",
"version": "4.1.0",
"version": "4.2.0",
"dependencies": {

@@ -8,0 +8,0 @@ "@azure/ms-rest-js": "^2.0.4",

@@ -27,3 +27,3 @@ ## An isomorphic javascript sdk for - FaceClient

##### Sample code
The following sample detects the facial features on the given image. To know more, refer to the [Azure Documentation on Face APIs](https://docs.microsoft.com/en-us/azure/cognitive-services/face/overview)
The following sample detects the facial features on the given image. To know more, refer to the [Azure Documentation on Face APIs](https://docs.microsoft.com/azure/cognitive-services/face/overview)

@@ -30,0 +30,0 @@ ```javascript

@@ -14,3 +14,3 @@ /*

const packageName = "@azure/cognitiveservices-face";
const packageVersion = "4.1.0";
const packageVersion = "4.2.0";

@@ -17,0 +17,0 @@ export class FaceClientContext extends msRest.ServiceClient {

@@ -33,2 +33,3 @@ /*

Makeup,
Mask,
Noise,

@@ -35,0 +36,0 @@ Occlusion,

@@ -681,2 +681,32 @@ /*

export const Mask: msRest.CompositeMapper = {
serializedName: "Mask",
type: {
name: "Composite",
className: "Mask",
modelProperties: {
type: {
nullable: false,
serializedName: "type",
type: {
name: "Enum",
allowedValues: [
"noMask",
"faceMask",
"otherMaskOrOcclusion",
"uncertain"
]
}
},
noseAndMouthCovered: {
nullable: false,
serializedName: "noseAndMouthCovered",
type: {
name: "Boolean"
}
}
}
}
};
export const FaceAttributes: msRest.CompositeMapper = {
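The new Mask mapper above deserializes the mask attribute added in 4.2.0 (a mask type plus a noseAndMouthCovered flag). A short sketch of reading it off a detection result, assuming "mask" was requested via returnFaceAttributes:

```typescript
import { FaceModels } from "@azure/cognitiveservices-face";

// Inspect the mask attribute on a DetectedFace returned by detectWithUrl/detectWithStream.
function describeMask(face: FaceModels.DetectedFace): string {
  const mask = face.faceAttributes?.mask;
  if (!mask) {
    return "mask attribute not returned";
  }
  // mask.type is one of: "noMask" | "faceMask" | "otherMaskOrOcclusion" | "uncertain"
  return `${mask.type}, nose and mouth covered: ${mask.noseAndMouthCovered}`;
}
```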

@@ -796,2 +826,9 @@ serializedName: "FaceAttributes",

}
},
mask: {
serializedName: "mask",
type: {
name: "Composite",
className: "Mask"
}
}

@@ -798,0 +835,0 @@ }

@@ -56,2 +56,19 @@ /*

};
export const faceIdTimeToLive: msRest.OperationQueryParameter = {
parameterPath: [
"options",
"faceIdTimeToLive"
],
mapper: {
serializedName: "faceIdTimeToLive",
defaultValue: 86400,
constraints: {
InclusiveMaximum: 86400,
InclusiveMinimum: 60
},
type: {
name: "Number"
}
}
};
export const faceListId: msRest.OperationURLParameter = {
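The faceIdTimeToLive query parameter defined above is optional, constrained to 60–86400 seconds, and defaults to 86400 (24 hours). A minimal sketch of overriding it on a detect call (key, endpoint and URL are placeholders):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

// Ask the service to keep the transient faceId for only 5 minutes (the minimum is 60 seconds).
client.face
  .detectWithUrl("https://example.com/photo.jpg", { faceIdTimeToLive: 300 })
  .then((faces) => console.log(faces.map((f) => f.faceId)))
  .catch(console.error);
```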

@@ -182,3 +199,4 @@ parameterPath: "faceListId",

"exposure",
"noise"
"noise",
"mask"
]

@@ -185,0 +203,0 @@ }

@@ -31,10 +31,11 @@ /*

* Given query face's faceId, to search the similar-looking faces from a faceId array, a face list
* or a large face list. faceId array contains the faces created by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), which
* will expire 24 hours after creation. A "faceListId" is created by [FaceList -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/create) containing
* persistedFaceIds that will not expire. And a "largeFaceListId" is created by [LargeFaceList -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/create)
* containing persistedFaceIds that will also not expire. Depending on the input the returned
* similar faces list contains faceIds or persistedFaceIds ranked by similarity.
* or a large face list. faceId array contains the faces created by [Face - Detect With
* Url](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl) or [Face - Detect With
* Stream](https://docs.microsoft.com/rest/api/faceapi/face/detectwithstream), which will expire at
* the time specified by faceIdTimeToLive after creation. A "faceListId" is created by [FaceList -
* Create](https://docs.microsoft.com/rest/api/faceapi/facelist/create) containing persistedFaceIds
* that will not expire. And a "largeFaceListId" is created by [LargeFaceList -
* Create](https://docs.microsoft.com/rest/api/faceapi/largefacelist/create) containing
* persistedFaceIds that will also not expire. Depending on the input the returned similar faces
* list contains faceIds or persistedFaceIds ranked by similarity.
* <br/>Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the
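A hedged sketch of Find Similar against a large face list, following the modes described above (the list id and query image URL are placeholders; faceListId or a faceIds array could be passed instead):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function findSimilarInList(): Promise<void> {
  // Detect the query face first; its faceId expires after faceIdTimeToLive seconds.
  const [query] = await client.face.detectWithUrl("https://example.com/query.jpg");
  if (!query?.faceId) {
    throw new Error("No face detected in the query image");
  }
  const matches = await client.face.findSimilar(query.faceId, {
    largeFaceListId: "my-large-face-list", // placeholder id
    maxNumOfCandidatesReturned: 5,
    mode: "matchFace" // rank purely by similarity instead of filtering to the same person
  });
  for (const m of matches) {
    console.log(m.persistedFaceId, m.confidence);
  }
}

findSimilarInList().catch(console.error);
```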

@@ -49,3 +50,4 @@ * default mode that it tries to find faces of the same person as possible by using internal

* @param faceId FaceId of the query face. User needs to call Face - Detect first to get a valid
* faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call
* faceId. Note that this faceId is not persisted and will expire at the time specified by
* faceIdTimeToLive after the detection call
* @param [options] The optional parameters

@@ -57,3 +59,4 @@ * @returns Promise<Models.FaceFindSimilarResponse>

* @param faceId FaceId of the query face. User needs to call Face - Detect first to get a valid
* faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call
* faceId. Note that this faceId is not persisted and will expire at the time specified by
* faceIdTimeToLive after the detection call
* @param callback The callback

@@ -64,3 +67,4 @@ */

* @param faceId FaceId of the query face. User needs to call Face - Detect first to get a valid
* faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call
* faceId. Note that this faceId is not persisted and will expire at the time specified by
* faceIdTimeToLive after the detection call
* @param options The optional parameters

@@ -90,4 +94,4 @@ * @param callback The callback

* * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface) when
* you only have 2 candidate faces.
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface) when you only have 2
* candidate faces.
* * The 'recognitionModel' associated with the query faces' faceIds should be the same.
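Since the note above suggests Verify when only two candidate faces are available, a small sketch contrasting Group and Verify might look like this (the faceIds are placeholders obtained from earlier detect calls made with returnFaceId set to true):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function groupOrVerify(faceIds: string[]): Promise<void> {
  if (faceIds.length === 2) {
    // With exactly two candidates, the doc above recommends Verify instead of Group.
    const result = await client.face.verifyFaceToFace(faceIds[0], faceIds[1]);
    console.log("same person:", result.isIdentical, "confidence:", result.confidence);
    return;
  }
  const grouped = await client.face.group(faceIds);
  console.log("groups:", grouped.groups, "ungrouped faces:", grouped.messyGroup);
}

// Placeholder faceIds; each must come from a prior detect call.
groupOrVerify(["<faceId-1>", "<faceId-2>", "<faceId-3>"]).catch(console.error);
```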

@@ -128,5 +132,4 @@ * @param faceIds Array of candidate faceId created by Face - Detect. The maximum is 1000 faces

* identification. See more in [PersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/train) and
* [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/train).
* Train](https://docs.microsoft.com/rest/api/faceapi/persongroup/train) and [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/train).
* <br/>

@@ -143,5 +146,4 @@ *

* confidenceThreshold. If no person is identified, the returned candidates will be an empty array.
* * Try [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar) when you
* need to find similar faces from a face list/large face list instead of a person group/large
* * Try [Face - Find Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar) when
* you need to find similar faces from a face list/large face list instead of a person group/large
* person group.
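A rough sketch of Identify against a trained large person group, as the passage above describes (the group id and image URL are placeholders, and the group is assumed to have been trained already):

```typescript
import * as msRest from "@azure/ms-rest-js";
import { FaceClient } from "@azure/cognitiveservices-face";

const credentials = new msRest.ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": "<subscription-key>" } });
const client = new FaceClient(credentials, "https://<resource-name>.cognitiveservices.azure.com/");

async function identifyFaces(): Promise<void> {
  const faces = await client.face.detectWithUrl("https://example.com/lobby.jpg");
  const faceIds = faces.map((f) => f.faceId!).filter(Boolean);

  // The group must already be trained (see LargePersonGroup - Train above).
  const results = await client.face.identify(faceIds, {
    largePersonGroupId: "employees", // placeholder id; personGroupId is the other option
    maxNumOfCandidatesReturned: 1,
    confidenceThreshold: 0.6
  });
  for (const r of results) {
    const best = r.candidates[0];
    console.log(r.faceId, best ? `${best.personId} (${best.confidence})` : "no match");
  }
}

identifyFaces().catch(console.error);
```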

@@ -223,11 +225,11 @@ * * The 'recognitionModel' associated with the query faces' faceIds should be the same as the

* is an identifier of the face feature and will be used in [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). The
* stored face feature(s) will expire and be deleted 24 hours after the original detection call.
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar). The stored face
* feature(s) will expire and be deleted at the time specified by faceIdTimeToLive after the
* original detection call.
* * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender,
* headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur,
* exposure and noise. Some of the results returned for specific attributes may not be highly
* accurate.
* exposure, noise, and mask. Some of the results returned for specific attributes may not be
* highly accurate.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is

@@ -238,8 +240,7 @@ * from 1KB to 6MB.

* * For optimal results when querying [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar)
* ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of
* 200x200 pixels (100 pixels between eyes).
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar) ('returnFaceId' is true),
* please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels
* between eyes).
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.

@@ -250,12 +251,3 @@ * Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
*

@@ -267,12 +259,4 @@ * * Different 'recognitionModel' values are provided. If follow-up operations like Verify,

* faceIds will be associated with the specified recognition model. More details, please refer to
* [How to specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'recognition_01': | The default recognition model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). All
* those faceIds created before 2019 March are bonded with this recognition model. |
* | 'recognition_02': | Recognition model released in 2019 March. |
* | 'recognition_03': | Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. |
* [Specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
* @param url Publicly reachable URL of an image

@@ -344,11 +328,11 @@ * @param [options] The optional parameters

* is an identifier of the face feature and will be used in [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). The
* stored face feature(s) will expire and be deleted 24 hours after the original detection call.
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar). The stored face
* feature(s) will expire and be deleted at the time specified by faceIdTimeToLive after the
* original detection call.
* * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender,
* headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur,
* exposure and noise. Some of the results returned for specific attributes may not be highly
* accurate.
* exposure, noise, and mask. Some of the results returned for specific attributes may not be
* highly accurate.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is

@@ -359,8 +343,7 @@ * from 1KB to 6MB.

* * For optimal results when querying [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and
* [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar)
* ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of
* 200x200 pixels (100 pixels between eyes).
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify), [Face -
* Verify](https://docs.microsoft.com/rest/api/faceapi/face/verifyfacetoface), and [Face - Find
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar) ('returnFaceId' is true),
* please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels
* between eyes).
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.

@@ -372,12 +355,2 @@ * Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum

* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*
* * Different 'recognitionModel' values are provided. If follow-up operations like Verify,

@@ -388,12 +361,4 @@ * Identify, Find Similar are needed, please specify the recognition model with 'recognitionModel'

* faceIds will be associated with the specified recognition model. More details, please refer to
* [How to specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'recognition_01': | The default recognition model for [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). All
* those faceIds created before 2019 March are bonded with this recognition model. |
* | 'recognition_02': | Recognition model released in 2019 March. |
* | 'recognition_03': | Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. |
* [Specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
* @param image An image stream.

@@ -603,3 +568,4 @@ * @param [options] The optional parameters

Parameters.returnRecognitionModel,
Parameters.detectionModel
Parameters.detectionModel,
Parameters.faceIdTimeToLive
],

@@ -684,3 +650,4 @@ requestBody: {

Parameters.returnRecognitionModel,
Parameters.detectionModel
Parameters.detectionModel,
Parameters.faceIdTimeToLive
],

@@ -687,0 +654,0 @@ requestBody: {
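Putting the detection options from this file's diff together, here is a hedged sketch of a 4.2.0-style detect call: it requests a few attributes, pins the detection and recognition models, and uses the faceIdTimeToLive parameter added in this release so the cached face features expire after 10 minutes rather than the default 24 hours (86400 seconds). The environment variable names are placeholders; confirm exact option names against the package typings.

```ts
import { FaceClient } from "@azure/cognitiveservices-face";
import { ApiKeyCredentials } from "@azure/ms-rest-js";

const client = new FaceClient(
  new ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": process.env.FACE_KEY! } }),
  process.env.FACE_ENDPOINT!
);

async function detect(imageUrl: string) {
  const faces = await client.face.detectWithUrl(imageUrl, {
    returnFaceId: true,
    returnFaceLandmarks: false,
    returnFaceAttributes: ["headPose", "glasses", "emotion"],
    detectionModel: "detection_01",      // detection_02 handles small/side/blurry faces better, but drops attributes/landmarks
    recognitionModel: "recognition_03",  // returned faceIds are bound to this model
    returnRecognitionModel: true,
    faceIdTimeToLive: 600                // seconds; new in 4.2.0, default 86400
  });
  for (const face of faces) {
    console.log(face.faceId, face.faceRectangle, face.faceAttributes?.emotion);
  }
  return faces;
}
```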

@@ -33,17 +33,15 @@ /*

* <br /> Face list is a list of faces, up to 1,000 faces, and used by [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar).
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar).
* <br /> After creation, user should use [FaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/addfacefromurl) to
* import the faces. No image will be stored. Only the extracted face features are stored on server
* until [FaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/delete) is called.
* Face](https://docs.microsoft.com/rest/api/faceapi/facelist/addfacefromurl) to import the faces.
* No image will be stored. Only the extracted face features are stored on server until [FaceList -
* Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is called.
* <br /> Find Similar is used for scenario like finding celebrity-like faces, similar face
* filtering, or as a light way face identification. But if the actual use is to identify person,
* please use [PersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup)
* /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup)
* and [Face - Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* please use [PersonGroup](https://docs.microsoft.com/rest/api/faceapi/persongroup) /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup) and [Face -
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br /> Please consider
* [LargeFaceList](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist) when
* the face number is large. It can support up to 1,000,000 faces.
* [LargeFaceList](https://docs.microsoft.com/rest/api/faceapi/largefacelist) when the face number
* is large. It can support up to 1,000,000 faces.
* <br />'recognitionModel' should be specified to associate with this face list. The default value

@@ -55,8 +53,4 @@ * for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly

* model.
* * 'recognition_01': The default recognition model for [FaceList-
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/create). All those
* face lists created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
* Please Refer to [Specify a face recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
* @param faceListId Id referencing a particular face list.

@@ -175,3 +169,3 @@ * @param [options] The optional parameters

* To get face information inside faceList use [FaceList -
* Get](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/get)
* Get](https://docs.microsoft.com/rest/api/faceapi/facelist/get)
* @param [options] The optional parameters

@@ -238,7 +232,6 @@ * @returns Promise<Models.FaceListListResponse>

* will be stored. Only the extracted face feature will be stored on server until [FaceList -
* Delete Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/deleteface) or
* [FaceList - Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/delete)
* is called.
* Delete Face](https://docs.microsoft.com/rest/api/faceapi/facelist/deleteface) or [FaceList -
* Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better detection and recognition precision. Please consider

@@ -251,4 +244,4 @@ * high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -263,12 +256,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [FaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param faceListId Id referencing a particular face list.

@@ -309,7 +293,6 @@ * @param url Publicly reachable URL of an image

* will be stored. Only the extracted face feature will be stored on server until [FaceList -
* Delete Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/deleteface) or
* [FaceList - Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/delete)
* is called.
* Delete Face](https://docs.microsoft.com/rest/api/faceapi/facelist/deleteface) or [FaceList -
* Delete](https://docs.microsoft.com/rest/api/faceapi/facelist/delete) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better detection and recognition precision. Please consider

@@ -322,4 +305,4 @@ * high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -334,12 +317,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [FaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param faceListId Id referencing a particular face list.

@@ -346,0 +320,0 @@ * @param image An image stream.
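To make the face-list workflow in the hunks above concrete (create a list, add faces from URLs, then run Face - Find Similar against it), here is a rough sketch under the assumption that the 4.x client exposes these operations as shown; the list id, image URLs, and the shape of the create options are illustrative and should be checked against the generated typings.

```ts
import { FaceClient } from "@azure/cognitiveservices-face";
import { ApiKeyCredentials } from "@azure/ms-rest-js";

const client = new FaceClient(
  new ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": process.env.FACE_KEY! } }),
  process.env.FACE_ENDPOINT!
);

async function findSimilarInFaceList(queryImageUrl: string, knownFaceUrls: string[]) {
  const faceListId = "sample-face-list"; // illustrative id
  await client.faceList.create(faceListId, {
    name: "Sample face list",
    recognitionModel: "recognition_03"   // fixed at creation time
  });

  // Only the extracted features are stored; the images themselves are not.
  for (const url of knownFaceUrls) {
    await client.faceList.addFaceFromUrl(faceListId, url, { userData: url });
  }

  // Detect the query face with the same recognition model, then search the list.
  const [queryFace] = await client.face.detectWithUrl(queryImageUrl, {
    returnFaceId: true,
    recognitionModel: "recognition_03"
  });
  return client.face.findSimilar(queryFace.faceId!, {
    faceListId,
    maxNumOfCandidatesReturned: 5
  });
}
```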

@@ -33,18 +33,15 @@ /*

* <br /> Large face list is a list of faces, up to 1,000,000 faces, and used by [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar).
* Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar).
* <br /> After creation, user should use [LargeFaceList Face -
* Add](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/addfacefromurl) to
* import the faces and [LargeFaceList -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/train) to make
* it ready for [Face - Find
* Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). No image
* Add](https://docs.microsoft.com/rest/api/faceapi/largefacelist/addfacefromurl) to import the
* faces and [LargeFaceList -
* Train](https://docs.microsoft.com/rest/api/faceapi/largefacelist/train) to make it ready for
* [Face - Find Similar](https://docs.microsoft.com/rest/api/faceapi/face/findsimilar). No image
* will be stored. Only the extracted face features are stored on server until [LargeFaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/delete) is called.
* <br /> Find Similar is used for scenario like finding celebrity-like faces, similar face
* filtering, or as a light way face identification. But if the actual use is to identify person,
* please use [PersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup)
* /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup)
* and [Face - Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* please use [PersonGroup](https://docs.microsoft.com/rest/api/faceapi/persongroup) /
* [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup) and [Face -
* Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br/>'recognitionModel' should be specified to associate with this large face list. The default

@@ -55,9 +52,4 @@ * value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly

* features in a large face list can't be updated to features extracted by another version of
* recognition model.
* * 'recognition_01': The default recognition model for [LargeFaceList-
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/create). All
* those large face lists created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
* recognition model. Please refer to [Specify a recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
*

@@ -209,3 +201,3 @@ * Large face list quota:

* To get face information inside largeFaceList use [LargeFaceList Face -
* Get](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/getface)<br />
* Get](https://docs.microsoft.com/rest/api/faceapi/largefacelist/getface)<br />
* * Large face lists are stored in alphabetical order of largeFaceListId.

@@ -377,9 +369,7 @@ * * "start" parameter (string, optional) is a user-provided largeFaceListId value that returned

* will be stored. Only the extracted face feature will be stored on server until [LargeFaceList
* Face -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/deleteface) or
* [LargeFaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/delete) is
* Face - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/deleteface) or
* [LargeFaceList - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -391,4 +381,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -404,11 +394,2 @@ * will cause failures.

* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargeFaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*

@@ -482,9 +463,7 @@ * Quota:

* will be stored. Only the extracted face feature will be stored on server until [LargeFaceList
* Face -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/deleteface) or
* [LargeFaceList -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/delete) is
* Face - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/deleteface) or
* [LargeFaceList - Delete](https://docs.microsoft.com/rest/api/faceapi/largefacelist/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -496,4 +475,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -508,12 +487,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargeFaceList - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
*

@@ -520,0 +490,0 @@ * Quota:
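Unlike a plain face list, a large face list must be trained before Face - Find Similar can use it. The sketch below shows that extra step with a simple training-status poll; the ids and the one-second polling interval are illustrative, and the method names are assumed to match the generated client.

```ts
import { FaceClient } from "@azure/cognitiveservices-face";
import { ApiKeyCredentials } from "@azure/ms-rest-js";

const client = new FaceClient(
  new ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": process.env.FACE_KEY! } }),
  process.env.FACE_ENDPOINT!
);

async function buildLargeFaceList(largeFaceListId: string, faceUrls: string[]) {
  await client.largeFaceList.create(largeFaceListId, { name: largeFaceListId });

  for (const url of faceUrls) {
    await client.largeFaceList.addFaceFromUrl(largeFaceListId, url);
  }

  // Training is asynchronous: kick it off, then poll until it finishes.
  await client.largeFaceList.train(largeFaceListId);
  for (;;) {
    const status = await client.largeFaceList.getTrainingStatus(largeFaceListId);
    if (status.status === "succeeded") break;
    if (status.status === "failed") throw new Error(status.message ?? "training failed");
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
}
```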

@@ -36,13 +36,11 @@ /*

* <br /> After creation, use [LargePersonGroup Person -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/create)
* to add person into the group, and call [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/train) to get
* this group ready for [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* Create](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/create) to add person
* into the group, and call [LargePersonGroup -
* Train](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/train) to get this group
* ready for [Face - Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br /> No image will be stored. Only the person's extracted face features and userData will be
* stored on server until [LargePersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/delete)
* or [LargePersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/delete) or
* [LargePersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/delete)
* is called.
* <br/>'recognitionModel' should be specified to associate with this large person group. The

@@ -53,9 +51,4 @@ * default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please

* Existing face features in a large person group can't be updated to features extracted by another
* version of recognition model.
* * 'recognition_01': The default recognition model for [LargePersonGroup -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/create). All
* those large person groups created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
* version of recognition model. Please refer to [Specify a face recognition
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model).
*

@@ -123,5 +116,4 @@ * Large person group quota:

* recognitionModel. This API returns large person group information only, use [LargePersonGroup
* Person -
* List](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/list)
* instead to retrieve person information under the large person group.
* Person - List](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/list) instead
* to retrieve person information under the large person group.
* @param largePersonGroupId Id referencing a particular large person group.

@@ -128,0 +120,0 @@ * @param [options] The optional parameters

@@ -302,10 +302,9 @@ /*

* PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/deleteface),
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/deleteface),
* [LargePersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/delete)
* or [LargePersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/delete) or
* [LargePersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/delete)
* is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -318,4 +317,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -331,11 +330,2 @@ * will cause failures.

* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargePersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param largePersonGroupId Id referencing a particular large person group.

@@ -381,10 +371,9 @@ * @param personId Id referencing a particular person.

* PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/deleteface),
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/deleteface),
* [LargePersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/delete)
* or [LargePersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroupperson/delete) or
* [LargePersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/largepersongroup/delete)
* is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -397,4 +386,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -409,12 +398,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargePersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param largePersonGroupId Id referencing a particular large person group.

@@ -421,0 +401,0 @@ * @param personId Id referencing a particular person.
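As a hedged end-to-end sketch of the large-person-group flow described above: create the group and a person, add the person's faces, train, then identify a face detected in a new image. The group id, person name, image URL, and option shapes (including whether name is passed positionally or in the options bag) are assumptions to verify against the SDK typings.

```ts
import { FaceClient } from "@azure/cognitiveservices-face";
import { ApiKeyCredentials } from "@azure/ms-rest-js";

const client = new FaceClient(
  new ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": process.env.FACE_KEY! } }),
  process.env.FACE_ENDPOINT!
);

async function identifyInLargePersonGroup(probeImageUrl: string) {
  const groupId = "employees"; // illustrative id
  await client.largePersonGroup.create(groupId, {
    name: "Employees",
    recognitionModel: "recognition_03"
  });

  const person = await client.largePersonGroupPerson.create(groupId, { name: "Alex" });
  await client.largePersonGroupPerson.addFaceFromUrl(
    groupId, person.personId, "https://example.com/alex-1.jpg" // placeholder URL
  );

  await client.largePersonGroup.train(groupId);
  // In real code, poll client.largePersonGroup.getTrainingStatus(groupId) until "succeeded".

  const [face] = await client.face.detectWithUrl(probeImageUrl, {
    returnFaceId: true,
    recognitionModel: "recognition_03"
  });
  return client.face.identify([face.faceId!], {
    largePersonGroupId: groupId,
    confidenceThreshold: 0.6
  });
}
```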

@@ -35,13 +35,10 @@ /*

* <br /> After creation, use [PersonGroup Person -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/create) to
* add persons into the group, and then call [PersonGroup -
* Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/train) to get this
* group ready for [Face -
* Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify).
* Create](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/create) to add persons
* into the group, and then call [PersonGroup -
* Train](https://docs.microsoft.com/rest/api/faceapi/persongroup/train) to get this group ready
* for [Face - Identify](https://docs.microsoft.com/rest/api/faceapi/face/identify).
* <br /> No image will be stored. Only the person's extracted face features and userData will be
* stored on server until [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* called.
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is called.
* <br/>'recognitionModel' should be specified to associate with this person group. The default

@@ -53,8 +50,2 @@ * value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly

* recognition model.
* * 'recognition_01': The default recognition model for [PersonGroup -
* Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/create). All
* those person groups created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March.
* * 'recognition_03': Recognition model released in 2020 May. 'recognition_03' is recommended
* since its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
*

@@ -65,3 +56,3 @@ * Person group quota:

* * to handle larger scale face identification problem, please consider using
* [LargePersonGroup](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup).
* [LargePersonGroup](https://docs.microsoft.com/rest/api/faceapi/largepersongroup).
* @param personGroupId Id referencing a particular person group.

@@ -125,3 +116,3 @@ * @param [options] The optional parameters

* personGroup, use [PersonGroup Person -
* List](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/list).
* List](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/list).
* @param personGroupId Id referencing a particular person group.

@@ -128,0 +119,0 @@ * @param [options] The optional parameters

@@ -264,10 +264,8 @@ /*

* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/deleteface),
* [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/deleteface), [PersonGroup
* Person - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or
* [PersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -280,4 +278,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -326,10 +324,8 @@ * will cause failures.

* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/deleteface),
* [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/deleteface), [PersonGroup
* Person - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or
* [PersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -342,4 +338,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* an error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -354,12 +350,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [PersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param personGroupId Id referencing a particular person group.

@@ -404,10 +391,8 @@ * @param personId Id referencing a particular person.

* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/deleteface),
* [PersonGroup Person -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/delete) or
* [PersonGroup -
* Delete](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/delete) is
* Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/deleteface), [PersonGroup
* Person - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/delete) or
* [PersonGroup - Delete](https://docs.microsoft.com/rest/api/faceapi/persongroup/delete) is
* called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl).
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl).
* * Higher face image quality means better recognition precision. Please consider high-quality

@@ -420,4 +405,4 @@ * faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.

* an error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), there’s
* no guarantee to detect and add the face successfully.
* Detect](https://docs.microsoft.com/rest/api/faceapi/face/detectwithurl), there’s no guarantee to
* detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions

@@ -432,12 +417,3 @@ * will cause failures.

* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [PersonGroup Person - Add
* Face](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroupperson/addfacefromurl).
* Recommend for near frontal face detection. For scenarios with exceptionally large angle
* (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be
* detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model).
* @param personGroupId Id referencing a particular person group.

@@ -444,0 +420,0 @@ * @param personId Id referencing a particular person.
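When an image contains more than one face, the "targetFace" rectangle discussed in the hunks above tells Add Face which one to enroll; the rectangle should come straight from a Face - Detect call on the same image. A minimal sketch follows, assuming the targetFace option takes the rectangle as [left, top, width, height] in line with the REST API's comma-separated format; confirm against the typings.

```ts
import { FaceClient } from "@azure/cognitiveservices-face";
import { ApiKeyCredentials } from "@azure/ms-rest-js";

const client = new FaceClient(
  new ApiKeyCredentials({ inHeader: { "Ocp-Apim-Subscription-Key": process.env.FACE_KEY! } }),
  process.env.FACE_ENDPOINT!
);

async function addLargestFace(personGroupId: string, personId: string, imageUrl: string) {
  // Detect first, then pass the chosen face's rectangle as targetFace so the
  // service enrolls exactly that face instead of erroring on a multi-face image.
  const faces = await client.face.detectWithUrl(imageUrl, { returnFaceId: false });
  if (faces.length === 0) throw new Error("no face detected");

  const target = faces.reduce((a, b) =>
    a.faceRectangle.width * a.faceRectangle.height >=
    b.faceRectangle.width * b.faceRectangle.height ? a : b
  );
  const { left, top, width, height } = target.faceRectangle;

  return client.personGroupPerson.addFaceFromUrl(personGroupId, personId, imageUrl, {
    targetFace: [left, top, width, height],
    detectionModel: "detection_01" // use the same detection model as the Detect call
  });
}
```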
