Socket
Socket
Sign inDemoInstall

@azure/cognitiveservices-face

Package Overview
Dependencies
21
Maintainers
6
Versions
8
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 3.0.0 to 3.1.0

src/faceClient.ts

2

dist/cognitiveservices-face.min.js

@@ -1,1 +0,1 @@

!function(e,a){"object"==typeof exports&&"undefined"!=typeof module?a(exports,require("@azure/ms-rest-js")):"function"==typeof define&&define.amd?define(["exports","@azure/ms-rest-js"],a):a((e.Azure=e.Azure||{},e.Azure.CognitiveservicesFace={}),e.msRest)}(this,function(e,i){"use strict";var t=function(e,a){return(t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var r in a)a.hasOwnProperty(r)&&(e[r]=a[r])})(e,a)};function a(e,a){function r(){this.constructor=e}t(e,a),e.prototype=null===a?Object.create(a):(r.prototype=a.prototype,new r)}var r=function(){return(r=Object.assign||function(e){for(var a,r=1,t=arguments.length;r<t;r++)for(var s in a=arguments[r])Object.prototype.hasOwnProperty.call(a,s)&&(e[s]=a[s]);return e}).apply(this,arguments)},s=Object.freeze({}),o={serializedName:"Error",type:{name:"Composite",className:"ErrorModel",modelProperties:{code:{serializedName:"code",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},p={serializedName:"APIError",type:{name:"Composite",className:"APIError",modelProperties:{error:{serializedName:"error",type:{name:"Composite",className:"ErrorModel"}}}}},n={serializedName:"FaceRectangle",type:{name:"Composite",className:"FaceRectangle",modelProperties:{width:{required:!0,serializedName:"width",type:{name:"Number"}},height:{required:!0,serializedName:"height",type:{name:"Number"}},left:{required:!0,serializedName:"left",type:{name:"Number"}},top:{required:!0,serializedName:"top",type:{name:"Number"}}}}},d={serializedName:"Coordinate",type:{name:"Composite",className:"Coordinate",modelProperties:{x:{required:!0,serializedName:"x",type:{name:"Number"}},y:{required:!0,serializedName:"y",type:{name:"Number"}}}}},l={serializedName:"FaceLandmarks",type:{name:"Composite",className:"FaceLandmarks",modelProperties:{pupilLeft:{serializedName:"pupilLeft",type:{name:"Composite",className:"Coordinate"}},pupilRight:{serializedName:"pupilRight",type:{name:"C
omposite",className:"Coordinate"}},noseTip:{serializedName:"noseTip",type:{name:"Composite",className:"Coordinate"}},mouthLeft:{serializedName:"mouthLeft",type:{name:"Composite",className:"Coordinate"}},mouthRight:{serializedName:"mouthRight",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftOuter:{serializedName:"eyebrowLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftInner:{serializedName:"eyebrowLeftInner",type:{name:"Composite",className:"Coordinate"}},eyeLeftOuter:{serializedName:"eyeLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyeLeftTop:{serializedName:"eyeLeftTop",type:{name:"Composite",className:"Coordinate"}},eyeLeftBottom:{serializedName:"eyeLeftBottom",type:{name:"Composite",className:"Coordinate"}},eyeLeftInner:{serializedName:"eyeLeftInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightInner:{serializedName:"eyebrowRightInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightOuter:{serializedName:"eyebrowRightOuter",type:{name:"Composite",className:"Coordinate"}},eyeRightInner:{serializedName:"eyeRightInner",type:{name:"Composite",className:"Coordinate"}},eyeRightTop:{serializedName:"eyeRightTop",type:{name:"Composite",className:"Coordinate"}},eyeRightBottom:{serializedName:"eyeRightBottom",type:{name:"Composite",className:"Coordinate"}},eyeRightOuter:{serializedName:"eyeRightOuter",type:{name:"Composite",className:"Coordinate"}},noseRootLeft:{serializedName:"noseRootLeft",type:{name:"Composite",className:"Coordinate"}},noseRootRight:{serializedName:"noseRootRight",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarTop:{serializedName:"noseLeftAlarTop",type:{name:"Composite",className:"Coordinate"}},noseRightAlarTop:{serializedName:"noseRightAlarTop",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarOutTip:{serializedName:"noseLeftAlarOutTip",type:{name:"Composite",className:"Coordinate"}},noseRightAlarOutTip:{serializedName:"noseRightAlarOutTip",type:{name:"Composi
te",className:"Coordinate"}},upperLipTop:{serializedName:"upperLipTop",type:{name:"Composite",className:"Coordinate"}},upperLipBottom:{serializedName:"upperLipBottom",type:{name:"Composite",className:"Coordinate"}},underLipTop:{serializedName:"underLipTop",type:{name:"Composite",className:"Coordinate"}},underLipBottom:{serializedName:"underLipBottom",type:{name:"Composite",className:"Coordinate"}}}}},m={serializedName:"FacialHair",type:{name:"Composite",className:"FacialHair",modelProperties:{moustache:{nullable:!1,serializedName:"moustache",type:{name:"Number"}},beard:{nullable:!1,serializedName:"beard",type:{name:"Number"}},sideburns:{nullable:!1,serializedName:"sideburns",type:{name:"Number"}}}}},u={serializedName:"HeadPose",type:{name:"Composite",className:"HeadPose",modelProperties:{roll:{nullable:!1,serializedName:"roll",type:{name:"Number"}},yaw:{nullable:!1,serializedName:"yaw",type:{name:"Number"}},pitch:{nullable:!1,serializedName:"pitch",type:{name:"Number"}}}}},c={serializedName:"Emotion",type:{name:"Composite",className:"Emotion",modelProperties:{anger:{nullable:!1,serializedName:"anger",type:{name:"Number"}},contempt:{nullable:!1,serializedName:"contempt",type:{name:"Number"}},disgust:{nullable:!1,serializedName:"disgust",type:{name:"Number"}},fear:{nullable:!1,serializedName:"fear",type:{name:"Number"}},happiness:{nullable:!1,serializedName:"happiness",type:{name:"Number"}},neutral:{nullable:!1,serializedName:"neutral",type:{name:"Number"}},sadness:{nullable:!1,serializedName:"sadness",type:{name:"Number"}},surprise:{nullable:!1,serializedName:"surprise",type:{name:"Number"}}}}},y={serializedName:"HairColor",type:{name:"Composite",className:"HairColor",modelProperties:{color:{nullable:!1,serializedName:"color",type:{name:"Enum",allowedValues:["unknown","white","gray","blond","brown","red","black","other"]}},confidence:{nullable:!1,serializedName:"confidence",type:{name:"Number"}}}}},h={serializedName:"Hair",type:{name:"Composite",className:"Hair",mode
lProperties:{bald:{nullable:!1,serializedName:"bald",type:{name:"Number"}},invisible:{nullable:!1,serializedName:"invisible",type:{name:"Boolean"}},hairColor:{serializedName:"hairColor",type:{name:"Sequence",element:{type:{name:"Composite",className:"HairColor"}}}}}}},f={serializedName:"Makeup",type:{name:"Composite",className:"Makeup",modelProperties:{eyeMakeup:{nullable:!1,serializedName:"eyeMakeup",type:{name:"Boolean"}},lipMakeup:{nullable:!1,serializedName:"lipMakeup",type:{name:"Boolean"}}}}},N={serializedName:"Occlusion",type:{name:"Composite",className:"Occlusion",modelProperties:{foreheadOccluded:{nullable:!1,serializedName:"foreheadOccluded",type:{name:"Boolean"}},eyeOccluded:{nullable:!1,serializedName:"eyeOccluded",type:{name:"Boolean"}},mouthOccluded:{nullable:!1,serializedName:"mouthOccluded",type:{name:"Boolean"}}}}},g={serializedName:"Accessory",type:{name:"Composite",className:"Accessory",modelProperties:{type:{nullable:!1,serializedName:"type",type:{name:"Enum",allowedValues:["headWear","glasses","mask"]}},confidence:{nullable:!1,serializedName:"confidence",type:{name:"Number"}}}}},P={serializedName:"Blur",type:{name:"Composite",className:"Blur",modelProperties:{blurLevel:{nullable:!1,serializedName:"blurLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},z={serializedName:"Exposure",type:{name:"Composite",className:"Exposure",modelProperties:{exposureLevel:{nullable:!1,serializedName:"exposureLevel",type:{name:"Enum",allowedValues:["UnderExposure","GoodExposure","OverExposure"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},I={serializedName:"Noise",type:{name:"Composite",className:"Noise",modelProperties:{noiseLevel:{nullable:!1,serializedName:"noiseLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},q={serializedName:"FaceAttributes",type:{name:"Composite",className:
"FaceAttributes",modelProperties:{age:{serializedName:"age",type:{name:"Number"}},gender:{serializedName:"gender",type:{name:"Enum",allowedValues:["male","female"]}},smile:{serializedName:"smile",type:{name:"Number"}},facialHair:{serializedName:"facialHair",type:{name:"Composite",className:"FacialHair"}},glasses:{serializedName:"glasses",type:{name:"Enum",allowedValues:["noGlasses","readingGlasses","sunglasses","swimmingGoggles"]}},headPose:{serializedName:"headPose",type:{name:"Composite",className:"HeadPose"}},emotion:{serializedName:"emotion",type:{name:"Composite",className:"Emotion"}},hair:{serializedName:"hair",type:{name:"Composite",className:"Hair"}},makeup:{serializedName:"makeup",type:{name:"Composite",className:"Makeup"}},occlusion:{serializedName:"occlusion",type:{name:"Composite",className:"Occlusion"}},accessories:{serializedName:"accessories",type:{name:"Sequence",element:{type:{name:"Composite",className:"Accessory"}}}},blur:{serializedName:"blur",type:{name:"Composite",className:"Blur"}},exposure:{serializedName:"exposure",type:{name:"Composite",className:"Exposure"}},noise:{serializedName:"noise",type:{name:"Composite",className:"Noise"}}}}},M={serializedName:"DetectedFace",type:{name:"Composite",className:"DetectedFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}},faceRectangle:{required:!0,serializedName:"faceRectangle",type:{name:"Composite",className:"FaceRectangle"}},faceLandmarks:{serializedName:"faceLandmarks",type:{name:"Composite",className:"FaceLandmarks"}},faceAttributes:{serializedName:"faceAttributes",type:{name:"Composite",className:"FaceAttributes"}}}}},b={serializedName:"FindSimilarRequest",type:{name:"Composite",className:"FindSimilarRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},faceListId:{serializedName:"faceListId",constraints:{MaxLength:64,Pattern
:/^[a-z0-9-_]+$/},type:{name:"String"}},largeFaceListId:{serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},faceIds:{serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:20,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"matchPerson",type:{name:"Enum",allowedValues:["matchPerson","matchFace"]}}}}},F={serializedName:"SimilarFace",type:{name:"Composite",className:"SimilarFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},persistedFaceId:{serializedName:"persistedFaceId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},L={serializedName:"GroupRequest",type:{name:"Composite",className:"GroupRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},C={serializedName:"GroupResult",type:{name:"Composite",className:"GroupResult",modelProperties:{groups:{required:!0,serializedName:"groups",type:{name:"Sequence",element:{type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}},messyGroup:{serializedName:"messyGroup",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},R={serializedName:"IdentifyRequest",type:{name:"Composite",className:"IdentifyRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:10},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:1,constraints:{Inclus
iveMaximum:5,InclusiveMinimum:1},type:{name:"Number"}},confidenceThreshold:{serializedName:"confidenceThreshold",type:{name:"Number"}}}}},S={serializedName:"IdentifyCandidate",type:{name:"Composite",className:"IdentifyCandidate",modelProperties:{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},G={serializedName:"IdentifyResult",type:{name:"Composite",className:"IdentifyResult",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},candidates:{required:!0,serializedName:"candidates",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyCandidate"}}}}}}},O={serializedName:"VerifyFaceToPersonRequest",type:{name:"Composite",className:"VerifyFaceToPersonRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}}}}},T={serializedName:"VerifyFaceToFaceRequest",type:{name:"Composite",className:"VerifyFaceToFaceRequest",modelProperties:{faceId1:{required:!0,serializedName:"faceId1",type:{name:"Uuid"}},faceId2:{required:!0,serializedName:"faceId2",type:{name:"Uuid"}}}}},E={serializedName:"VerifyResult",type:{name:"Composite",className:"VerifyResult",modelProperties:{isIdentical:{required:!0,serializedName:"isIdentical",type:{name:"Boolean"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},D={serializedName:"PersistedFace",type:{name:"Composite",className:"PersistedFace",modelProperties:{persistedFaceId:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}},userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},A={serializedN
ame:"NameAndUserDataContract",type:{name:"Composite",className:"NameAndUserDataContract",modelProperties:{name:{serializedName:"name",constraints:{MaxLength:128},type:{name:"String"}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},U={serializedName:"MetaDataContract",type:{name:"Composite",className:"MetaDataContract",modelProperties:r({},A.type.modelProperties,{recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}})}},x={serializedName:"FaceList",type:{name:"Composite",className:"FaceList",modelProperties:r({},U.type.modelProperties,{faceListId:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},persistedFaces:{serializedName:"persistedFaces",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}})}},w={serializedName:"PersonGroup",type:{name:"Composite",className:"PersonGroup",modelProperties:r({},U.type.modelProperties,{personGroupId:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},B={serializedName:"Person",type:{name:"Composite",className:"Person",modelProperties:r({},A.type.modelProperties,{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},persistedFaceIds:{serializedName:"persistedFaceIds",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}})}},v={serializedName:"LargeFaceList",type:{name:"Composite",className:"LargeFaceList",modelProperties:r({},U.type.modelProperties,{largeFaceListId:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},H={serializedName:"LargePersonGroup",type:{name:"Composite",className:"LargePersonGroup",modelProperties:r({},U.type.modelProperties,{largePersonGroupId:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}}
)}},V={serializedName:"UpdateFaceRequest",type:{name:"Composite",className:"UpdateFaceRequest",modelProperties:{userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},k={serializedName:"TrainingStatus",type:{name:"Composite",className:"TrainingStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["nonstarted","running","succeeded","failed"]}},created:{required:!0,serializedName:"createdDateTime",type:{name:"DateTime"}},lastAction:{serializedName:"lastActionDateTime",type:{name:"DateTime"}},lastSuccessfulTraining:{serializedName:"lastSuccessfulTrainingDateTime",type:{name:"DateTime"}},message:{serializedName:"message",type:{name:"String"}}}}},j={serializedName:"ApplySnapshotRequest",type:{name:"Composite",className:"ApplySnapshotRequest",modelProperties:{objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"CreateNew",type:{name:"Enum",allowedValues:["CreateNew"]}}}}},_={serializedName:"Snapshot",type:{name:"Composite",className:"Snapshot",modelProperties:{id:{required:!0,serializedName:"id",type:{name:"Uuid"}},account:{required:!0,serializedName:"account",type:{name:"String"}},type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastUpdateTime:{required:!0,serializedName:"lastUpdateTime",type:{name:"DateTime"}}}}},$={serializedName:"TakeSnapshotRequest",type:{name:"Composite",className:"TakeSnapshotRequest",modelProperties:{type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceLi
st","LargePersonGroup","PersonGroup"]}},objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},Q={serializedName:"UpdateSnapshotRequest",type:{name:"Composite",className:"UpdateSnapshotRequest",modelProperties:{applyScope:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},W={serializedName:"OperationStatus",type:{name:"Composite",className:"OperationStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["notstarted","running","succeeded","failed"]}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastActionTime:{serializedName:"lastActionTime",type:{name:"DateTime"}},resourceLocation:{serializedName:"resourceLocation",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},J={serializedName:"ImageUrl",type:{name:"Composite",className:"ImageUrl",modelProperties:{url:{required:!0,serializedName:"url",type:{name:"String"}}}}},K={serializedName:"snapshot-take-headers",type:{name:"Composite",className:"SnapshotTakeHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},X={serializedName:"snapshot-apply-headers",type:{name:"Composite",className:"SnapshotApplyHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},Y=Object.freeze({ErrorModel:o,APIError:p,FaceRectangle:n,Coordinate:d,FaceLandmarks:l,FacialHair:m,HeadPose:u,Emotion:c,HairColor:y,Hair:h,Makeup:f,Occlusion:N,Accessory:g,Blur:P,Exposure:z,Noise:I,FaceAttributes:q,DetectedFace:M,FindSimilarRequest:b,SimilarFace:F,GroupRequest:L,GroupResu
lt:C,IdentifyRequest:R,IdentifyCandidate:S,IdentifyResult:G,VerifyFaceToPersonRequest:O,VerifyFaceToFaceRequest:T,VerifyResult:E,PersistedFace:D,NameAndUserDataContract:A,MetaDataContract:U,FaceList:x,PersonGroup:w,Person:B,LargeFaceList:v,LargePersonGroup:H,UpdateFaceRequest:V,TrainingStatus:k,ApplySnapshotRequest:j,Snapshot:_,TakeSnapshotRequest:$,UpdateSnapshotRequest:Q,OperationStatus:W,ImageUrl:J,SnapshotTakeHeaders:K,SnapshotApplyHeaders:X}),Z=Object.freeze({Accessory:g,APIError:p,Blur:P,Coordinate:d,DetectedFace:M,Emotion:c,ErrorModel:o,Exposure:z,FaceAttributes:q,FaceLandmarks:l,FaceRectangle:n,FacialHair:m,FindSimilarRequest:b,GroupRequest:L,GroupResult:C,Hair:h,HairColor:y,HeadPose:u,IdentifyCandidate:S,IdentifyRequest:R,IdentifyResult:G,ImageUrl:J,Makeup:f,Noise:I,Occlusion:N,SimilarFace:F,VerifyFaceToFaceRequest:T,VerifyFaceToPersonRequest:O,VerifyResult:E}),ee={parameterPath:["options","applyScope"],mapper:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},collectionFormat:i.QueryCollectionFormat.Csv},ae={parameterPath:"endpoint",mapper:{required:!0,serializedName:"Endpoint",defaultValue:"",type:{name:"String"}},skipEncoding:!0},re={parameterPath:"faceListId",mapper:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},te={parameterPath:"largeFaceListId",mapper:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},se={parameterPath:"largePersonGroupId",mapper:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},oe={parameterPath:"persistedFaceId",mapper:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}}},ie={parameterPath:"personGroupId",mapper:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},pe={parameterPath:"personId",mapper:{required:!0,serial
izedName:"personId",type:{name:"Uuid"}}},ne={parameterPath:["options","recognitionModel"],mapper:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}},de={parameterPath:["options","returnFaceAttributes"],mapper:{serializedName:"returnFaceAttributes",type:{name:"Sequence",element:{type:{name:"Enum",allowedValues:["age","gender","headPose","smile","facialHair","glasses","emotion","hair","makeup","occlusion","accessories","blur","exposure","noise"]}}}},collectionFormat:i.QueryCollectionFormat.Csv},le={parameterPath:["options","returnFaceId"],mapper:{serializedName:"returnFaceId",defaultValue:!0,type:{name:"Boolean"}}},me={parameterPath:["options","returnFaceLandmarks"],mapper:{serializedName:"returnFaceLandmarks",defaultValue:!1,type:{name:"Boolean"}}},ue={parameterPath:["options","returnRecognitionModel"],mapper:{serializedName:"returnRecognitionModel",defaultValue:!1,type:{name:"Boolean"}}},ce={parameterPath:"snapshotId",mapper:{required:!0,serializedName:"snapshotId",type:{name:"Uuid"}}},ye={parameterPath:["options","start"],mapper:{serializedName:"start",type:{name:"String"}}},he={parameterPath:["options","start"],mapper:{serializedName:"start",constraints:{MaxLength:64},type:{name:"String"}}},fe={parameterPath:["options","targetFace"],mapper:{serializedName:"targetFace",type:{name:"Sequence",element:{type:{name:"Number"}}}},collectionFormat:i.QueryCollectionFormat.Csv},Ne={parameterPath:["options","top"],mapper:{serializedName:"top",constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},ge={parameterPath:["options","top"],mapper:{serializedName:"top",defaultValue:1e3,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},Pe={parameterPath:["options","userData"],mapper:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}},ze=function(){function e(e){this.client=e}return e.prototype.findSimilar=function(e,a,r){return 
this.client.sendOperationRequest({faceId:e,options:a},qe,r)},e.prototype.group=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},Me,r)},e.prototype.identify=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},be,r)},e.prototype.verifyFaceToFace=function(e,a,r,t){return this.client.sendOperationRequest({faceId1:e,faceId2:a,options:r},Fe,t)},e.prototype.detectWithUrl=function(e,a,r){return this.client.sendOperationRequest({url:e,options:a},Le,r)},e.prototype.verifyFaceToPerson=function(e,a,r,t){return this.client.sendOperationRequest({faceId:e,personId:a,options:r},Ce,t)},e.prototype.detectWithStream=function(e,a,r){return this.client.sendOperationRequest({image:e,options:a},Re,r)},e}(),Ie=new i.Serializer(Z),qe={httpMethod:"POST",path:"findsimilars",urlParameters:[ae],requestBody:{parameterPath:{faceId:"faceId",faceListId:["options","faceListId"],largeFaceListId:["options","largeFaceListId"],faceIds:["options","faceIds"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],mode:["options","mode"]},mapper:r({},b,{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"SimilarFace"}}}}},default:{bodyMapper:p}},serializer:Ie},Me={httpMethod:"POST",path:"group",urlParameters:[ae],requestBody:{parameterPath:{faceIds:"faceIds"},mapper:r({},L,{required:!0})},responses:{200:{bodyMapper:C},default:{bodyMapper:p}},serializer:Ie},be={httpMethod:"POST",path:"identify",urlParameters:[ae],requestBody:{parameterPath:{faceIds:"faceIds",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],confidenceThreshold:["options","confidenceThreshold"]},mapper:r({},R,{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyResult"}}}}},default:{bo
dyMapper:p}},serializer:Ie},Fe={httpMethod:"POST",path:"verify",urlParameters:[ae],requestBody:{parameterPath:{faceId1:"faceId1",faceId2:"faceId2"},mapper:r({},T,{required:!0})},responses:{200:{bodyMapper:E},default:{bodyMapper:p}},serializer:Ie},Le={httpMethod:"POST",path:"detect",urlParameters:[ae],queryParameters:[le,me,de,ne,ue],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:Ie},Ce={httpMethod:"POST",path:"verify",urlParameters:[ae],requestBody:{parameterPath:{faceId:"faceId",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],personId:"personId"},mapper:r({},O,{required:!0})},responses:{200:{bodyMapper:E},default:{bodyMapper:p}},serializer:Ie},Re={httpMethod:"POST",path:"detect",urlParameters:[ae],queryParameters:[le,me,de,ne,ue],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:Ie},Se=Object.freeze({APIError:p,ErrorModel:o,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,UpdateFaceRequest:V}),Ge=function(){function e(e){this.client=e}return e.prototype.create=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Te,r)},e.prototype.list=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ee,r)},e.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},De,t)},e.prototype.get=function(e,a,r,t){return 
this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},Ae,t)},e.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},Ue,t)},e.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},xe,s)},e.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},we,s)},e.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},Be,s)},e.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,url:r,options:t},ve,s)},e.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,image:r,options:t},He,s)},e}(),Oe=new i.Serializer(Se),Te={httpMethod:"POST",path:"persongroups/{personGroupId}/persons",urlParameters:[ae,ie],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:Oe},Ee={httpMethod:"GET",path:"persongroups/{personGroupId}/persons",urlParameters:[ae,ie],queryParameters:[ye,Ne],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:Oe},De={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[ae,ie,pe],responses:{200:{},default:{bodyMapper:p}},serializer:Oe},Ae={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[ae,ie,pe],responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:Oe},Ue={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[ae,ie,pe],requestBody:{parameterPath:{name:["options","name"],us
erData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Oe},xe={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,ie,pe,oe],responses:{200:{},default:{bodyMapper:p}},serializer:Oe},we={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,ie,pe,oe],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Oe},Be={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,ie,pe,oe],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r({},V,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Oe},ve={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[ae,ie,pe],queryParameters:[Pe,fe],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Oe},He={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[ae,ie,pe],queryParameters:[Pe,fe],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Oe},Ve=Object.freeze({APIError:p,ErrorModel:o,FaceList:x,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,TrainingStatus:k}),ke=function(){function e(e){this.client=e}return e.prototype.create=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},_e,r)},e.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},$e,r)},e.prototype.get=function(e,a,r){return 
this.client.sendOperationRequest({personGroupId:e,options:a},Qe,r)},e.prototype.update=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},We,r)},e.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Je,r)},e.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Ke,a)},e.prototype.train=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Xe,r)},e}(),je=new i.Serializer(Ve),_e={httpMethod:"PUT",path:"persongroups/{personGroupId}",urlParameters:[ae,ie],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:je},$e={httpMethod:"DELETE",path:"persongroups/{personGroupId}",urlParameters:[ae,ie],responses:{200:{},default:{bodyMapper:p}},serializer:je},Qe={httpMethod:"GET",path:"persongroups/{personGroupId}",urlParameters:[ae,ie],queryParameters:[ue],responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:je},We={httpMethod:"PATCH",path:"persongroups/{personGroupId}",urlParameters:[ae,ie],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:je},Je={httpMethod:"GET",path:"persongroups/{personGroupId}/training",urlParameters:[ae,ie],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:je},Ke={httpMethod:"GET",path:"persongroups",urlParameters:[ae],queryParameters:[he,ge,ue],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersonGroup"}}}}},default:{bodyMapper:p}},serializer:je},Xe={httpMethod:"POST",path:"persongroups/{personGroupId}/train",urlParameters:[ae,ie],responses:{202:{},default:{bodyMapper:p}},serializer:je},Ye=Object.freeze({APIError:p,ErrorModel:o,Fa
ceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w}),Ze=function(){function e(e){this.client=e}return e.prototype.create=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},aa,r)},e.prototype.get=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},ra,r)},e.prototype.update=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},ta,r)},e.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},sa,r)},e.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},oa,a)},e.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,persistedFaceId:a,options:r},ia,t)},e.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,url:a,options:r},pa,t)},e.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,image:a,options:r},na,t)},e}(),ea=new 
i.Serializer(Ye),aa={httpMethod:"PUT",path:"facelists/{faceListId}",urlParameters:[ae,re],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ea},ra={httpMethod:"GET",path:"facelists/{faceListId}",urlParameters:[ae,re],queryParameters:[ue],responses:{200:{bodyMapper:x},default:{bodyMapper:p}},serializer:ea},ta={httpMethod:"PATCH",path:"facelists/{faceListId}",urlParameters:[ae,re],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ea},sa={httpMethod:"DELETE",path:"facelists/{faceListId}",urlParameters:[ae,re],responses:{200:{},default:{bodyMapper:p}},serializer:ea},oa={httpMethod:"GET",path:"facelists",urlParameters:[ae],queryParameters:[ue],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"FaceList"}}}}},default:{bodyMapper:p}},serializer:ea},ia={httpMethod:"DELETE",path:"facelists/{faceListId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,re,oe],responses:{200:{},default:{bodyMapper:p}},serializer:ea},pa={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[ae,re],queryParameters:[Pe,fe],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ea},na={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[ae,re],queryParameters:[Pe,fe],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ea},da=Object.freeze({APIError:p,ErrorModel:o,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,Pers
istedFace:D,Person:B,PersonGroup:w,UpdateFaceRequest:V}),la=function(){function e(e){this.client=e}return e.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},ua,r)},e.prototype.list=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},ca,r)},e.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},ya,t)},e.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},ha,t)},e.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},fa,t)},e.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Na,s)},e.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},ga,s)},e.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Pa,s)},e.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,url:r,options:t},za,s)},e.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,image:r,options:t},Ia,s)},e}(),ma=new 
i.Serializer(da),ua={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[ae,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:ma},ca={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[ae,se],queryParameters:[ye,Ne],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:ma},ya={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[ae,se,pe],responses:{200:{},default:{bodyMapper:p}},serializer:ma},ha={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[ae,se,pe],responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:ma},fa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[ae,se,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ma},Na={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,se,pe,oe],responses:{200:{},default:{bodyMapper:p}},serializer:ma},ga={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,se,pe,oe],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ma},Pa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,se,pe,oe],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r({},V,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ma},za={httpMethod:"POST",path:"largepersongroups/{
largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[ae,se,pe],queryParameters:[Pe,fe],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ma},Ia={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[ae,se,pe],queryParameters:[Pe,fe],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:ma},qa=Object.freeze({APIError:p,ErrorModel:o,FaceList:x,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,TrainingStatus:k}),Ma=function(){function e(e){this.client=e}return e.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Fa,r)},e.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},La,r)},e.prototype.get=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ca,r)},e.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ra,r)},e.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Sa,r)},e.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Ga,a)},e.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Oa,r)},e}(),ba=new 
i.Serializer(qa),Fa={httpMethod:"PUT",path:"largepersongroups/{largePersonGroupId}",urlParameters:[ae,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ba},La={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}",urlParameters:[ae,se],responses:{200:{},default:{bodyMapper:p}},serializer:ba},Ca={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}",urlParameters:[ae,se],queryParameters:[ue],responses:{200:{bodyMapper:H},default:{bodyMapper:p}},serializer:ba},Ra={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}",urlParameters:[ae,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ba},Sa={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/training",urlParameters:[ae,se],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:ba},Ga={httpMethod:"GET",path:"largepersongroups",urlParameters:[ae],queryParameters:[he,ge,ue],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargePersonGroup"}}}}},default:{bodyMapper:p}},serializer:ba},Oa={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/train",urlParameters:[ae,se],responses:{202:{},default:{bodyMapper:p}},serializer:ba},Ta=Object.freeze({APIError:p,ErrorModel:o,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,TrainingStatus:k,UpdateFaceRequest:V}),Ea=function(){function e(e){this.client=e}return e.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Aa,r)},e.prototype.get=function(e,a,r){return 
this.client.sendOperationRequest({largeFaceListId:e,options:a},Ua,r)},e.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},xa,r)},e.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},wa,r)},e.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Ba,r)},e.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},va,a)},e.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Ha,r)},e.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Va,t)},e.prototype.getFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},ka,t)},e.prototype.updateFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},ja,t)},e.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,url:a,options:r},_a,t)},e.prototype.listFaces=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},$a,r)},e.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,image:a,options:r},Qa,t)},e}(),Da=new 
i.Serializer(Ta),Aa={httpMethod:"PUT",path:"largefacelists/{largeFaceListId}",urlParameters:[ae,te],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Da},Ua={httpMethod:"GET",path:"largefacelists/{largeFaceListId}",urlParameters:[ae,te],queryParameters:[ue],responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:Da},xa={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}",urlParameters:[ae,te],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Da},wa={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}",urlParameters:[ae,te],responses:{200:{},default:{bodyMapper:p}},serializer:Da},Ba={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/training",urlParameters:[ae,te],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:Da},va={httpMethod:"GET",path:"largefacelists",urlParameters:[ae],queryParameters:[ue],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargeFaceList"}}}}},default:{bodyMapper:p}},serializer:Da},Ha={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/train",urlParameters:[ae,te],responses:{202:{},default:{bodyMapper:p}},serializer:Da},Va={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,te,oe],responses:{200:{},default:{bodyMapper:p}},serializer:Da},ka={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,te,oe],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Da},ja={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[ae,te,oe],requestBody:{parameterPath:{userData:["optio
ns","userData"]},mapper:r({},V,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Da},_a={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[ae,te],queryParameters:[Pe,fe],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Da},$a={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[ae,te],queryParameters:[ye,Ne],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}},default:{bodyMapper:p}},serializer:Da},Qa={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[ae,te],queryParameters:[Pe,fe],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Da},Wa=Object.freeze({APIError:p,ApplySnapshotRequest:j,ErrorModel:o,OperationStatus:W,Snapshot:_,SnapshotApplyHeaders:X,SnapshotTakeHeaders:K,TakeSnapshotRequest:$,UpdateSnapshotRequest:Q}),Ja=function(){function e(e){this.client=e}return e.prototype.take=function(e,a,r,t,s){return this.client.sendOperationRequest({type:e,objectId:a,applyScope:r,options:t},Xa,s)},e.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Ya,a)},e.prototype.get=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},Za,r)},e.prototype.update=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},er,r)},e.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},ar,r)},e.prototype.apply=function(e,a,r,t){return this.client.sendOperationRequest({snapshotId:e,objectId:a,options:r},rr,t)},e.prototype.getOperationStatus=function(e,a,r){return 
this.client.sendOperationRequest({operationId:e,options:a},tr,r)},e}(),Ka=new i.Serializer(Wa),Xa={httpMethod:"POST",path:"snapshots",urlParameters:[ae],requestBody:{parameterPath:{type:"type",objectId:"objectId",applyScope:"applyScope",userData:["options","userData"]},mapper:r({},$,{required:!0})},responses:{202:{headersMapper:K},default:{bodyMapper:p}},serializer:Ka},Ya={httpMethod:"GET",path:"snapshots",urlParameters:[ae],queryParameters:[{parameterPath:["options","type"],mapper:{serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}}},ee],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Snapshot"}}}}},default:{bodyMapper:p}},serializer:Ka},Za={httpMethod:"GET",path:"snapshots/{snapshotId}",urlParameters:[ae,ce],responses:{200:{bodyMapper:_},default:{bodyMapper:p}},serializer:Ka},er={httpMethod:"PATCH",path:"snapshots/{snapshotId}",urlParameters:[ae,ce],requestBody:{parameterPath:{applyScope:["options","applyScope"],userData:["options","userData"]},mapper:r({},Q,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Ka},ar={httpMethod:"DELETE",path:"snapshots/{snapshotId}",urlParameters:[ae,ce],responses:{200:{},default:{bodyMapper:p}},serializer:Ka},rr={httpMethod:"POST",path:"snapshots/{snapshotId}/apply",urlParameters:[ae,ce],requestBody:{parameterPath:{objectId:"objectId",mode:["options","mode"]},mapper:r({},j,{required:!0})},responses:{202:{headersMapper:X},default:{bodyMapper:p}},serializer:Ka},tr={httpMethod:"GET",path:"operations/{operationId}",urlParameters:[ae,{parameterPath:"operationId",mapper:{required:!0,serializedName:"operationId",type:{name:"Uuid"}}}],responses:{200:{bodyMapper:W},default:{bodyMapper:p}},serializer:Ka},sr=function(o){function e(e,a,r){var t=this;if(null==a)throw new Error("'endpoint' cannot be null.");if(null==e)throw new Error("'credentials' cannot be 
null.");if(r||(r={}),!r.userAgent){var s=i.getDefaultUserAgentValue();r.userAgent="@azure/cognitiveservices-face/3.0.0 "+s}return(t=o.call(this,e,r)||this).baseUri="{Endpoint}/face/v1.0",t.requestContentType="application/json; charset=utf-8",t.endpoint=a,t.credentials=e,t}return a(e,o),e}(i.ServiceClient),or=function(s){function e(e,a,r){var t=s.call(this,e,a,r)||this;return t.face=new ze(t),t.personGroupPerson=new Ge(t),t.personGroup=new ke(t),t.faceList=new Ze(t),t.largePersonGroupPerson=new la(t),t.largePersonGroup=new Ma(t),t.largeFaceList=new Ea(t),t.snapshot=new Ja(t),t}return a(e,s),e}(sr);e.FaceClient=or,e.FaceClientContext=sr,e.FaceModels=s,e.FaceMappers=Y,e.Face=ze,e.PersonGroupPerson=Ge,e.PersonGroupOperations=ke,e.FaceListOperations=Ze,e.LargePersonGroupPerson=la,e.LargePersonGroupOperations=Ma,e.LargeFaceListOperations=Ea,e.SnapshotOperations=Ja,Object.defineProperty(e,"__esModule",{value:!0})});
!function(e,a){"object"==typeof exports&&"undefined"!=typeof module?a(exports,require("@azure/ms-rest-js")):"function"==typeof define&&define.amd?define(["exports","@azure/ms-rest-js"],a):a((e.Azure=e.Azure||{},e.Azure.CognitiveservicesFace={}),e.msRest)}(this,function(e,o){"use strict";var t=function(e,a){return(t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var r in a)a.hasOwnProperty(r)&&(e[r]=a[r])})(e,a)};function a(e,a){function r(){this.constructor=e}t(e,a),e.prototype=null===a?Object.create(a):(r.prototype=a.prototype,new r)}var r=function(){return(r=Object.assign||function(e){for(var a,r=1,t=arguments.length;r<t;r++)for(var s in a=arguments[r])Object.prototype.hasOwnProperty.call(a,s)&&(e[s]=a[s]);return e}).apply(this,arguments)},s=Object.freeze({}),i={serializedName:"Error",type:{name:"Composite",className:"ErrorModel",modelProperties:{code:{serializedName:"code",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},p={serializedName:"APIError",type:{name:"Composite",className:"APIError",modelProperties:{error:{serializedName:"error",type:{name:"Composite",className:"ErrorModel"}}}}},n={serializedName:"FaceRectangle",type:{name:"Composite",className:"FaceRectangle",modelProperties:{width:{required:!0,serializedName:"width",type:{name:"Number"}},height:{required:!0,serializedName:"height",type:{name:"Number"}},left:{required:!0,serializedName:"left",type:{name:"Number"}},top:{required:!0,serializedName:"top",type:{name:"Number"}}}}},d={serializedName:"Coordinate",type:{name:"Composite",className:"Coordinate",modelProperties:{x:{required:!0,serializedName:"x",type:{name:"Number"}},y:{required:!0,serializedName:"y",type:{name:"Number"}}}}},l={serializedName:"FaceLandmarks",type:{name:"Composite",className:"FaceLandmarks",modelProperties:{pupilLeft:{serializedName:"pupilLeft",type:{name:"Composite",className:"Coordinate"}},pupilRight:{serializedName:"pupilRight",type:{name:"C
omposite",className:"Coordinate"}},noseTip:{serializedName:"noseTip",type:{name:"Composite",className:"Coordinate"}},mouthLeft:{serializedName:"mouthLeft",type:{name:"Composite",className:"Coordinate"}},mouthRight:{serializedName:"mouthRight",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftOuter:{serializedName:"eyebrowLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyebrowLeftInner:{serializedName:"eyebrowLeftInner",type:{name:"Composite",className:"Coordinate"}},eyeLeftOuter:{serializedName:"eyeLeftOuter",type:{name:"Composite",className:"Coordinate"}},eyeLeftTop:{serializedName:"eyeLeftTop",type:{name:"Composite",className:"Coordinate"}},eyeLeftBottom:{serializedName:"eyeLeftBottom",type:{name:"Composite",className:"Coordinate"}},eyeLeftInner:{serializedName:"eyeLeftInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightInner:{serializedName:"eyebrowRightInner",type:{name:"Composite",className:"Coordinate"}},eyebrowRightOuter:{serializedName:"eyebrowRightOuter",type:{name:"Composite",className:"Coordinate"}},eyeRightInner:{serializedName:"eyeRightInner",type:{name:"Composite",className:"Coordinate"}},eyeRightTop:{serializedName:"eyeRightTop",type:{name:"Composite",className:"Coordinate"}},eyeRightBottom:{serializedName:"eyeRightBottom",type:{name:"Composite",className:"Coordinate"}},eyeRightOuter:{serializedName:"eyeRightOuter",type:{name:"Composite",className:"Coordinate"}},noseRootLeft:{serializedName:"noseRootLeft",type:{name:"Composite",className:"Coordinate"}},noseRootRight:{serializedName:"noseRootRight",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarTop:{serializedName:"noseLeftAlarTop",type:{name:"Composite",className:"Coordinate"}},noseRightAlarTop:{serializedName:"noseRightAlarTop",type:{name:"Composite",className:"Coordinate"}},noseLeftAlarOutTip:{serializedName:"noseLeftAlarOutTip",type:{name:"Composite",className:"Coordinate"}},noseRightAlarOutTip:{serializedName:"noseRightAlarOutTip",type:{name:"Composi
te",className:"Coordinate"}},upperLipTop:{serializedName:"upperLipTop",type:{name:"Composite",className:"Coordinate"}},upperLipBottom:{serializedName:"upperLipBottom",type:{name:"Composite",className:"Coordinate"}},underLipTop:{serializedName:"underLipTop",type:{name:"Composite",className:"Coordinate"}},underLipBottom:{serializedName:"underLipBottom",type:{name:"Composite",className:"Coordinate"}}}}},m={serializedName:"FacialHair",type:{name:"Composite",className:"FacialHair",modelProperties:{moustache:{nullable:!1,serializedName:"moustache",type:{name:"Number"}},beard:{nullable:!1,serializedName:"beard",type:{name:"Number"}},sideburns:{nullable:!1,serializedName:"sideburns",type:{name:"Number"}}}}},u={serializedName:"HeadPose",type:{name:"Composite",className:"HeadPose",modelProperties:{roll:{nullable:!1,serializedName:"roll",type:{name:"Number"}},yaw:{nullable:!1,serializedName:"yaw",type:{name:"Number"}},pitch:{nullable:!1,serializedName:"pitch",type:{name:"Number"}}}}},c={serializedName:"Emotion",type:{name:"Composite",className:"Emotion",modelProperties:{anger:{nullable:!1,serializedName:"anger",type:{name:"Number"}},contempt:{nullable:!1,serializedName:"contempt",type:{name:"Number"}},disgust:{nullable:!1,serializedName:"disgust",type:{name:"Number"}},fear:{nullable:!1,serializedName:"fear",type:{name:"Number"}},happiness:{nullable:!1,serializedName:"happiness",type:{name:"Number"}},neutral:{nullable:!1,serializedName:"neutral",type:{name:"Number"}},sadness:{nullable:!1,serializedName:"sadness",type:{name:"Number"}},surprise:{nullable:!1,serializedName:"surprise",type:{name:"Number"}}}}},y={serializedName:"HairColor",type:{name:"Composite",className:"HairColor",modelProperties:{color:{nullable:!1,serializedName:"color",type:{name:"Enum",allowedValues:["unknown","white","gray","blond","brown","red","black","other"]}},confidence:{nullable:!1,serializedName:"confidence",type:{name:"Number"}}}}},h={serializedName:"Hair",type:{name:"Composite",className:"Hair",mode
lProperties:{bald:{nullable:!1,serializedName:"bald",type:{name:"Number"}},invisible:{nullable:!1,serializedName:"invisible",type:{name:"Boolean"}},hairColor:{serializedName:"hairColor",type:{name:"Sequence",element:{type:{name:"Composite",className:"HairColor"}}}}}}},N={serializedName:"Makeup",type:{name:"Composite",className:"Makeup",modelProperties:{eyeMakeup:{nullable:!1,serializedName:"eyeMakeup",type:{name:"Boolean"}},lipMakeup:{nullable:!1,serializedName:"lipMakeup",type:{name:"Boolean"}}}}},f={serializedName:"Occlusion",type:{name:"Composite",className:"Occlusion",modelProperties:{foreheadOccluded:{nullable:!1,serializedName:"foreheadOccluded",type:{name:"Boolean"}},eyeOccluded:{nullable:!1,serializedName:"eyeOccluded",type:{name:"Boolean"}},mouthOccluded:{nullable:!1,serializedName:"mouthOccluded",type:{name:"Boolean"}}}}},g={serializedName:"Accessory",type:{name:"Composite",className:"Accessory",modelProperties:{type:{nullable:!1,serializedName:"type",type:{name:"Enum",allowedValues:["headWear","glasses","mask"]}},confidence:{nullable:!1,serializedName:"confidence",type:{name:"Number"}}}}},P={serializedName:"Blur",type:{name:"Composite",className:"Blur",modelProperties:{blurLevel:{nullable:!1,serializedName:"blurLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},z={serializedName:"Exposure",type:{name:"Composite",className:"Exposure",modelProperties:{exposureLevel:{nullable:!1,serializedName:"exposureLevel",type:{name:"Enum",allowedValues:["UnderExposure","GoodExposure","OverExposure"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},I={serializedName:"Noise",type:{name:"Composite",className:"Noise",modelProperties:{noiseLevel:{nullable:!1,serializedName:"noiseLevel",type:{name:"Enum",allowedValues:["Low","Medium","High"]}},value:{nullable:!1,serializedName:"value",type:{name:"Number"}}}}},q={serializedName:"FaceAttributes",type:{name:"Composite",className:
"FaceAttributes",modelProperties:{age:{serializedName:"age",type:{name:"Number"}},gender:{serializedName:"gender",type:{name:"Enum",allowedValues:["male","female"]}},smile:{serializedName:"smile",type:{name:"Number"}},facialHair:{serializedName:"facialHair",type:{name:"Composite",className:"FacialHair"}},glasses:{serializedName:"glasses",type:{name:"Enum",allowedValues:["noGlasses","readingGlasses","sunglasses","swimmingGoggles"]}},headPose:{serializedName:"headPose",type:{name:"Composite",className:"HeadPose"}},emotion:{serializedName:"emotion",type:{name:"Composite",className:"Emotion"}},hair:{serializedName:"hair",type:{name:"Composite",className:"Hair"}},makeup:{serializedName:"makeup",type:{name:"Composite",className:"Makeup"}},occlusion:{serializedName:"occlusion",type:{name:"Composite",className:"Occlusion"}},accessories:{serializedName:"accessories",type:{name:"Sequence",element:{type:{name:"Composite",className:"Accessory"}}}},blur:{serializedName:"blur",type:{name:"Composite",className:"Blur"}},exposure:{serializedName:"exposure",type:{name:"Composite",className:"Exposure"}},noise:{serializedName:"noise",type:{name:"Composite",className:"Noise"}}}}},M={serializedName:"DetectedFace",type:{name:"Composite",className:"DetectedFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}},faceRectangle:{required:!0,serializedName:"faceRectangle",type:{name:"Composite",className:"FaceRectangle"}},faceLandmarks:{serializedName:"faceLandmarks",type:{name:"Composite",className:"FaceLandmarks"}},faceAttributes:{serializedName:"faceAttributes",type:{name:"Composite",className:"FaceAttributes"}}}}},b={serializedName:"FindSimilarRequest",type:{name:"Composite",className:"FindSimilarRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},faceListId:{serializedName:"faceListId",constraints:{MaxLength:64,Pattern
:/^[a-z0-9-_]+$/},type:{name:"String"}},largeFaceListId:{serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},faceIds:{serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:20,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"matchPerson",type:{name:"Enum",allowedValues:["matchPerson","matchFace"]}}}}},F={serializedName:"SimilarFace",type:{name:"Composite",className:"SimilarFace",modelProperties:{faceId:{serializedName:"faceId",type:{name:"Uuid"}},persistedFaceId:{serializedName:"persistedFaceId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},L={serializedName:"GroupRequest",type:{name:"Composite",className:"GroupRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:1e3},type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},C={serializedName:"GroupResult",type:{name:"Composite",className:"GroupResult",modelProperties:{groups:{required:!0,serializedName:"groups",type:{name:"Sequence",element:{type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}},messyGroup:{serializedName:"messyGroup",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}}}},R={serializedName:"IdentifyRequest",type:{name:"Composite",className:"IdentifyRequest",modelProperties:{faceIds:{required:!0,serializedName:"faceIds",constraints:{MaxItems:10},type:{name:"Sequence",element:{type:{name:"Uuid"}}}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},maxNumOfCandidatesReturned:{serializedName:"maxNumOfCandidatesReturned",defaultValue:1,constraints:{Inclus
iveMaximum:5,InclusiveMinimum:1},type:{name:"Number"}},confidenceThreshold:{serializedName:"confidenceThreshold",type:{name:"Number"}}}}},S={serializedName:"IdentifyCandidate",type:{name:"Composite",className:"IdentifyCandidate",modelProperties:{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},G={serializedName:"IdentifyResult",type:{name:"Composite",className:"IdentifyResult",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},candidates:{required:!0,serializedName:"candidates",type:{name:"Sequence",element:{type:{name:"Composite",className:"IdentifyCandidate"}}}}}}},O={serializedName:"VerifyFaceToPersonRequest",type:{name:"Composite",className:"VerifyFaceToPersonRequest",modelProperties:{faceId:{required:!0,serializedName:"faceId",type:{name:"Uuid"}},personGroupId:{serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},largePersonGroupId:{serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}}}}},T={serializedName:"VerifyFaceToFaceRequest",type:{name:"Composite",className:"VerifyFaceToFaceRequest",modelProperties:{faceId1:{required:!0,serializedName:"faceId1",type:{name:"Uuid"}},faceId2:{required:!0,serializedName:"faceId2",type:{name:"Uuid"}}}}},E={serializedName:"VerifyResult",type:{name:"Composite",className:"VerifyResult",modelProperties:{isIdentical:{required:!0,serializedName:"isIdentical",type:{name:"Boolean"}},confidence:{required:!0,serializedName:"confidence",type:{name:"Number"}}}}},D={serializedName:"PersistedFace",type:{name:"Composite",className:"PersistedFace",modelProperties:{persistedFaceId:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}},userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},A={serializedN
ame:"NameAndUserDataContract",type:{name:"Composite",className:"NameAndUserDataContract",modelProperties:{name:{serializedName:"name",constraints:{MaxLength:128},type:{name:"String"}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},U={serializedName:"MetaDataContract",type:{name:"Composite",className:"MetaDataContract",modelProperties:r({},A.type.modelProperties,{recognitionModel:{nullable:!1,serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}})}},x={serializedName:"FaceList",type:{name:"Composite",className:"FaceList",modelProperties:r({},U.type.modelProperties,{faceListId:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},persistedFaces:{serializedName:"persistedFaces",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}})}},w={serializedName:"PersonGroup",type:{name:"Composite",className:"PersonGroup",modelProperties:r({},U.type.modelProperties,{personGroupId:{required:!0,serializedName:"personGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},B={serializedName:"Person",type:{name:"Composite",className:"Person",modelProperties:r({},A.type.modelProperties,{personId:{required:!0,serializedName:"personId",type:{name:"Uuid"}},persistedFaceIds:{serializedName:"persistedFaceIds",type:{name:"Sequence",element:{type:{name:"Uuid"}}}}})}},v={serializedName:"LargeFaceList",type:{name:"Composite",className:"LargeFaceList",modelProperties:r({},U.type.modelProperties,{largeFaceListId:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}})}},H={serializedName:"LargePersonGroup",type:{name:"Composite",className:"LargePersonGroup",modelProperties:r({},U.type.modelProperties,{largePersonGroupId:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}}
)}},V={serializedName:"UpdateFaceRequest",type:{name:"Composite",className:"UpdateFaceRequest",modelProperties:{userData:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}}}},k={serializedName:"TrainingStatus",type:{name:"Composite",className:"TrainingStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["nonstarted","running","succeeded","failed"]}},created:{required:!0,serializedName:"createdDateTime",type:{name:"DateTime"}},lastAction:{serializedName:"lastActionDateTime",type:{name:"DateTime"}},lastSuccessfulTraining:{serializedName:"lastSuccessfulTrainingDateTime",type:{name:"DateTime"}},message:{serializedName:"message",type:{name:"String"}}}}},_={serializedName:"ApplySnapshotRequest",type:{name:"Composite",className:"ApplySnapshotRequest",modelProperties:{objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},mode:{nullable:!1,serializedName:"mode",defaultValue:"CreateNew",type:{name:"Enum",allowedValues:["CreateNew"]}}}}},j={serializedName:"Snapshot",type:{name:"Composite",className:"Snapshot",modelProperties:{id:{required:!0,serializedName:"id",type:{name:"Uuid"}},account:{required:!0,serializedName:"account",type:{name:"String"}},type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastUpdateTime:{required:!0,serializedName:"lastUpdateTime",type:{name:"DateTime"}}}}},$={serializedName:"TakeSnapshotRequest",type:{name:"Composite",className:"TakeSnapshotRequest",modelProperties:{type:{required:!0,serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceLi
st","LargePersonGroup","PersonGroup"]}},objectId:{required:!0,serializedName:"objectId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}},applyScope:{required:!0,serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},Q={serializedName:"UpdateSnapshotRequest",type:{name:"Composite",className:"UpdateSnapshotRequest",modelProperties:{applyScope:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},userData:{serializedName:"userData",constraints:{MaxLength:16384},type:{name:"String"}}}}},W={serializedName:"OperationStatus",type:{name:"Composite",className:"OperationStatus",modelProperties:{status:{required:!0,serializedName:"status",type:{name:"Enum",allowedValues:["notstarted","running","succeeded","failed"]}},createdTime:{required:!0,serializedName:"createdTime",type:{name:"DateTime"}},lastActionTime:{serializedName:"lastActionTime",type:{name:"DateTime"}},resourceLocation:{serializedName:"resourceLocation",type:{name:"String"}},message:{serializedName:"message",type:{name:"String"}}}}},J={serializedName:"ImageUrl",type:{name:"Composite",className:"ImageUrl",modelProperties:{url:{required:!0,serializedName:"url",type:{name:"String"}}}}},K={serializedName:"snapshot-take-headers",type:{name:"Composite",className:"SnapshotTakeHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},X={serializedName:"snapshot-apply-headers",type:{name:"Composite",className:"SnapshotApplyHeaders",modelProperties:{operationLocation:{serializedName:"operation-location",type:{name:"String"}}}}},Y=Object.freeze({ErrorModel:i,APIError:p,FaceRectangle:n,Coordinate:d,FaceLandmarks:l,FacialHair:m,HeadPose:u,Emotion:c,HairColor:y,Hair:h,Makeup:N,Occlusion:f,Accessory:g,Blur:P,Exposure:z,Noise:I,FaceAttributes:q,DetectedFace:M,FindSimilarRequest:b,SimilarFace:F,GroupRequest:L,GroupResu
lt:C,IdentifyRequest:R,IdentifyCandidate:S,IdentifyResult:G,VerifyFaceToPersonRequest:O,VerifyFaceToFaceRequest:T,VerifyResult:E,PersistedFace:D,NameAndUserDataContract:A,MetaDataContract:U,FaceList:x,PersonGroup:w,Person:B,LargeFaceList:v,LargePersonGroup:H,UpdateFaceRequest:V,TrainingStatus:k,ApplySnapshotRequest:_,Snapshot:j,TakeSnapshotRequest:$,UpdateSnapshotRequest:Q,OperationStatus:W,ImageUrl:J,SnapshotTakeHeaders:K,SnapshotApplyHeaders:X}),Z=Object.freeze({Accessory:g,APIError:p,Blur:P,Coordinate:d,DetectedFace:M,Emotion:c,ErrorModel:i,Exposure:z,FaceAttributes:q,FaceLandmarks:l,FaceRectangle:n,FacialHair:m,FindSimilarRequest:b,GroupRequest:L,GroupResult:C,Hair:h,HairColor:y,HeadPose:u,IdentifyCandidate:S,IdentifyRequest:R,IdentifyResult:G,ImageUrl:J,Makeup:N,Noise:I,Occlusion:f,SimilarFace:F,VerifyFaceToFaceRequest:T,VerifyFaceToPersonRequest:O,VerifyResult:E}),ee={parameterPath:["options","applyScope"],mapper:{serializedName:"applyScope",type:{name:"Sequence",element:{type:{name:"Uuid"}}}},collectionFormat:o.QueryCollectionFormat.Csv},ae={parameterPath:["options","detectionModel"],mapper:{serializedName:"detectionModel",defaultValue:"detection_01",type:{name:"String"}}},re={parameterPath:"endpoint",mapper:{required:!0,serializedName:"Endpoint",defaultValue:"",type:{name:"String"}},skipEncoding:!0},te={parameterPath:"faceListId",mapper:{required:!0,serializedName:"faceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},se={parameterPath:"largeFaceListId",mapper:{required:!0,serializedName:"largeFaceListId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},oe={parameterPath:"largePersonGroupId",mapper:{required:!0,serializedName:"largePersonGroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},ie={parameterPath:"persistedFaceId",mapper:{required:!0,serializedName:"persistedFaceId",type:{name:"Uuid"}}},pe={parameterPath:"personGroupId",mapper:{required:!0,serializedName:"person
GroupId",constraints:{MaxLength:64,Pattern:/^[a-z0-9-_]+$/},type:{name:"String"}}},ne={parameterPath:"personId",mapper:{required:!0,serializedName:"personId",type:{name:"Uuid"}}},de={parameterPath:["options","recognitionModel"],mapper:{serializedName:"recognitionModel",defaultValue:"recognition_01",type:{name:"String"}}},le={parameterPath:["options","returnFaceAttributes"],mapper:{serializedName:"returnFaceAttributes",type:{name:"Sequence",element:{type:{name:"Enum",allowedValues:["age","gender","headPose","smile","facialHair","glasses","emotion","hair","makeup","occlusion","accessories","blur","exposure","noise"]}}}},collectionFormat:o.QueryCollectionFormat.Csv},me={parameterPath:["options","returnFaceId"],mapper:{serializedName:"returnFaceId",defaultValue:!0,type:{name:"Boolean"}}},ue={parameterPath:["options","returnFaceLandmarks"],mapper:{serializedName:"returnFaceLandmarks",defaultValue:!1,type:{name:"Boolean"}}},ce={parameterPath:["options","returnRecognitionModel"],mapper:{serializedName:"returnRecognitionModel",defaultValue:!1,type:{name:"Boolean"}}},ye={parameterPath:"snapshotId",mapper:{required:!0,serializedName:"snapshotId",type:{name:"Uuid"}}},he={parameterPath:["options","start"],mapper:{serializedName:"start",type:{name:"String"}}},Ne={parameterPath:["options","start"],mapper:{serializedName:"start",constraints:{MaxLength:64},type:{name:"String"}}},fe={parameterPath:["options","targetFace"],mapper:{serializedName:"targetFace",type:{name:"Sequence",element:{type:{name:"Number"}}}},collectionFormat:o.QueryCollectionFormat.Csv},ge={parameterPath:["options","top"],mapper:{serializedName:"top",constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},Pe={parameterPath:["options","top"],mapper:{serializedName:"top",defaultValue:1e3,constraints:{InclusiveMaximum:1e3,InclusiveMinimum:1},type:{name:"Number"}}},ze={parameterPath:["options","userData"],mapper:{serializedName:"userData",constraints:{MaxLength:1024},type:{name:"String"}}},Ie=(qe
.prototype.findSimilar=function(e,a,r){return this.client.sendOperationRequest({faceId:e,options:a},be,r)},qe.prototype.group=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},Fe,r)},qe.prototype.identify=function(e,a,r){return this.client.sendOperationRequest({faceIds:e,options:a},Le,r)},qe.prototype.verifyFaceToFace=function(e,a,r,t){return this.client.sendOperationRequest({faceId1:e,faceId2:a,options:r},Ce,t)},qe.prototype.detectWithUrl=function(e,a,r){return this.client.sendOperationRequest({url:e,options:a},Re,r)},qe.prototype.verifyFaceToPerson=function(e,a,r,t){return this.client.sendOperationRequest({faceId:e,personId:a,options:r},Se,t)},qe.prototype.detectWithStream=function(e,a,r){return this.client.sendOperationRequest({image:e,options:a},Ge,r)},qe);function qe(e){this.client=e}var Me=new o.Serializer(Z),be={httpMethod:"POST",path:"findsimilars",urlParameters:[re],requestBody:{parameterPath:{faceId:"faceId",faceListId:["options","faceListId"],largeFaceListId:["options","largeFaceListId"],faceIds:["options","faceIds"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],mode:["options","mode"]},mapper:r({},b,{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"SimilarFace"}}}}},default:{bodyMapper:p}},serializer:Me},Fe={httpMethod:"POST",path:"group",urlParameters:[re],requestBody:{parameterPath:{faceIds:"faceIds"},mapper:r({},L,{required:!0})},responses:{200:{bodyMapper:C},default:{bodyMapper:p}},serializer:Me},Le={httpMethod:"POST",path:"identify",urlParameters:[re],requestBody:{parameterPath:{faceIds:"faceIds",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],maxNumOfCandidatesReturned:["options","maxNumOfCandidatesReturned"],confidenceThreshold:["options","confidenceThreshold"]},mapper:r({},R,{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"S
equence",element:{type:{name:"Composite",className:"IdentifyResult"}}}}},default:{bodyMapper:p}},serializer:Me},Ce={httpMethod:"POST",path:"verify",urlParameters:[re],requestBody:{parameterPath:{faceId1:"faceId1",faceId2:"faceId2"},mapper:r({},T,{required:!0})},responses:{200:{bodyMapper:E},default:{bodyMapper:p}},serializer:Me},Re={httpMethod:"POST",path:"detect",urlParameters:[re],queryParameters:[me,ue,le,de,ce,ae],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:Me},Se={httpMethod:"POST",path:"verify",urlParameters:[re],requestBody:{parameterPath:{faceId:"faceId",personGroupId:["options","personGroupId"],largePersonGroupId:["options","largePersonGroupId"],personId:"personId"},mapper:r({},O,{required:!0})},responses:{200:{bodyMapper:E},default:{bodyMapper:p}},serializer:Me},Ge={httpMethod:"POST",path:"detect",urlParameters:[re],queryParameters:[me,ue,le,de,ce,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"DetectedFace"}}}}},default:{bodyMapper:p}},serializer:Me},Oe=Object.freeze({APIError:p,ErrorModel:i,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,UpdateFaceRequest:V}),Te=(Ee.prototype.create=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ae,r)},Ee.prototype.list=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ue,r)},Ee.prototype.deleteMethod=function(e,a,r,t){return 
this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},xe,t)},Ee.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},we,t)},Ee.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({personGroupId:e,personId:a,options:r},Be,t)},Ee.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},ve,s)},Ee.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},He,s)},Ee.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,persistedFaceId:r,options:t},Ve,s)},Ee.prototype.addFaceFromUrl=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,url:r,options:t},ke,s)},Ee.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({personGroupId:e,personId:a,image:r,options:t},_e,s)},Ee);function Ee(e){this.client=e}var De=new 
o.Serializer(Oe),Ae={httpMethod:"POST",path:"persongroups/{personGroupId}/persons",urlParameters:[re,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:De},Ue={httpMethod:"GET",path:"persongroups/{personGroupId}/persons",urlParameters:[re,pe],queryParameters:[he,ge],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:De},xe={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[re,pe,ne],responses:{200:{},default:{bodyMapper:p}},serializer:De},we={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[re,pe,ne],responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:De},Be={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}",urlParameters:[re,pe,ne],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:De},ve={httpMethod:"DELETE",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,pe,ne,ie],responses:{200:{},default:{bodyMapper:p}},serializer:De},He={httpMethod:"GET",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,pe,ne,ie],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:De},Ve={httpMethod:"PATCH",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,pe,ne,ie],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r({},V,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:De},ke={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,pe,ne],queryParam
eters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:De},_e={httpMethod:"POST",path:"persongroups/{personGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,pe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:De},je=Object.freeze({APIError:p,ErrorModel:i,FaceList:x,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,TrainingStatus:k}),$e=(Qe.prototype.create=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Je,r)},Qe.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ke,r)},Qe.prototype.get=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Xe,r)},Qe.prototype.update=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ye,r)},Qe.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},Ze,r)},Qe.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},ea,a)},Qe.prototype.train=function(e,a,r){return this.client.sendOperationRequest({personGroupId:e,options:a},aa,r)},Qe);function Qe(e){this.client=e}var We=new 
o.Serializer(je),Je={httpMethod:"PUT",path:"persongroups/{personGroupId}",urlParameters:[re,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:We},Ke={httpMethod:"DELETE",path:"persongroups/{personGroupId}",urlParameters:[re,pe],responses:{200:{},default:{bodyMapper:p}},serializer:We},Xe={httpMethod:"GET",path:"persongroups/{personGroupId}",urlParameters:[re,pe],queryParameters:[ce],responses:{200:{bodyMapper:w},default:{bodyMapper:p}},serializer:We},Ye={httpMethod:"PATCH",path:"persongroups/{personGroupId}",urlParameters:[re,pe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:We},Ze={httpMethod:"GET",path:"persongroups/{personGroupId}/training",urlParameters:[re,pe],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:We},ea={httpMethod:"GET",path:"persongroups",urlParameters:[re],queryParameters:[Ne,Pe,ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersonGroup"}}}}},default:{bodyMapper:p}},serializer:We},aa={httpMethod:"POST",path:"persongroups/{personGroupId}/train",urlParameters:[re,pe],responses:{202:{},default:{bodyMapper:p}},serializer:We},ra=Object.freeze({APIError:p,ErrorModel:i,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w}),ta=(sa.prototype.create=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},ia,r)},sa.prototype.get=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},pa,r)},sa.prototype.update=function(e,a,r){return 
this.client.sendOperationRequest({faceListId:e,options:a},na,r)},sa.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({faceListId:e,options:a},da,r)},sa.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},la,a)},sa.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,persistedFaceId:a,options:r},ma,t)},sa.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,url:a,options:r},ua,t)},sa.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({faceListId:e,image:a,options:r},ca,t)},sa);function sa(e){this.client=e}var oa=new o.Serializer(ra),ia={httpMethod:"PUT",path:"facelists/{faceListId}",urlParameters:[re,te],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:oa},pa={httpMethod:"GET",path:"facelists/{faceListId}",urlParameters:[re,te],queryParameters:[ce],responses:{200:{bodyMapper:x},default:{bodyMapper:p}},serializer:oa},na={httpMethod:"PATCH",path:"facelists/{faceListId}",urlParameters:[re,te],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:oa},da={httpMethod:"DELETE",path:"facelists/{faceListId}",urlParameters:[re,te],responses:{200:{},default:{bodyMapper:p}},serializer:oa},la={httpMethod:"GET",path:"facelists",urlParameters:[re],queryParameters:[ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"FaceList"}}}}},default:{bodyMapper:p}},serializer:oa},ma={httpMethod:"DELETE",path:"facelists/{faceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,te,ie],responses:{200:{},default:{bodyMapper:p}},serializer:oa},ua={httpMethod:"POST",
path:"facelists/{faceListId}/persistedfaces",urlParameters:[re,te],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:oa},ca={httpMethod:"POST",path:"facelists/{faceListId}/persistedfaces",urlParameters:[re,te],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:oa},ya=Object.freeze({APIError:p,ErrorModel:i,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,UpdateFaceRequest:V}),ha=(Na.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},ga,r)},Na.prototype.list=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Pa,r)},Na.prototype.deleteMethod=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},za,t)},Na.prototype.get=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},Ia,t)},Na.prototype.update=function(e,a,r,t){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,options:r},qa,t)},Na.prototype.deleteFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Ma,s)},Na.prototype.getFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},ba,s)},Na.prototype.updateFace=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,persistedFaceId:r,options:t},Fa,s)},Na.prototype.addFaceFromUrl=function(e,a,r,t,s){return 
this.client.sendOperationRequest({largePersonGroupId:e,personId:a,url:r,options:t},La,s)},Na.prototype.addFaceFromStream=function(e,a,r,t,s){return this.client.sendOperationRequest({largePersonGroupId:e,personId:a,image:r,options:t},Ca,s)},Na);function Na(e){this.client=e}var fa=new o.Serializer(ya),ga={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[re,oe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:fa},Pa={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons",urlParameters:[re,oe],queryParameters:[he,ge],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Person"}}}}},default:{bodyMapper:p}},serializer:fa},za={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[re,oe,ne],responses:{200:{},default:{bodyMapper:p}},serializer:fa},Ia={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[re,oe,ne],responses:{200:{bodyMapper:B},default:{bodyMapper:p}},serializer:fa},qa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}/persons/{personId}",urlParameters:[re,oe,ne],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:fa},Ma={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,oe,ne,ie],responses:{200:{},default:{bodyMapper:p}},serializer:fa},ba={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,oe,ne,ie],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:fa},Fa={httpMethod:"PATCH",path:"largepersongroups/{largePersonG
roupId}/persons/{personId}/persistedfaces/{persistedFaceId}",urlParameters:[re,oe,ne,ie],requestBody:{parameterPath:{userData:["options","userData"]},mapper:r({},V,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:fa},La={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,oe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:fa},Ca={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces",urlParameters:[re,oe,ne],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:fa},Ra=Object.freeze({APIError:p,ErrorModel:i,FaceList:x,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,TrainingStatus:k}),Sa=(Ga.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ta,r)},Ga.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ea,r)},Ga.prototype.get=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Da,r)},Ga.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Aa,r)},Ga.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},Ua,r)},Ga.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},xa,a)},Ga.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largePersonGroupId:e,options:a},wa,r)},Ga);function Ga(e){this.client=e}var Oa=new 
o.Serializer(Ra),Ta={httpMethod:"PUT",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Oa},Ea={httpMethod:"DELETE",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],responses:{200:{},default:{bodyMapper:p}},serializer:Oa},Da={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],queryParameters:[ce],responses:{200:{bodyMapper:H},default:{bodyMapper:p}},serializer:Oa},Aa={httpMethod:"PATCH",path:"largepersongroups/{largePersonGroupId}",urlParameters:[re,oe],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Oa},Ua={httpMethod:"GET",path:"largepersongroups/{largePersonGroupId}/training",urlParameters:[re,oe],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:Oa},xa={httpMethod:"GET",path:"largepersongroups",urlParameters:[re],queryParameters:[Ne,Pe,ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargePersonGroup"}}}}},default:{bodyMapper:p}},serializer:Oa},wa={httpMethod:"POST",path:"largepersongroups/{largePersonGroupId}/train",urlParameters:[re,oe],responses:{202:{},default:{bodyMapper:p}},serializer:Oa},Ba=Object.freeze({APIError:p,ErrorModel:i,FaceList:x,ImageUrl:J,LargeFaceList:v,LargePersonGroup:H,MetaDataContract:U,NameAndUserDataContract:A,PersistedFace:D,Person:B,PersonGroup:w,TrainingStatus:k,UpdateFaceRequest:V}),va=(Ha.prototype.create=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ka,r)},Ha.prototype.get=function(e,a,r){return 
this.client.sendOperationRequest({largeFaceListId:e,options:a},_a,r)},Ha.prototype.update=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},ja,r)},Ha.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},$a,r)},Ha.prototype.getTrainingStatus=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Qa,r)},Ha.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},Wa,a)},Ha.prototype.train=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},Ja,r)},Ha.prototype.deleteFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Ka,t)},Ha.prototype.getFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Xa,t)},Ha.prototype.updateFace=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,persistedFaceId:a,options:r},Ya,t)},Ha.prototype.addFaceFromUrl=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,url:a,options:r},Za,t)},Ha.prototype.listFaces=function(e,a,r){return this.client.sendOperationRequest({largeFaceListId:e,options:a},er,r)},Ha.prototype.addFaceFromStream=function(e,a,r,t){return this.client.sendOperationRequest({largeFaceListId:e,image:a,options:r},ar,t)},Ha);function Ha(e){this.client=e}var Va=new 
o.Serializer(Ba),ka={httpMethod:"PUT",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"],recognitionModel:["options","recognitionModel"]},mapper:r({},U,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Va},_a={httpMethod:"GET",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],queryParameters:[ce],responses:{200:{bodyMapper:v},default:{bodyMapper:p}},serializer:Va},ja={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],requestBody:{parameterPath:{name:["options","name"],userData:["options","userData"]},mapper:r({},A,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Va},$a={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}",urlParameters:[re,se],responses:{200:{},default:{bodyMapper:p}},serializer:Va},Qa={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/training",urlParameters:[re,se],responses:{200:{bodyMapper:k},default:{bodyMapper:p}},serializer:Va},Wa={httpMethod:"GET",path:"largefacelists",urlParameters:[re],queryParameters:[ce],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"LargeFaceList"}}}}},default:{bodyMapper:p}},serializer:Va},Ja={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/train",urlParameters:[re,se],responses:{202:{},default:{bodyMapper:p}},serializer:Va},Ka={httpMethod:"DELETE",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,se,ie],responses:{200:{},default:{bodyMapper:p}},serializer:Va},Xa={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,se,ie],responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Va},Ya={httpMethod:"PATCH",path:"largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}",urlParameters:[re,se,ie],requestBody:{parameterPath:{userData:["optio
ns","userData"]},mapper:r({},V,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:Va},Za={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[re,se],queryParameters:[ze,fe,ae],requestBody:{parameterPath:{url:"url"},mapper:r({},J,{required:!0})},responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Va},er={httpMethod:"GET",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[re,se],queryParameters:[he,ge],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"PersistedFace"}}}}},default:{bodyMapper:p}},serializer:Va},ar={httpMethod:"POST",path:"largefacelists/{largeFaceListId}/persistedfaces",urlParameters:[re,se],queryParameters:[ze,fe,ae],requestBody:{parameterPath:"image",mapper:{required:!0,serializedName:"Image",type:{name:"Stream"}}},contentType:"application/octet-stream",responses:{200:{bodyMapper:D},default:{bodyMapper:p}},serializer:Va},rr=Object.freeze({APIError:p,ApplySnapshotRequest:_,ErrorModel:i,OperationStatus:W,Snapshot:j,SnapshotApplyHeaders:X,SnapshotTakeHeaders:K,TakeSnapshotRequest:$,UpdateSnapshotRequest:Q}),tr=(sr.prototype.take=function(e,a,r,t,s){return this.client.sendOperationRequest({type:e,objectId:a,applyScope:r,options:t},pr,s)},sr.prototype.list=function(e,a){return this.client.sendOperationRequest({options:e},nr,a)},sr.prototype.get=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},dr,r)},sr.prototype.update=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},lr,r)},sr.prototype.deleteMethod=function(e,a,r){return this.client.sendOperationRequest({snapshotId:e,options:a},mr,r)},sr.prototype.apply=function(e,a,r,t){return this.client.sendOperationRequest({snapshotId:e,objectId:a,options:r},ur,t)},sr.prototype.getOperationStatus=function(e,a,r){return this.client.sendOperationRequest({operationId:e,options:a},cr,r)},sr);function 
sr(e){this.client=e}var or,ir=new o.Serializer(rr),pr={httpMethod:"POST",path:"snapshots",urlParameters:[re],requestBody:{parameterPath:{type:"type",objectId:"objectId",applyScope:"applyScope",userData:["options","userData"]},mapper:r({},$,{required:!0})},responses:{202:{headersMapper:K},default:{bodyMapper:p}},serializer:ir},nr={httpMethod:"GET",path:"snapshots",urlParameters:[re],queryParameters:[{parameterPath:["options","type"],mapper:{serializedName:"type",type:{name:"Enum",allowedValues:["FaceList","LargeFaceList","LargePersonGroup","PersonGroup"]}}},ee],responses:{200:{bodyMapper:{serializedName:"parsedResponse",type:{name:"Sequence",element:{type:{name:"Composite",className:"Snapshot"}}}}},default:{bodyMapper:p}},serializer:ir},dr={httpMethod:"GET",path:"snapshots/{snapshotId}",urlParameters:[re,ye],responses:{200:{bodyMapper:j},default:{bodyMapper:p}},serializer:ir},lr={httpMethod:"PATCH",path:"snapshots/{snapshotId}",urlParameters:[re,ye],requestBody:{parameterPath:{applyScope:["options","applyScope"],userData:["options","userData"]},mapper:r({},Q,{required:!0})},responses:{200:{},default:{bodyMapper:p}},serializer:ir},mr={httpMethod:"DELETE",path:"snapshots/{snapshotId}",urlParameters:[re,ye],responses:{200:{},default:{bodyMapper:p}},serializer:ir},ur={httpMethod:"POST",path:"snapshots/{snapshotId}/apply",urlParameters:[re,ye],requestBody:{parameterPath:{objectId:"objectId",mode:["options","mode"]},mapper:r({},_,{required:!0})},responses:{202:{headersMapper:X},default:{bodyMapper:p}},serializer:ir},cr={httpMethod:"GET",path:"operations/{operationId}",urlParameters:[re,{parameterPath:"operationId",mapper:{required:!0,serializedName:"operationId",type:{name:"Uuid"}}}],responses:{200:{bodyMapper:W},default:{bodyMapper:p}},serializer:ir},yr=(a(hr,or=o.ServiceClient),hr);function hr(e,a,r){var t=this;if(null==a)throw new Error("'endpoint' cannot be null.");if(null==e)throw new Error("'credentials' cannot be null.");if(!(r=r||{}).userAgent){var 
s=o.getDefaultUserAgentValue();r.userAgent="@azure/cognitiveservices-face/3.1.0 "+s}return(t=or.call(this,e,r)||this).baseUri="{Endpoint}/face/v1.0",t.requestContentType="application/json; charset=utf-8",t.endpoint=a,t.credentials=e,t}var Nr,fr=(a(gr,Nr=yr),gr);function gr(e,a,r){var t=Nr.call(this,e,a,r)||this;return t.face=new Ie(t),t.personGroupPerson=new Te(t),t.personGroup=new $e(t),t.faceList=new ta(t),t.largePersonGroupPerson=new ha(t),t.largePersonGroup=new Sa(t),t.largeFaceList=new va(t),t.snapshot=new tr(t),t}e.FaceClient=fr,e.FaceClientContext=yr,e.FaceModels=s,e.FaceMappers=Y,e.Face=Ie,e.PersonGroupPerson=Te,e.PersonGroupOperations=$e,e.FaceListOperations=ta,e.LargePersonGroupPerson=ha,e.LargePersonGroupOperations=Sa,e.LargeFaceListOperations=va,e.SnapshotOperations=tr,Object.defineProperty(e,"__esModule",{value:!0})});

@@ -13,3 +13,3 @@ /*

var packageName = "@azure/cognitiveservices-face";
var packageVersion = "3.0.0";
var packageVersion = "3.1.0";
var FaceClientContext = /** @class */ (function (_super) {

@@ -16,0 +16,0 @@ tslib_1.__extends(FaceClientContext, _super);

import * as msRest from "@azure/ms-rest-js";
/** Query-parameter definition for the snapshot 'applyScope' option — presumably serialized as "applyScope"; confirm against the generated parameters file. */
export declare const applyScope: msRest.OperationQueryParameter;
/** Query-parameter definition for the optional detection model; maps `options.detectionModel` and defaults to 'detection_01' (see the corresponding parameter object in parameters.js). */
export declare const detectionModel: msRest.OperationQueryParameter;
/** URL-parameter definition for the Cognitive Services endpoint, substituted into the "{Endpoint}/face/v1.0" base URI. */
export declare const endpoint: msRest.OperationURLParameter;

@@ -4,0 +5,0 @@ export declare const faceListId: msRest.OperationURLParameter;

@@ -29,2 +29,15 @@ /*

};
/**
 * Operation query parameter for the optional face detection model.
 * Reads `options.detectionModel` from the operation options and serializes
 * it as the "detectionModel" query value; defaults to 'detection_01'.
 */
export var detectionModel = {
    parameterPath: ["options", "detectionModel"],
    mapper: {
        serializedName: "detectionModel",
        defaultValue: 'detection_01',
        type: { name: "String" }
    }
};
export var endpoint = {

@@ -134,3 +147,2 @@ parameterPath: "endpoint",

mapper: {
nullable: false,
serializedName: "recognitionModel",

@@ -137,0 +149,0 @@ defaultValue: 'recognition_01',

@@ -156,26 +156,38 @@ import * as msRest from "@azure/ms-rest-js";

* and attributes.<br />
* * Optional parameters including faceId, landmarks, and attributes. Attributes include age,
* gender, headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories,
* blur, exposure and noise.
* * The extracted face feature, instead of the actual image, will be stored on server. The faceId
* * No image will be stored. Only the extracted face feature will be stored on server. The faceId
* is an identifier of the face feature and will be used in [Face -
* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239), [Face -
* Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a), and [Face
* - Find Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237). It
* will expire 24 hours after the detection call.
* * Higher face image quality means better detection and recognition precision. Please consider
* high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or
* bigger.
* - Find Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
* The stored face feature(s) will expire and be deleted 24 hours after the original detection
* call.
* * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender,
* headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur,
* exposure and noise. Some of the results returned for specific attributes may not be highly
* accurate.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * Faces are detectable when its size is 36x36 to 4096x4096 pixels. If need to detect very small
* but clear faces, please try to enlarge the input image.
* * Up to 64 faces can be returned for an image. Faces are ranked by face rectangle size from
* * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from
* large to small.
* * Face detector prefer frontal and near-frontal faces. There are cases that faces may not be
* detected, e.g. exceptionally large face angles (head-pose) or being occluded, or wrong image
* orientation.
* * Attributes (age, gender, headPose, smile, facialHair, glasses, emotion, hair, makeup,
* occlusion, accessories, blur, exposure and noise) may not be perfectly accurate. HeadPose's
* pitch value is a reserved field and will always return 0.
* * For optimal results when querying [Face -
* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239), [Face -
* Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a), and [Face
* - Find Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
* ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of
* 200x200 pixels (100 pixels between eyes).
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*
* * Different 'recognitionModel' values are provided. If follow-up operations like Verify,

@@ -188,2 +200,9 @@ * Identify, Find Similar are needed, please specify the recognition model with 'recognitionModel'

* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'recognition_01': | The default recognition model for [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236). All those
* faceIds created before 2019 March are bonded with this recognition model. |
* | 'recognition_02': | Recognition model released in 2019 March. 'recognition_02' is recommended
* since its overall accuracy is improved compared with 'recognition_01'. |
* @param url Publicly reachable URL of an image

@@ -230,4 +249,54 @@ * @param [options] The optional parameters

/**
* Detect human faces in an image and returns face locations, and optionally with faceIds,
* landmarks, and attributes.
* Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks,
* and attributes.<br />
* * No image will be stored. Only the extracted face feature will be stored on server. The faceId
* is an identifier of the face feature and will be used in [Face -
* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239), [Face -
* Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a), and [Face
* - Find Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
* The stored face feature(s) will expire and be deleted 24 hours after the original detection
* call.
* * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender,
* headPose, smile, facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur,
* exposure and noise. Some of the results returned for specific attributes may not be highly
* accurate.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from
* large to small.
* * For optimal results when querying [Face -
* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239), [Face -
* Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a), and [Face
* - Find Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
* ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of
* 200x200 pixels (100 pixels between eyes).
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*
* * Different 'recognitionModel' values are provided. If follow-up operations like Verify,
* Identify, Find Similar are needed, please specify the recognition model with 'recognitionModel'
* parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed,
* please explicitly specify the model you need in this parameter. Once specified, the detected
* faceIds will be associated with the specified recognition model. More details, please refer to
* [How to specify a recognition
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'recognition_01': | The default recognition model for [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236). All those
* faceIds created before 2019 March are bonded with this recognition model. |
* | 'recognition_02': | Recognition model released in 2019 March. 'recognition_02' is recommended
* since its overall accuracy is improved compared with 'recognition_01'. |
* @param image An image stream.

@@ -234,0 +303,0 @@ * @param [options] The optional parameters

@@ -230,3 +230,4 @@ /*

Parameters.recognitionModel,
Parameters.returnRecognitionModel
Parameters.returnRecognitionModel,
Parameters.detectionModel
],

@@ -302,3 +303,4 @@ requestBody: {

Parameters.recognitionModel,
Parameters.returnRecognitionModel
Parameters.returnRecognitionModel,
Parameters.detectionModel
],

@@ -305,0 +307,0 @@ requestBody: {

@@ -19,4 +19,5 @@ import * as msRest from "@azure/ms-rest-js";

* Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395250) to import the
* faces. Faces are stored on server until [FaceList -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524f) is called.
* faces. No image will be stored. Only the extracted face features are stored on server until
* [FaceList - Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524f)
* is called.
* <br /> Find Similar is used for scenario like finding celebrity-like faces, similar face

@@ -31,8 +32,14 @@ * filtering, or as a light way face identification. But if the actual use is to identify person,

* [LargeFaceList](/docs/services/563879b61984550e40cbbe8d/operations/5a157b68d2de3616c086f2cc)
* when the face number is large. It can support up to 1,000,000 faces. 'recognitionModel' should
* be specified to associate with this face list. The default value for 'recognitionModel' is
* 'recognition_01', if the latest model needed, please explicitly specify the model you need in
* this parameter. New faces that are added to an existing face list will use the recognition model
* that's already associated with the collection. Existing face features in a face list can't be
* updated to features extracted by another version of recognition model.
* when the face number is large. It can support up to 1,000,000 faces.
* <br />'recognitionModel' should be specified to associate with this face list. The default value
* for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly
* specify the model you need in this parameter. New faces that are added to an existing face list
* will use the recognition model that's already associated with the collection. Existing face
* features in a face list can't be updated to features extracted by another version of recognition
* model.
* * 'recognition_01': The default recognition model for [FaceList-
* Create](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524b). All those
* face lists created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March. 'recognition_02' is recommended
* since its overall accuracy is improved compared with 'recognition_01'.
* @param faceListId Id referencing a particular face list.

@@ -91,4 +98,3 @@ * @param [options] The optional parameters

/**
* Delete an existing face list according to faceListId. Persisted face images in the face list
* will also be deleted.
* Delete a specified face list.
* @param faceListId Id referencing a particular face list.

@@ -128,4 +134,5 @@ * @param [options] The optional parameters

/**
* Delete an existing face from a face list (given by a persistedFaceId and a faceListId).
* Persisted image related to the face will also be deleted.
* Delete a face from a face list by specified faceListId and persistedFaceId.
* <br /> Adding/deleting faces to/from a same face list are processed sequentially and to/from
* different face lists are in parallel.
* @param faceListId Id referencing a particular face list.

@@ -151,4 +158,38 @@ * @param persistedFaceId Id referencing a particular persistedFaceId of an existing face.

/**
* Add a face to a face list. The input face is specified as an image with a targetFace rectangle.
* It returns a persistedFaceId representing the added face, and persistedFaceId will not expire.
* Add a face to a specified face list, up to 1,000 faces.
* <br /> To deal with an image that contains multiple faces, input face can be specified as an image
* with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image
* will be stored. Only the extracted face feature will be stored on server until [FaceList -
* Delete Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395251) or
* [FaceList - Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524f)
* is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better detection and recognition precision. Please consider
* high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or
* bigger.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same face list are processed sequentially and to/from
* different face lists are in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [FaceList - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395250). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param faceListId Id referencing a particular face list.

@@ -174,4 +215,38 @@ * @param url Publicly reachable URL of an image

/**
* Add a face to a face list. The input face is specified as an image with a targetFace rectangle.
* It returns a persistedFaceId representing the added face, and persistedFaceId will not expire.
* Add a face to a specified face list, up to 1,000 faces.
* <br /> To deal with an image that contains multiple faces, input face can be specified as an image
* with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image
* will be stored. Only the extracted face feature will be stored on server until [FaceList -
* Delete Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395251) or
* [FaceList - Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524f)
* is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better detection and recognition precision. Please consider
* high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or
* bigger.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same face list are processed sequentially and to/from
* different face lists are in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [FaceList - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395250). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param faceListId Id referencing a particular face list.

@@ -178,0 +253,0 @@ * @param image An image stream.

@@ -228,3 +228,4 @@ /*

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -256,3 +257,4 @@ requestBody: {

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -259,0 +261,0 @@ requestBody: {

@@ -21,5 +21,5 @@ import * as msRest from "@azure/ms-rest-js";

* Train](/docs/services/563879b61984550e40cbbe8d/operations/5a158422d2de3616c086f2d1) to make it
* ready for [Face -
* FindSimilar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237). Faces
* are stored on server until [LargeFaceList -
* ready for [Face - Find
* Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237). No image
* will be stored. Only the extracted face features are stored on server until [LargeFaceList -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/5a1580d5d2de3616c086f2cd) is called.

@@ -33,8 +33,4 @@ * <br /> Find Similar is used for scenario like finding celebrity-like faces, similar face

* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239).
* <br />
* * Free-tier subscription quota: 64 large face lists.
* * S0-tier subscription quota: 1,000,000 large face lists.
* <br />
* 'recognitionModel' should be specified to associate with this large face list. The default value
* for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly
* <br/>'recognitionModel' should be specified to associate with this large face list. The default
* value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly
* specify the model you need in this parameter. New faces that are added to an existing large face

@@ -44,2 +40,11 @@ * list will use the recognition model that's already associated with the collection. Existing face

* recognition model.
* * 'recognition_01': The default recognition model for [LargeFaceList-
* Create](/docs/services/563879b61984550e40cbbe8d/operations/5a157b68d2de3616c086f2cc). All those
* large face lists created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March. 'recognition_02' is recommended
* since its overall accuracy is improved compared with 'recognition_01'.
*
* Large face list quota:
* * Free-tier subscription quota: 64 large face lists.
* * S0-tier subscription quota: 1,000,000 large face lists.
* @param largeFaceListId Id referencing a particular large face list.

@@ -98,4 +103,3 @@ * @param [options] The optional parameters

/**
* Delete an existing large face list according to faceListId. Persisted face images in the large
* face list will also be deleted.
* Delete a specified large face list.
* @param largeFaceListId Id referencing a particular large face list.

@@ -184,4 +188,5 @@ * @param [options] The optional parameters

/**
* Delete an existing face from a large face list (given by a persistedFaceId and a
* largeFaceListId). Persisted image related to the face will also be deleted.
* Delete a face from a large face list by specified largeFaceListId and persistedFaceId.
* <br /> Adding/deleting faces to/from a same large face list are processed sequentially and
* to/from different large face lists are in parallel.
* @param largeFaceListId Id referencing a particular large face list.

@@ -250,5 +255,41 @@ * @param persistedFaceId Id referencing a particular persistedFaceId of an existing face.

/**
* Add a face to a large face list. The input face is specified as an image with a targetFace
* rectangle. It returns a persistedFaceId representing the added face, and persistedFaceId will
* not expire.
* Add a face to a specified large face list, up to 1,000,000 faces.
* <br /> To deal with an image that contains multiple faces, input face can be specified as an image
* with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image
* will be stored. Only the extracted face feature will be stored on server until [LargeFaceList
* Face - Delete](/docs/services/563879b61984550e40cbbe8d/operations/5a158c8ad2de3616c086f2d4) or
* [LargeFaceList -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/5a1580d5d2de3616c086f2cd) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same face list are processed sequentially and to/from
* different face lists are in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargeFaceList - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/5a158c10d2de3616c086f2d3). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*
* Quota:
* * Free-tier subscription quota: 1,000 faces per large face list.
* * S0-tier subscription quota: 1,000,000 faces per large face list.
* @param largeFaceListId Id referencing a particular large face list.

@@ -293,5 +334,41 @@ * @param url Publicly reachable URL of an image

/**
* Add a face to a large face list. The input face is specified as an image with a targetFace
* rectangle. It returns a persistedFaceId representing the added face, and persistedFaceId will
* not expire.
* Add a face to a specified large face list, up to 1,000,000 faces.
* <br /> To deal with an image contains multiple faces, input face can be specified as an image
* with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image
* will be stored. Only the extracted face feature will be stored on server until [LargeFaceList
* Face - Delete](/docs/services/563879b61984550e40cbbe8d/operations/5a158c8ad2de3616c086f2d4) or
* [LargeFaceList -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/5a1580d5d2de3616c086f2cd) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same face list are processed sequentially and to/from
* different face lists are in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargeFaceList - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/5a158c10d2de3616c086f2d3). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
*
* Quota:
* * Free-tier subscription quota: 1,000 faces per large face list.
* * S0-tier subscription quota: 1,000,000 faces per large face list.
* @param largeFaceListId Id referencing a particular large face list.

@@ -298,0 +375,0 @@ * @param image An image stream.

@@ -335,3 +335,4 @@ /*

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -395,3 +396,4 @@ requestBody: {

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -398,0 +400,0 @@ requestBody: {

@@ -15,4 +15,5 @@ import * as msRest from "@azure/ms-rest-js";

* userData and recognitionModel.
* <br /> A large person group is the container of the uploaded person data, including face images
* and face recognition feature, and up to 1,000,000 people.
* <br /> A large person group is the container of the uploaded person data, including face
* recognition feature, and up to 1,000,000
* people.
* <br /> After creation, use [LargePersonGroup Person -

@@ -24,16 +25,22 @@ * Create](/docs/services/563879b61984550e40cbbe8d/operations/599adcba3a7b9412a4d53f40) to add

* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239).
* <br /> The person face, image, and userData will be stored on server until [LargePersonGroup
* Person - Delete](/docs/services/563879b61984550e40cbbe8d/operations/599ade5c6ac60f11b48b5aa2) or
* <br /> No image will be stored. Only the person's extracted face features and userData will be
* stored on server until [LargePersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599ade5c6ac60f11b48b5aa2) or
* [LargePersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599adc216ac60f11b48b5a9f) is called.
* <br />
* <br/>'recognitionModel' should be specified to associate with this large person group. The
* default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please
* explicitly specify the model you need in this parameter. New faces that are added to an existing
* large person group will use the recognition model that's already associated with the collection.
* Existing face features in a large person group can't be updated to features extracted by another
* version of recognition model.
* * 'recognition_01': The default recognition model for [LargePersonGroup -
* Create](/docs/services/563879b61984550e40cbbe8d/operations/599acdee6ac60f11b48b5a9d). All those
* large person groups created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March. 'recognition_02' is recommended
* since its overall accuracy is improved compared with 'recognition_01'.
*
* Large person group quota:
* * Free-tier subscription quota: 1,000 large person groups.
* * S0-tier subscription quota: 1,000,000 large person groups.
* <br />
* 'recognitionModel' should be specified to associate with this large person group. The default
* value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly
* specify the model you need in this parameter. New faces that are added to an existing large
* person group will use the recognition model that's already associated with the collection.
* Existing face features in a large person group can't be updated to features extracted by another
* version of recognition model.
* @param largePersonGroupId Id referencing a particular large person group.

@@ -40,0 +47,0 @@ * @param [options] The optional parameters

@@ -50,4 +50,4 @@ import * as msRest from "@azure/ms-rest-js";

/**
* Delete an existing person from a large person group. All stored person data, and face features
* in the person entry will be deleted.
* Delete an existing person from a large person group. The persistedFaceId, userData, person name
* and face feature in the person entry will all be deleted.
* @param largePersonGroupId Id referencing a particular large person group.

@@ -73,3 +73,4 @@ * @param personId Id referencing a particular person.

/**
* Retrieve a person's information, including registered persisted faces, name and userData.
* Retrieve a person's name and userData, and the persisted faceIds representing the registered
* person face feature.
* @param largePersonGroupId Id referencing a particular large person group.

@@ -116,3 +117,6 @@ * @param personId Id referencing a particular person.

/**
* Delete a face from a person. Relative feature for the persisted face will also be deleted.
* Delete a face from a person in a large person group by specified largePersonGroupId, personId
* and persistedFaceId.
* <br /> Adding/deleting faces to/from a same person will be processed sequentially.
* Adding/deleting faces to/from different persons are processed in parallel.
* @param largePersonGroupId Id referencing a particular large person group.

@@ -190,4 +194,41 @@ * @param personId Id referencing a particular person.

/**
* Add a representative face to a person for identification. The input face is specified as an
* image with a targetFace rectangle.
* Add a face to a person into a large person group for face identification or verification. To
* deal with an image contains multiple faces, input face can be specified as an image with a
* targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be
* stored. Only the extracted face feature will be stored on server until [LargePersonGroup
* PersonFace -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599ae2966ac60f11b48b5aa3),
* [LargePersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599ade5c6ac60f11b48b5aa2) or
* [LargePersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599adc216ac60f11b48b5a9f) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * Each person entry can hold up to 248 faces.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting
* faces to/from different persons are processed in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargePersonGroup Person - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/599adf2a3a7b9412a4d53f42). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param largePersonGroupId Id referencing a particular large person group.

@@ -216,4 +257,41 @@ * @param personId Id referencing a particular person.

/**
* Add a representative face to a person for identification. The input face is specified as an
* image with a targetFace rectangle.
* Add a face to a person into a large person group for face identification or verification. To
* deal with an image contains multiple faces, input face can be specified as an image with a
* targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be
* stored. Only the extracted face feature will be stored on server until [LargePersonGroup
* PersonFace -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599ae2966ac60f11b48b5aa3),
* [LargePersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599ade5c6ac60f11b48b5aa2) or
* [LargePersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/599adc216ac60f11b48b5a9f) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * Each person entry can hold up to 248 faces.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting
* faces to/from different persons are processed in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [LargePersonGroup Person - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/599adf2a3a7b9412a4d53f42). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param largePersonGroupId Id referencing a particular large person group.

@@ -220,0 +298,0 @@ * @param personId Id referencing a particular person.

@@ -298,3 +298,4 @@ /*

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -327,3 +328,4 @@ requestBody: {

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -330,0 +332,0 @@ requestBody: {

@@ -15,4 +15,4 @@ import * as msRest from "@azure/ms-rest-js";

* recognitionModel.
* <br /> A person group is the container of the uploaded person data, including face images and
* face recognition features.
* <br /> A person group is the container of the uploaded person data, including face recognition
* features.
* <br /> After creation, use [PersonGroup Person -

@@ -24,7 +24,20 @@ * Create](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523c) to add

* Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239).
* <br /> The person's face, image, and userData will be stored on server until [PersonGroup Person
* - Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523d) or
* <br /> No image will be stored. Only the person's extracted face features and userData will be
* stored on server until [PersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523d) or
* [PersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395245) is called.
* <br />
* <br/>'recognitionModel' should be specified to associate with this person group. The default
* value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly
* specify the model you need in this parameter. New faces that are added to an existing person
* group will use the recognition model that's already associated with the collection. Existing
* face features in a person group can't be updated to features extracted by another version of
* recognition model.
* * 'recognition_01': The default recognition model for [PersonGroup -
* Create](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395244). All those
* person groups created before 2019 March are bonded with this recognition model.
* * 'recognition_02': Recognition model released in 2019 March. 'recognition_02' is recommended
* since its overall accuracy is improved compared with 'recognition_01'.
*
* Person group quota:
* * Free-tier subscription quota: 1,000 person groups. Each holds up to 1,000 persons.

@@ -34,9 +47,2 @@ * * S0-tier subscription quota: 1,000,000 person groups. Each holds up to 10,000 persons.

* [LargePersonGroup](/docs/services/563879b61984550e40cbbe8d/operations/599acdee6ac60f11b48b5a9d).
* <br />
* 'recognitionModel' should be specified to associate with this person group. The default value
* for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly
* specify the model you need in this parameter. New faces that are added to an existing person
* group will use the recognition model that's already associated with the collection. Existing
* face features in a person group can't be updated to features extracted by another version of
* recognition model.
* @param personGroupId Id referencing a particular person group.

@@ -43,0 +49,0 @@ * @param [options] The optional parameters

@@ -50,4 +50,4 @@ import * as msRest from "@azure/ms-rest-js";

/**
* Delete an existing person from a person group. All stored person data, and face features in the
* person entry will be deleted.
* Delete an existing person from a person group. The persistedFaceId, userData, person name and
* face feature in the person entry will all be deleted.
* @param personGroupId Id referencing a particular person group.

@@ -115,3 +115,6 @@ * @param personId Id referencing a particular person.

/**
* Delete a face from a person. Relative feature for the persisted face will also be deleted.
* Delete a face from a person in a person group by specified personGroupId, personId and
* persistedFaceId.
* <br /> Adding/deleting faces to/from a same person will be processed sequentially.
* Adding/deleting faces to/from different persons are processed in parallel.
* @param personGroupId Id referencing a particular person group.

@@ -165,3 +168,26 @@ * @param personId Id referencing a particular person.

/**
* Update a person persisted face's userData field.
* Add a face to a person into a person group for face identification or verification. To deal with
* an image contains multiple faces, input face can be specified as an image with a targetFace
* rectangle. It returns a persistedFaceId representing the added face. No image will be stored.
* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523e),
* [PersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523d) or
* [PersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395245) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * Each person entry can hold up to 248 faces.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is
* from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an
* error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting
* faces to/from different persons are processed in parallel.
* @param personGroupId Id referencing a particular person group.

@@ -190,4 +216,40 @@ * @param personId Id referencing a particular person.

/**
* Add a representative face to a person for identification. The input face is specified as an
* image with a targetFace rectangle.
* Add a face to a person into a person group for face identification or verification. To deal with
* an image contains multiple faces, input face can be specified as an image with a targetFace
* rectangle. It returns a persistedFaceId representing the added face. No image will be stored.
* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523e),
* [PersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523d) or
* [PersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395245) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * Each person entry can hold up to 248 faces.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size
* is from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as
* an error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting
* faces to/from different persons are processed in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [PersonGroup Person - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523b). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param personGroupId Id referencing a particular person group.

@@ -216,4 +278,40 @@ * @param personId Id referencing a particular person.

/**
* Add a representative face to a person for identification. The input face is specified as an
* image with a targetFace rectangle.
* Add a face to a person into a person group for face identification or verification. To deal with
* an image contains multiple faces, input face can be specified as an image with a targetFace
* rectangle. It returns a persistedFaceId representing the added face. No image will be stored.
* Only the extracted face feature will be stored on server until [PersonGroup PersonFace -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523e),
* [PersonGroup Person -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523d) or
* [PersonGroup -
* Delete](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395245) is called.
* <br /> Note persistedFaceId is different from faceId generated by [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
* * Higher face image quality means better recognition precision. Please consider high-quality
* faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
* * Each person entry can hold up to 248 faces.
* * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size
* is from 1KB to 6MB.
* * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as
* an error. If the provided "targetFace" rectangle is not returned from [Face -
* Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236), there’s no
* guarantee to detect and add the face successfully.
* * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions
* will cause failures.
* * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting
* faces to/from different persons are processed in parallel.
* * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels.
* Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum
* face size.
* * Different 'detectionModel' values can be provided. To use and compare different detection
* models, please refer to [How to specify a detection
* model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
* | Model | Recommended use-case(s) |
* | ---------- | -------- |
* | 'detection_01': | The default detection model for [PersonGroup Person - Add
* Face](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523b). Recommend
* for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces,
* occluded faces or wrong image orientation, the faces in such cases may not be detected. |
* | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on
* small, side and blurry faces. |
* @param personGroupId Id referencing a particular person group.

@@ -220,0 +318,0 @@ * @param personId Id referencing a particular person.

@@ -298,3 +298,4 @@ /*

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -327,3 +328,4 @@ requestBody: {

Parameters.userData,
Parameters.targetFace
Parameters.targetFace,
Parameters.detectionModel
],

@@ -330,0 +332,0 @@ requestBody: {

@@ -0,0 +0,0 @@ The MIT License (MIT)

@@ -5,5 +5,5 @@ {

"description": "FaceClient Library with typescript type definitions for node.js and browser.",
"version": "3.0.0",
"version": "3.1.0",
"dependencies": {
"@azure/ms-rest-js": "^1.6.0",
"@azure/ms-rest-js": "^1.8.1",
"tslib": "^1.9.3"

@@ -29,9 +29,9 @@ },

},
"homepage": "https://github.com/azure/azure-sdk-for-js/tree/master/packages/@azure/cognitiveservices-face",
"homepage": "https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/cognitiveservices/cognitiveservices-face",
"repository": {
"type": "git",
"url": "https://github.com/azure/azure-sdk-for-js.git"
"url": "https://github.com/Azure/azure-sdk-for-js.git"
},
"bugs": {
"url": "https://github.com/azure/azure-sdk-for-js/issues"
"url": "https://github.com/Azure/azure-sdk-for-js/issues"
},

@@ -47,3 +47,3 @@ "files": [

"esm/**/*.d.ts.map",
"lib/**/*.ts",
"src/**/*.ts",
"README.md",

@@ -50,0 +50,0 @@ "rollup.config.js",

@@ -101,1 +101,3 @@ ## An isomorphic javascript sdk for - FaceClient

- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js)
![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js/sdk/cognitiveservices/cognitiveservices-face/README.png)

@@ -17,4 +17,4 @@ {

},
"include": ["./lib/**/*.ts"],
"include": ["./src/**/*.ts"],
"exclude": ["node_modules"]
}

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc