Skip to content

Commit 9109873

Browse files
readme: update with autogen'd examples
1 parent a3406b0 commit 9109873

File tree

1 file changed

+45
-110
lines changed

README.md

Lines changed: 45 additions & 110 deletions
Original file line numberDiff line numberDiff line change
@@ -637,94 +637,29 @@ var visionClient = vision({
637637
keyFilename: '/path/to/keyfile.json'
638638
});
639639

640-
// Read the text from an image.
641-
visionClient.detectText('./image.jpg', function(err, text) {
642-
// text = [
643-
// 'This was text found in the image',
644-
// 'This was more text found in the image'
645-
// ]
646-
});
647-
648-
// Detect faces and the locations of their features in an image.
649-
visionClient.detectFaces('./image.jpg', function(err, faces) {
650-
// faces = [
651-
// {
652-
// angles: {pan,tilt,roll},
653-
// bounds: {
654-
// head: [{x,y},{x,y},{x,y},{x,y}],
655-
// face: [{x,y},{x,y},{x,y},{x,y}]
656-
// },
657-
// features: {
658-
// confidence: 34.489909,
659-
// chin: {
660-
// center: {x,y,z},
661-
// left: {x,y,z},
662-
// right: {x,y,z}
663-
// },
664-
// ears: {
665-
// left: {x,y,z},
666-
// right: {x,y,z}
667-
// },
668-
// eyebrows: {
669-
// left: {
670-
// left: {x,y,z},
671-
// right: {x,y,z},
672-
// top: {x,y,z}
673-
// },
674-
// right: {
675-
// left: {x,y,z},
676-
// right: {x,y,z},
677-
// top: {x,y,z}
678-
// }
679-
// },
680-
// eyes: {
681-
// left: {
682-
// bottom: {x,y,z},
683-
// center: {x,y,z},
684-
// left: {x,y,z},
685-
// pupil: {x,y,z},
686-
// right: {x,y,z},
687-
// top: {x,y,z}
688-
// },
689-
// right: {
690-
// bottom: {x,y,z},
691-
// center: {x,y,z},
692-
// left: {x,y,z},
693-
// pupil: {x,y,z},
694-
// right: {x,y,z},
695-
// top: {x,y,z}
696-
// }
697-
// },
698-
// forehead: {x,y,z},
699-
// lips: {
700-
// bottom: {x,y,z},
701-
// top: {x,y,z}
702-
// },
703-
// mouth: {
704-
// center: {x,y,z},
705-
// left: {x,y,z},
706-
// right: {x,y,z}
707-
// },
708-
// nose: {
709-
// bottom: {
710-
// center: {x,y,z},
711-
// left: {x,y,z},
712-
// right: {x,y,z}
713-
// },
714-
// tip: {x,y,z},
715-
// top: {x,y,z}
716-
// }
717-
// },
718-
// confidence: 56.748849,
719-
// blurry: false,
720-
// dark: false,
721-
// happy: false,
722-
// hat: false,
723-
// mad: false,
724-
// sad: false,
725-
// surprised: false
726-
// }
727-
// ]
640+
var gcsImageUri = 'gs://gapic-toolkit/President_Barack_Obama.jpg';
641+
var source = {
642+
gcsImageUri : gcsImageUri
643+
};
644+
var image = {
645+
source : source
646+
};
647+
var type = vision.v1.types.Feature.Type.FACE_DETECTION;
648+
var featuresElement = {
649+
type : type
650+
};
651+
var features = [featuresElement];
652+
var requestsElement = {
653+
image : image,
654+
features : features
655+
};
656+
var requests = [requestsElement];
657+
visionClient.batchAnnotateImages({requests: requests}).then(function(responses) {
658+
var response = responses[0];
659+
// doThingsWith(response)
660+
})
661+
.catch(function(err) {
662+
console.error(err);
728663
});
729664
```
730665

@@ -998,29 +933,29 @@ var speechClient = speech({
998933
keyFilename: '/path/to/keyfile.json'
999934
});
1000935

1001-
// Detect the speech in an audio file.
1002-
speechClient.recognize('./audio.raw', {
1003-
encoding: 'LINEAR16',
1004-
sampleRateHertz: 16000
1005-
}, function(err, transcript) {
1006-
// transcript = 'how old is the Brooklyn Bridge'
936+
var languageCode = 'en-US';
937+
var sampleRateHertz = 44100;
938+
var encoding = speech.v1.types.RecognitionConfig.AudioEncoding.FLAC;
939+
var config = {
940+
languageCode : languageCode,
941+
sampleRateHertz : sampleRateHertz,
942+
encoding : encoding
943+
};
944+
var uri = 'gs://gapic-toolkit/hello.flac';
945+
var audio = {
946+
uri : uri
947+
};
948+
var request = {
949+
config: config,
950+
audio: audio
951+
};
952+
speechClient.recognize(request).then(function(responses) {
953+
var response = responses[0];
954+
// doThingsWith(response)
955+
})
956+
.catch(function(err) {
957+
console.error(err);
1007958
});
1008-
1009-
// Detect the speech in an audio file stream.
1010-
fs.createReadStream('./audio.raw')
1011-
.on('error', console.error)
1012-
.pipe(speechClient.createRecognizeStream({
1013-
config: {
1014-
encoding: 'LINEAR16',
1015-
sampleRateHertz: 16000
1016-
},
1017-
singleUtterance: false,
1018-
interimResults: false
1019-
}))
1020-
.on('error', console.error)
1021-
.on('data', function(data) {
1022-
// data.results = "how old is the Brooklyn Bridge"
1023-
});
1024959
```
1025960

1026961

0 commit comments

Comments (0)