Vendor dependencies for GCS

This commit is contained in:
Alexander Neumann
2017-08-05 20:17:15 +02:00
parent ba75a3884c
commit 8ca6a9a240
1228 changed files with 1769186 additions and 1 deletion

View File

@@ -0,0 +1,80 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package vision
import (
visionpb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
import (
"strconv"
"testing"
"time"
"cloud.google.com/go/internal/testutil"
"golang.org/x/net/context"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
)
var _ = iterator.Done
var _ = strconv.FormatUint
var _ = time.Now
func TestImageAnnotatorSmoke(t *testing.T) {
if testing.Short() {
t.Skip("skipping smoke test in short mode")
}
ctx := context.Background()
ts := testutil.TokenSource(ctx, DefaultAuthScopes()...)
if ts == nil {
t.Skip("Integration tests skipped. See CONTRIBUTING.md for details")
}
projectId := testutil.ProjID()
_ = projectId
c, err := NewImageAnnotatorClient(ctx, option.WithTokenSource(ts))
if err != nil {
t.Fatal(err)
}
var gcsImageUri string = "gs://gapic-toolkit/President_Barack_Obama.jpg"
var source = &visionpb.ImageSource{
GcsImageUri: gcsImageUri,
}
var image = &visionpb.Image{
Source: source,
}
var type_ visionpb.Feature_Type = visionpb.Feature_FACE_DETECTION
var featuresElement = &visionpb.Feature{
Type: type_,
}
var features = []*visionpb.Feature{featuresElement}
var requestsElement = &visionpb.AnnotateImageRequest{
Image: image,
Features: features,
}
var requests = []*visionpb.AnnotateImageRequest{requestsElement}
var request = &visionpb.BatchAnnotateImagesRequest{
Requests: requests,
}
if _, err := c.BatchAnnotateImages(ctx, request); err != nil {
t.Error(err)
}
}

9
vendor/cloud.google.com/go/vision/apiv1/README.md generated vendored Normal file
View File

@@ -0,0 +1,9 @@
Auto-generated vision v1 clients
=================================
This package includes auto-generated clients for the vision v1 API.
Use the handwritten client (in the parent directory,
cloud.google.com/go/vision) in preference to this.
This code is EXPERIMENTAL and subject to CHANGE AT ANY TIME.

151
vendor/cloud.google.com/go/vision/apiv1/client.go generated vendored Normal file
View File

@@ -0,0 +1,151 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vision
import (
gax "github.com/googleapis/gax-go"
"golang.org/x/net/context"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
)
// AnnotateImage runs image detection and annotation for a single image.
func (c *ImageAnnotatorClient) AnnotateImage(ctx context.Context, req *pb.AnnotateImageRequest, opts ...gax.CallOption) (*pb.AnnotateImageResponse, error) {
res, err := c.BatchAnnotateImages(ctx, &pb.BatchAnnotateImagesRequest{
Requests: []*pb.AnnotateImageRequest{req},
}, opts...)
if err != nil {
return nil, err
}
return res.Responses[0], nil
}
// Called for a single image and a single feature.
func (c *ImageAnnotatorClient) annotateOne(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, ftype pb.Feature_Type, maxResults int, opts []gax.CallOption) (*pb.AnnotateImageResponse, error) {
res, err := c.AnnotateImage(ctx, &pb.AnnotateImageRequest{
Image: img,
ImageContext: ictx,
Features: []*pb.Feature{{Type: ftype, MaxResults: int32(maxResults)}},
}, opts...)
if err != nil {
return nil, err
}
// When there is only one image and one feature, the response's Error field is
// unambiguously about that one detection, so we "promote" it to the error return
// value.
// res.Error is a google.rpc.Status. Convert to a Go error. Use a gRPC
// error because it preserves the code as a separate field.
// TODO(jba): preserve the details field.
if res.Error != nil {
return nil, grpc.Errorf(codes.Code(res.Error.Code), "%s", res.Error.Message)
}
return res, nil
}
// DetectFaces performs face detection on the image.
// At most maxResults results are returned.
func (c *ImageAnnotatorClient) DetectFaces(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, maxResults int, opts ...gax.CallOption) ([]*pb.FaceAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_FACE_DETECTION, maxResults, opts)
if err != nil {
return nil, err
}
return res.FaceAnnotations, nil
}
// DetectLandmarks performs landmark detection on the image.
// At most maxResults results are returned.
func (c *ImageAnnotatorClient) DetectLandmarks(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, maxResults int, opts ...gax.CallOption) ([]*pb.EntityAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_LANDMARK_DETECTION, maxResults, opts)
if err != nil {
return nil, err
}
return res.LandmarkAnnotations, nil
}
// DetectLogos performs logo detection on the image.
// At most maxResults results are returned.
func (c *ImageAnnotatorClient) DetectLogos(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, maxResults int, opts ...gax.CallOption) ([]*pb.EntityAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_LOGO_DETECTION, maxResults, opts)
if err != nil {
return nil, err
}
return res.LogoAnnotations, nil
}
// DetectLabels performs label detection on the image.
// At most maxResults results are returned.
func (c *ImageAnnotatorClient) DetectLabels(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, maxResults int, opts ...gax.CallOption) ([]*pb.EntityAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_LABEL_DETECTION, maxResults, opts)
if err != nil {
return nil, err
}
return res.LabelAnnotations, nil
}
// DetectTexts performs text detection on the image.
// At most maxResults results are returned.
func (c *ImageAnnotatorClient) DetectTexts(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, maxResults int, opts ...gax.CallOption) ([]*pb.EntityAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_TEXT_DETECTION, maxResults, opts)
if err != nil {
return nil, err
}
return res.TextAnnotations, nil
}
// DetectDocumentText performs full text (OCR) detection on the image.
func (c *ImageAnnotatorClient) DetectDocumentText(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, opts ...gax.CallOption) (*pb.TextAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_DOCUMENT_TEXT_DETECTION, 0, opts)
if err != nil {
return nil, err
}
return res.FullTextAnnotation, nil
}
// DetectSafeSearch performs safe-search detection on the image.
func (c *ImageAnnotatorClient) DetectSafeSearch(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, opts ...gax.CallOption) (*pb.SafeSearchAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_SAFE_SEARCH_DETECTION, 0, opts)
if err != nil {
return nil, err
}
return res.SafeSearchAnnotation, nil
}
// DetectImageProperties computes properties of the image.
func (c *ImageAnnotatorClient) DetectImageProperties(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, opts ...gax.CallOption) (*pb.ImageProperties, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_IMAGE_PROPERTIES, 0, opts)
if err != nil {
return nil, err
}
return res.ImagePropertiesAnnotation, nil
}
// DetectWeb computes a web annotation on the image.
func (c *ImageAnnotatorClient) DetectWeb(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, opts ...gax.CallOption) (*pb.WebDetection, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_WEB_DETECTION, 0, opts)
if err != nil {
return nil, err
}
return res.WebDetection, nil
}
// CropHints computes crop hints for the image.
func (c *ImageAnnotatorClient) CropHints(ctx context.Context, img *pb.Image, ictx *pb.ImageContext, opts ...gax.CallOption) (*pb.CropHintsAnnotation, error) {
res, err := c.annotateOne(ctx, img, ictx, pb.Feature_CROP_HINTS, 0, opts)
if err != nil {
return nil, err
}
return res.CropHintsAnnotation, nil
}

200
vendor/cloud.google.com/go/vision/apiv1/client_test.go generated vendored Normal file
View File

@@ -0,0 +1,200 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vision
import (
"fmt"
"reflect"
"testing"
"github.com/golang/protobuf/proto"
"golang.org/x/net/context"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
"google.golang.org/genproto/googleapis/rpc/status"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
)
var batchResponse = &pb.BatchAnnotateImagesResponse{
Responses: []*pb.AnnotateImageResponse{{
FaceAnnotations: []*pb.FaceAnnotation{
{RollAngle: 1}, {RollAngle: 2}},
LandmarkAnnotations: []*pb.EntityAnnotation{{Mid: "landmark"}},
LogoAnnotations: []*pb.EntityAnnotation{{Mid: "logo"}},
LabelAnnotations: []*pb.EntityAnnotation{{Mid: "label"}},
TextAnnotations: []*pb.EntityAnnotation{{Mid: "text"}},
FullTextAnnotation: &pb.TextAnnotation{Text: "full"},
SafeSearchAnnotation: &pb.SafeSearchAnnotation{Spoof: pb.Likelihood_POSSIBLE},
ImagePropertiesAnnotation: &pb.ImageProperties{DominantColors: &pb.DominantColorsAnnotation{}},
CropHintsAnnotation: &pb.CropHintsAnnotation{CropHints: []*pb.CropHint{{Confidence: 0.5}}},
WebDetection: &pb.WebDetection{WebEntities: []*pb.WebDetection_WebEntity{{EntityId: "web"}}},
}},
}
// Verify that all the "shortcut" methods use the underlying
// BatchAnnotateImages RPC correctly.
func TestClientMethods(t *testing.T) {
ctx := context.Background()
c, err := NewImageAnnotatorClient(ctx, clientOpt)
if err != nil {
t.Fatal(err)
}
mockImageAnnotator.resps = []proto.Message{batchResponse}
img := &pb.Image{Source: &pb.ImageSource{ImageUri: "http://foo.jpg"}}
ictx := &pb.ImageContext{LanguageHints: []string{"en", "fr"}}
req := &pb.AnnotateImageRequest{
Image: img,
ImageContext: ictx,
Features: []*pb.Feature{
{Type: pb.Feature_LABEL_DETECTION, MaxResults: 3},
{Type: pb.Feature_FACE_DETECTION, MaxResults: 4},
},
}
for i, test := range []struct {
call func() (interface{}, error)
wantFeatures []*pb.Feature
wantRes interface{}
}{
{
func() (interface{}, error) { return c.AnnotateImage(ctx, req) },
req.Features, batchResponse.Responses[0],
},
{
func() (interface{}, error) { return c.DetectFaces(ctx, img, ictx, 2) },
[]*pb.Feature{{pb.Feature_FACE_DETECTION, 2}},
batchResponse.Responses[0].FaceAnnotations,
},
{
func() (interface{}, error) { return c.DetectLandmarks(ctx, img, ictx, 2) },
[]*pb.Feature{{pb.Feature_LANDMARK_DETECTION, 2}},
batchResponse.Responses[0].LandmarkAnnotations,
},
{
func() (interface{}, error) { return c.DetectLogos(ctx, img, ictx, 2) },
[]*pb.Feature{{pb.Feature_LOGO_DETECTION, 2}},
batchResponse.Responses[0].LogoAnnotations,
},
{
func() (interface{}, error) { return c.DetectLabels(ctx, img, ictx, 2) },
[]*pb.Feature{{pb.Feature_LABEL_DETECTION, 2}},
batchResponse.Responses[0].LabelAnnotations,
},
{
func() (interface{}, error) { return c.DetectTexts(ctx, img, ictx, 2) },
[]*pb.Feature{{pb.Feature_TEXT_DETECTION, 2}},
batchResponse.Responses[0].TextAnnotations,
},
{
func() (interface{}, error) { return c.DetectDocumentText(ctx, img, ictx) },
[]*pb.Feature{{pb.Feature_DOCUMENT_TEXT_DETECTION, 0}},
batchResponse.Responses[0].FullTextAnnotation,
},
{
func() (interface{}, error) { return c.DetectSafeSearch(ctx, img, ictx) },
[]*pb.Feature{{pb.Feature_SAFE_SEARCH_DETECTION, 0}},
batchResponse.Responses[0].SafeSearchAnnotation,
},
{
func() (interface{}, error) { return c.DetectImageProperties(ctx, img, ictx) },
[]*pb.Feature{{pb.Feature_IMAGE_PROPERTIES, 0}},
batchResponse.Responses[0].ImagePropertiesAnnotation,
},
{
func() (interface{}, error) { return c.DetectWeb(ctx, img, ictx) },
[]*pb.Feature{{pb.Feature_WEB_DETECTION, 0}},
batchResponse.Responses[0].WebDetection,
},
{
func() (interface{}, error) { return c.CropHints(ctx, img, ictx) },
[]*pb.Feature{{pb.Feature_CROP_HINTS, 0}},
batchResponse.Responses[0].CropHintsAnnotation,
},
} {
mockImageAnnotator.reqs = nil
res, err := test.call()
if err != nil {
t.Fatal(err)
}
got := mockImageAnnotator.reqs[0]
want := &pb.BatchAnnotateImagesRequest{
Requests: []*pb.AnnotateImageRequest{{
Image: img,
ImageContext: ictx,
Features: test.wantFeatures,
}},
}
if !testEqual(got, want) {
t.Errorf("#%d:\ngot %v\nwant %v", i, got, want)
}
if got, want := res, test.wantRes; !testEqual(got, want) {
t.Errorf("#%d:\ngot %v\nwant %v", i, got, want)
}
}
}
func testEqual(a, b interface{}) bool {
if a == nil && b == nil {
return true
}
if a == nil || b == nil {
return false
}
t := reflect.TypeOf(a)
if t != reflect.TypeOf(b) {
return false
}
if am, ok := a.(proto.Message); ok {
return proto.Equal(am, b.(proto.Message))
}
if t.Kind() != reflect.Slice {
panic(fmt.Sprintf("testEqual can only handle proto.Message and slices, got %s", t))
}
va := reflect.ValueOf(a)
vb := reflect.ValueOf(b)
if va.Len() != vb.Len() {
return false
}
for i := 0; i < va.Len(); i++ {
if !testEqual(va.Index(i).Interface(), vb.Index(i).Interface()) {
return false
}
}
return true
}
func TestAnnotateOneError(t *testing.T) {
ctx := context.Background()
c, err := NewImageAnnotatorClient(ctx, clientOpt)
if err != nil {
t.Fatal(err)
}
mockImageAnnotator.resps = []proto.Message{
&pb.BatchAnnotateImagesResponse{
Responses: []*pb.AnnotateImageResponse{{
Error: &status.Status{Code: int32(codes.NotFound), Message: "not found"},
}},
},
}
_, err = c.annotateOne(ctx,
&pb.Image{Source: &pb.ImageSource{ImageUri: "http://foo.jpg"}},
nil, pb.Feature_LOGO_DETECTION, 1, nil)
if grpc.Code(err) != codes.NotFound {
t.Errorf("got %v, want NotFound")
}
}

45
vendor/cloud.google.com/go/vision/apiv1/doc.go generated vendored Normal file
View File

@@ -0,0 +1,45 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
// Package vision is an experimental, auto-generated package for the
// Google Cloud Vision API.
//
// Integrates Google Vision features, including image labeling, face, logo,
// and landmark detection, optical character recognition (OCR), and detection
// of explicit content, into applications.
//
// Use the client at cloud.google.com/go/vision in preference to this.
package vision // import "cloud.google.com/go/vision/apiv1"
import (
"golang.org/x/net/context"
"google.golang.org/grpc/metadata"
)
func insertXGoog(ctx context.Context, val []string) context.Context {
md, _ := metadata.FromOutgoingContext(ctx)
md = md.Copy()
md["x-goog-api-client"] = val
return metadata.NewOutgoingContext(ctx, md)
}
// DefaultAuthScopes reports the authentication scopes required
// by this package.
func DefaultAuthScopes() []string {
return []string{
"https://www.googleapis.com/auth/cloud-platform",
}
}

View File

@@ -0,0 +1,92 @@
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vision_test
import (
"fmt"
"os"
vision "cloud.google.com/go/vision/apiv1"
"golang.org/x/net/context"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
func Example_NewImageFromReader() {
f, err := os.Open("path/to/image.jpg")
if err != nil {
// TODO: handle error.
}
img, err := vision.NewImageFromReader(f)
if err != nil {
// TODO: handle error.
}
fmt.Println(img)
}
func Example_NewImageFromURI() {
img := vision.NewImageFromURI("gs://my-bucket/my-image.png")
fmt.Println(img)
}
func ExampleImageAnnotatorClient_AnnotateImage() {
ctx := context.Background()
c, err := vision.NewImageAnnotatorClient(ctx)
if err != nil {
// TODO: Handle error.
}
res, err := c.AnnotateImage(ctx, &pb.AnnotateImageRequest{
Image: vision.NewImageFromURI("gs://my-bucket/my-image.png"),
Features: []*pb.Feature{
{Type: pb.Feature_LANDMARK_DETECTION, MaxResults: 5},
{Type: pb.Feature_LABEL_DETECTION, MaxResults: 3},
},
})
if err != nil {
// TODO: Handle error.
}
// TODO: Use res.
_ = res
}
func Example_FaceFromLandmarks() {
ctx := context.Background()
c, err := vision.NewImageAnnotatorClient(ctx)
if err != nil {
// TODO: Handle error.
}
resp, err := c.BatchAnnotateImages(ctx, &pb.BatchAnnotateImagesRequest{
Requests: []*pb.AnnotateImageRequest{
{
Image: vision.NewImageFromURI("gs://bucket/image.jpg"),
Features: []*pb.Feature{{
Type: pb.Feature_FACE_DETECTION,
MaxResults: 5,
}},
},
},
})
if err != nil {
// TODO: Handle error.
}
res := resp.Responses[0]
if res.Error != nil {
// TODO: Handle error.
}
for _, a := range res.FaceAnnotations {
face := vision.FaceFromLandmarks(a.Landmarks)
fmt.Println(face.Nose.Tip)
fmt.Println(face.Eyes.Left.Pupil)
}
}

153
vendor/cloud.google.com/go/vision/apiv1/face.go generated vendored Normal file
View File

@@ -0,0 +1,153 @@
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vision
import (
"log"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
// FaceLandmarks contains the positions of facial features detected by the service.
type FaceLandmarks struct {
Eyebrows Eyebrows
Eyes Eyes
Ears Ears
Nose Nose
Mouth Mouth
Chin Chin
Forehead *pb.Position
}
// Eyebrows represents a face's eyebrows.
type Eyebrows struct {
Left, Right Eyebrow
}
// Eyebrow represents a face's eyebrow.
type Eyebrow struct {
Top, Left, Right *pb.Position
}
// Eyes represents a face's eyes.
type Eyes struct {
Left, Right Eye
}
// Eye represents a face's eye.
type Eye struct {
Left, Right, Top, Bottom, Center, Pupil *pb.Position
}
// Ears represents a face's ears.
type Ears struct {
Left, Right *pb.Position
}
// Nose represents a face's nose.
type Nose struct {
Left, Right, Top, Bottom, Tip *pb.Position
}
// Mouth represents a face's mouth.
type Mouth struct {
Left, Center, Right, UpperLip, LowerLip *pb.Position
}
// Chin represents a face's chin.
type Chin struct {
Left, Center, Right *pb.Position
}
// FaceFromLandmarks converts the list of face landmarks returned by the service
// to a FaceLandmarks struct.
func FaceFromLandmarks(landmarks []*pb.FaceAnnotation_Landmark) *FaceLandmarks {
face := &FaceLandmarks{}
for _, lm := range landmarks {
switch lm.Type {
case pb.FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW:
face.Eyebrows.Left.Left = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW:
face.Eyebrows.Left.Right = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW:
face.Eyebrows.Right.Left = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW:
face.Eyebrows.Right.Right = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT:
face.Eyebrows.Left.Top = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT:
face.Eyebrows.Right.Top = lm.Position
case pb.FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES:
face.Nose.Top = lm.Position
case pb.FaceAnnotation_Landmark_NOSE_TIP:
face.Nose.Tip = lm.Position
case pb.FaceAnnotation_Landmark_UPPER_LIP:
face.Mouth.UpperLip = lm.Position
case pb.FaceAnnotation_Landmark_LOWER_LIP:
face.Mouth.LowerLip = lm.Position
case pb.FaceAnnotation_Landmark_MOUTH_LEFT:
face.Mouth.Left = lm.Position
case pb.FaceAnnotation_Landmark_MOUTH_RIGHT:
face.Mouth.Right = lm.Position
case pb.FaceAnnotation_Landmark_MOUTH_CENTER:
face.Mouth.Center = lm.Position
case pb.FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT:
face.Nose.Right = lm.Position
case pb.FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT:
face.Nose.Left = lm.Position
case pb.FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER:
face.Nose.Bottom = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYE:
face.Eyes.Left.Center = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYE:
face.Eyes.Right.Center = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY:
face.Eyes.Left.Top = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER:
face.Eyes.Left.Right = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY:
face.Eyes.Left.Bottom = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER:
face.Eyes.Left.Left = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY:
face.Eyes.Right.Top = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER:
face.Eyes.Right.Right = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY:
face.Eyes.Right.Bottom = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER:
face.Eyes.Right.Left = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EYE_PUPIL:
face.Eyes.Left.Pupil = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EYE_PUPIL:
face.Eyes.Right.Pupil = lm.Position
case pb.FaceAnnotation_Landmark_LEFT_EAR_TRAGION:
face.Ears.Left = lm.Position
case pb.FaceAnnotation_Landmark_RIGHT_EAR_TRAGION:
face.Ears.Right = lm.Position
case pb.FaceAnnotation_Landmark_FOREHEAD_GLABELLA:
face.Forehead = lm.Position
case pb.FaceAnnotation_Landmark_CHIN_GNATHION:
face.Chin.Center = lm.Position
case pb.FaceAnnotation_Landmark_CHIN_LEFT_GONION:
face.Chin.Left = lm.Position
case pb.FaceAnnotation_Landmark_CHIN_RIGHT_GONION:
face.Chin.Right = lm.Position
default:
log.Printf("vision: ignoring unknown face annotation landmark %s", lm.Type)
}
}
return face
}

228
vendor/cloud.google.com/go/vision/apiv1/face_test.go generated vendored Normal file
View File

@@ -0,0 +1,228 @@
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vision
import (
"testing"
"cloud.google.com/go/internal/pretty"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
func TestFaceFromLandmarks(t *testing.T) {
landmarks := []*pb.FaceAnnotation_Landmark{
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYE,
Position: &pb.Position{X: 1192, Y: 575, Z: 0},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYE,
Position: &pb.Position{X: 1479, Y: 571, Z: -9},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW,
Position: &pb.Position{X: 1097, Y: 522, Z: 27},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW,
Position: &pb.Position{X: 1266, Y: 521, Z: -61},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW,
Position: &pb.Position{X: 1402, Y: 520, Z: -66},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW,
Position: &pb.Position{X: 1571, Y: 519, Z: 10},
},
{
Type: pb.FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES,
Position: &pb.Position{X: 1331, Y: 566, Z: -66},
},
{
Type: pb.FaceAnnotation_Landmark_NOSE_TIP,
Position: &pb.Position{X: 1329, Y: 743, Z: -137},
},
{
Type: pb.FaceAnnotation_Landmark_UPPER_LIP,
Position: &pb.Position{X: 1330, Y: 836, Z: -66},
},
{
Type: pb.FaceAnnotation_Landmark_LOWER_LIP,
Position: &pb.Position{X: 1334, Y: 954, Z: -36},
},
{
Type: pb.FaceAnnotation_Landmark_MOUTH_LEFT,
Position: &pb.Position{X: 1186, Y: 867, Z: 27},
},
{
Type: pb.FaceAnnotation_Landmark_MOUTH_RIGHT,
Position: &pb.Position{X: 1484, Y: 857, Z: 19},
},
{
Type: pb.FaceAnnotation_Landmark_MOUTH_CENTER,
Position: &pb.Position{X: 1332, Y: 894, Z: -41},
},
{
Type: pb.FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT,
Position: &pb.Position{X: 1432, Y: 750, Z: -26},
},
{
Type: pb.FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT,
Position: &pb.Position{X: 1236, Y: 755, Z: -20},
},
{
Type: pb.FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER,
Position: &pb.Position{X: 1332, Y: 783, Z: -70},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY,
Position: &pb.Position{X: 1193, Y: 561, Z: -20},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER,
Position: &pb.Position{X: 1252, Y: 581, Z: -1},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY,
Position: &pb.Position{X: 1190, Y: 593, Z: -1},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER,
Position: &pb.Position{X: 1133, Y: 584, Z: 28},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYE_PUPIL,
Position: &pb.Position{X: 1189, Y: 580, Z: -8},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY,
Position: &pb.Position{X: 1474, Y: 561, Z: -30},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER,
Position: &pb.Position{X: 1536, Y: 581, Z: 15},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY,
Position: &pb.Position{X: 1481, Y: 590, Z: -11},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER,
Position: &pb.Position{X: 1424, Y: 579, Z: -6},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYE_PUPIL,
Position: &pb.Position{X: 1478, Y: 580, Z: -18},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT,
Position: &pb.Position{X: 1181, Y: 482, Z: -40},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT,
Position: &pb.Position{X: 1485, Y: 482, Z: -50},
},
{
Type: pb.FaceAnnotation_Landmark_LEFT_EAR_TRAGION,
Position: &pb.Position{X: 1027, Y: 696, Z: 361},
},
{
Type: pb.FaceAnnotation_Landmark_RIGHT_EAR_TRAGION,
Position: &pb.Position{X: 1666, Y: 695, Z: 339},
},
{
Type: pb.FaceAnnotation_Landmark_FOREHEAD_GLABELLA,
Position: &pb.Position{X: 1332, Y: 514, Z: -75},
},
{
Type: pb.FaceAnnotation_Landmark_CHIN_GNATHION,
Position: &pb.Position{X: 1335, Y: 1058, Z: 6},
},
{
Type: pb.FaceAnnotation_Landmark_CHIN_LEFT_GONION,
Position: &pb.Position{X: 1055, Y: 882, Z: 257},
},
{
Type: pb.FaceAnnotation_Landmark_CHIN_RIGHT_GONION,
Position: &pb.Position{X: 1631, Y: 881, Z: 238},
},
}
want := &FaceLandmarks{
Eyebrows: Eyebrows{
Left: Eyebrow{
Top: &pb.Position{X: 1181, Y: 482, Z: -40},
Left: &pb.Position{X: 1097, Y: 522, Z: 27},
Right: &pb.Position{X: 1266, Y: 521, Z: -61},
},
Right: Eyebrow{
Top: &pb.Position{X: 1485, Y: 482, Z: -50},
Left: &pb.Position{X: 1402, Y: 520, Z: -66},
Right: &pb.Position{X: 1571, Y: 519, Z: 10},
},
},
Eyes: Eyes{
Left: Eye{
Left: &pb.Position{X: 1133, Y: 584, Z: 28},
Right: &pb.Position{X: 1252, Y: 581, Z: -1},
Top: &pb.Position{X: 1193, Y: 561, Z: -20},
Bottom: &pb.Position{X: 1190, Y: 593, Z: -1},
Center: &pb.Position{X: 1192, Y: 575, Z: 0},
Pupil: &pb.Position{X: 1189, Y: 580, Z: -8},
},
Right: Eye{
Left: &pb.Position{X: 1424, Y: 579, Z: -6},
Right: &pb.Position{X: 1536, Y: 581, Z: 15},
Top: &pb.Position{X: 1474, Y: 561, Z: -30},
Bottom: &pb.Position{X: 1481, Y: 590, Z: -11},
Center: &pb.Position{X: 1479, Y: 571, Z: -9},
Pupil: &pb.Position{X: 1478, Y: 580, Z: -18},
},
},
Ears: Ears{
Left: &pb.Position{X: 1027, Y: 696, Z: 361},
Right: &pb.Position{X: 1666, Y: 695, Z: 339},
},
Nose: Nose{
Left: &pb.Position{X: 1236, Y: 755, Z: -20},
Right: &pb.Position{X: 1432, Y: 750, Z: -26},
Top: &pb.Position{X: 1331, Y: 566, Z: -66},
Bottom: &pb.Position{X: 1332, Y: 783, Z: -70},
Tip: &pb.Position{X: 1329, Y: 743, Z: -137},
},
Mouth: Mouth{
Left: &pb.Position{X: 1186, Y: 867, Z: 27},
Center: &pb.Position{X: 1332, Y: 894, Z: -41},
Right: &pb.Position{X: 1484, Y: 857, Z: 19},
UpperLip: &pb.Position{X: 1330, Y: 836, Z: -66},
LowerLip: &pb.Position{X: 1334, Y: 954, Z: -36},
},
Chin: Chin{
Left: &pb.Position{X: 1055, Y: 882, Z: 257},
Center: &pb.Position{X: 1335, Y: 1058, Z: 6},
Right: &pb.Position{X: 1631, Y: 881, Z: 238},
},
Forehead: &pb.Position{X: 1332, Y: 514, Z: -75},
}
got := FaceFromLandmarks(landmarks)
msg, ok, err := pretty.Diff(want, got)
if err != nil {
t.Fatal(err)
}
if !ok {
t.Error(msg)
}
}

37
vendor/cloud.google.com/go/vision/apiv1/image.go generated vendored Normal file
View File

@@ -0,0 +1,37 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vision
import (
"io"
"io/ioutil"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
// NewImageFromReader reads the bytes of an image from r.
func NewImageFromReader(r io.Reader) (*pb.Image, error) {
bytes, err := ioutil.ReadAll(r)
if err != nil {
return nil, err
}
return &pb.Image{Content: bytes}, nil
}
// NewImageFromURI returns an image that refers to an object in Google Cloud Storage
// (when the uri is of the form "gs://BUCKET/OBJECT") or at a public URL.
func NewImageFromURI(uri string) *pb.Image {
return &pb.Image{Source: &pb.ImageSource{ImageUri: uri}}
}

View File

@@ -0,0 +1,133 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package vision
import (
"time"
"cloud.google.com/go/internal/version"
gax "github.com/googleapis/gax-go"
"golang.org/x/net/context"
"google.golang.org/api/option"
"google.golang.org/api/transport"
visionpb "google.golang.org/genproto/googleapis/cloud/vision/v1"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
)
// ImageAnnotatorCallOptions contains the retry settings for each method of ImageAnnotatorClient.
type ImageAnnotatorCallOptions struct {
	// BatchAnnotateImages holds the gax call options (retry/backoff)
	// applied to every ImageAnnotatorClient.BatchAnnotateImages call.
	BatchAnnotateImages []gax.CallOption
}
// defaultImageAnnotatorClientOptions returns the dial options used when the
// caller supplies none: the production Vision endpoint plus the default
// OAuth scopes.
func defaultImageAnnotatorClientOptions() []option.ClientOption {
	opts := []option.ClientOption{
		option.WithEndpoint("vision.googleapis.com:443"),
		option.WithScopes(DefaultAuthScopes()...),
	}
	return opts
}
// defaultImageAnnotatorCallOptions returns the per-method call options.
// Idempotent calls are retried on DeadlineExceeded and Unavailable with
// exponential backoff: 100ms initial delay, 60s cap, 1.3 multiplier.
func defaultImageAnnotatorCallOptions() *ImageAnnotatorCallOptions {
	idempotent := []gax.CallOption{
		gax.WithRetry(func() gax.Retryer {
			backoff := gax.Backoff{
				Initial:    100 * time.Millisecond,
				Max:        60000 * time.Millisecond,
				Multiplier: 1.3,
			}
			return gax.OnCodes([]codes.Code{
				codes.DeadlineExceeded,
				codes.Unavailable,
			}, backoff)
		}),
	}
	return &ImageAnnotatorCallOptions{
		BatchAnnotateImages: idempotent,
	}
}
// ImageAnnotatorClient is a client for interacting with Google Cloud Vision API.
//
// Create one with NewImageAnnotatorClient and release it with Close.
type ImageAnnotatorClient struct {
	// The connection to the service; created by NewImageAnnotatorClient
	// and closed by Close.
	conn *grpc.ClientConn
	// The gRPC API client (generated stub) that each RPC is forwarded to.
	imageAnnotatorClient visionpb.ImageAnnotatorClient
	// The call options for this service; defaults set by
	// defaultImageAnnotatorCallOptions, overridable by callers.
	CallOptions *ImageAnnotatorCallOptions
	// The metadata to be sent with each request (the x-goog-api-client
	// header value); populated by SetGoogleClientInfo.
	xGoogHeader []string
}
// NewImageAnnotatorClient creates a new image annotator client.
//
// Service that performs Google Cloud Vision API detection tasks over client
// images, such as face, landmark, logo, label, and text detection. The
// ImageAnnotator service returns detected entities from the images.
func NewImageAnnotatorClient(ctx context.Context, opts ...option.ClientOption) (*ImageAnnotatorClient, error) {
	// Caller-supplied options follow the defaults so they take precedence.
	dialOpts := append(defaultImageAnnotatorClientOptions(), opts...)
	conn, err := transport.DialGRPC(ctx, dialOpts...)
	if err != nil {
		return nil, err
	}
	client := &ImageAnnotatorClient{
		conn:                 conn,
		CallOptions:          defaultImageAnnotatorCallOptions(),
		imageAnnotatorClient: visionpb.NewImageAnnotatorClient(conn),
	}
	client.SetGoogleClientInfo()
	return client, nil
}
// Connection returns the client's connection to the API service.
//
// This is the same connection established by NewImageAnnotatorClient; it is
// shared by every RPC made through this client.
func (c *ImageAnnotatorClient) Connection() *grpc.ClientConn {
	return c.conn
}
// Close closes the connection to the API service. The user should invoke this when
// the client is no longer required.
//
// It closes the underlying gRPC connection created by NewImageAnnotatorClient
// and returns any error from that close.
func (c *ImageAnnotatorClient) Close() error {
	return c.conn.Close()
}
// SetGoogleClientInfo sets the name and version of the application in
// the `x-goog-api-client` header passed on each request. Intended for
// use by Google-written clients.
func (c *ImageAnnotatorClient) SetGoogleClientInfo(keyval ...string) {
	// Order: Go runtime version, caller-supplied pairs, then library versions.
	pairs := []string{"gl-go", version.Go()}
	pairs = append(pairs, keyval...)
	pairs = append(pairs, "gapic", version.Repo, "gax", gax.Version, "grpc", grpc.Version)
	c.xGoogHeader = []string{gax.XGoogHeader(pairs...)}
}
// BatchAnnotateImages run image detection and annotation for a batch of images.
func (c *ImageAnnotatorClient) BatchAnnotateImages(ctx context.Context, req *visionpb.BatchAnnotateImagesRequest, opts ...gax.CallOption) (*visionpb.BatchAnnotateImagesResponse, error) {
	ctx = insertXGoog(ctx, c.xGoogHeader)
	// Full-slice expression pins capacity so appending the caller's opts
	// never mutates the shared default option slice.
	defaults := c.CallOptions.BatchAnnotateImages
	callOpts := append(defaults[:len(defaults):len(defaults)], opts...)
	var resp *visionpb.BatchAnnotateImagesResponse
	call := func(ctx context.Context, settings gax.CallSettings) error {
		var err error
		resp, err = c.imageAnnotatorClient.BatchAnnotateImages(ctx, req, settings.GRPC...)
		return err
	}
	if err := gax.Invoke(ctx, call, callOpts...); err != nil {
		return nil, err
	}
	return resp, nil
}

View File

@@ -0,0 +1,51 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package vision_test
import (
"cloud.google.com/go/vision/apiv1"
"golang.org/x/net/context"
visionpb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
// ExampleNewImageAnnotatorClient demonstrates constructing a client.
func ExampleNewImageAnnotatorClient() {
	ctx := context.Background()
	client, err := vision.NewImageAnnotatorClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use client.
	_ = client
}
// ExampleImageAnnotatorClient_BatchAnnotateImages demonstrates a batch
// annotation request.
func ExampleImageAnnotatorClient_BatchAnnotateImages() {
	ctx := context.Background()
	client, err := vision.NewImageAnnotatorClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &visionpb.BatchAnnotateImagesRequest{
		// TODO: Fill request struct fields.
	}
	resp, err := client.BatchAnnotateImages(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}

159
vendor/cloud.google.com/go/vision/apiv1/mock_test.go generated vendored Normal file
View File

@@ -0,0 +1,159 @@
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package vision
import (
visionpb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
import (
"flag"
"fmt"
"io"
"log"
"net"
"os"
"strings"
"testing"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes"
"golang.org/x/net/context"
"google.golang.org/api/option"
status "google.golang.org/genproto/googleapis/rpc/status"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
gstatus "google.golang.org/grpc/status"
)
// Reference otherwise-unused imports so the generated file compiles even
// when no test below happens to use them.
var _ = io.EOF
var _ = ptypes.MarshalAny
var _ status.Status
// mockImageAnnotatorServer is an in-process fake ImageAnnotator service.
// Tests preload resps/err before a call and inspect reqs afterwards.
type mockImageAnnotatorServer struct {
	// Embed for forward compatibility.
	// Tests will keep working if more methods are added
	// in the future.
	visionpb.ImageAnnotatorServer
	// reqs records every request received, in arrival order.
	reqs []proto.Message
	// If set, all calls return this error.
	err error
	// responses to return if err == nil
	resps []proto.Message
}
// BatchAnnotateImages records req and replies with the preloaded response
// (or error). It also checks that the x-goog-api-client metadata carries a
// "gl-go/" token, as set by the client's SetGoogleClientInfo.
func (s *mockImageAnnotatorServer) BatchAnnotateImages(ctx context.Context, req *visionpb.BatchAnnotateImagesRequest) (*visionpb.BatchAnnotateImagesResponse, error) {
	md, _ := metadata.FromIncomingContext(ctx)
	xg := md["x-goog-api-client"]
	if len(xg) == 0 || !strings.Contains(xg[0], "gl-go/") {
		return nil, fmt.Errorf("x-goog-api-client = %v, expected gl-go key", xg)
	}
	s.reqs = append(s.reqs, req)
	if s.err != nil {
		return nil, s.err
	}
	resp := s.resps[0].(*visionpb.BatchAnnotateImagesResponse)
	return resp, nil
}
// clientOpt is the option tests should use to connect to the test server.
// It is initialized by TestMain.
var clientOpt option.ClientOption
var (
	// mockImageAnnotator is the shared in-process fake that TestMain
	// registers with the test gRPC server; tests mutate its fields.
	mockImageAnnotator mockImageAnnotatorServer
)
// TestMain starts an in-process gRPC server backed by the mock annotator on
// an ephemeral port, dials it, and stores the connection in clientOpt for
// every test to reuse.
func TestMain(m *testing.M) {
	flag.Parse()

	srv := grpc.NewServer()
	visionpb.RegisterImageAnnotatorServer(srv, &mockImageAnnotator)

	lis, err := net.Listen("tcp", "localhost:0")
	if err != nil {
		log.Fatal(err)
	}
	go srv.Serve(lis)

	conn, err := grpc.Dial(lis.Addr().String(), grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	clientOpt = option.WithGRPCConn(conn)

	os.Exit(m.Run())
}
// TestImageAnnotatorBatchAnnotateImages verifies the success path: the
// request reaches the mock server unchanged and the server's canned
// response is returned to the caller.
func TestImageAnnotatorBatchAnnotateImages(t *testing.T) {
	var expectedResponse *visionpb.BatchAnnotateImagesResponse = &visionpb.BatchAnnotateImagesResponse{}

	// Reset the shared mock so state from other tests cannot leak in.
	mockImageAnnotator.err = nil
	mockImageAnnotator.reqs = nil
	mockImageAnnotator.resps = append(mockImageAnnotator.resps[:0], expectedResponse)

	var requests []*visionpb.AnnotateImageRequest = nil
	var request = &visionpb.BatchAnnotateImagesRequest{
		Requests: requests,
	}

	c, err := NewImageAnnotatorClient(context.Background(), clientOpt)
	if err != nil {
		t.Fatal(err)
	}

	resp, err := c.BatchAnnotateImages(context.Background(), request)
	if err != nil {
		t.Fatal(err)
	}

	if want, got := request, mockImageAnnotator.reqs[0]; !proto.Equal(want, got) {
		t.Errorf("wrong request %q, want %q", got, want)
	}
	// Fixed: the original format string had a stray ')' after the second
	// verb ("want %q)"), which garbled the failure message.
	if want, got := expectedResponse, resp; !proto.Equal(want, got) {
		t.Errorf("wrong response %q, want %q", got, want)
	}
}
// TestImageAnnotatorBatchAnnotateImagesError verifies the failure path: an
// error injected into the mock server surfaces to the caller as a gRPC
// status carrying the original code.
func TestImageAnnotatorBatchAnnotateImagesError(t *testing.T) {
	errCode := codes.PermissionDenied
	mockImageAnnotator.err = gstatus.Error(errCode, "test error")

	var requests []*visionpb.AnnotateImageRequest = nil
	request := &visionpb.BatchAnnotateImagesRequest{
		Requests: requests,
	}

	client, err := NewImageAnnotatorClient(context.Background(), clientOpt)
	if err != nil {
		t.Fatal(err)
	}

	resp, err := client.BatchAnnotateImages(context.Background(), request)
	st, ok := gstatus.FromError(err)
	switch {
	case !ok:
		t.Errorf("got error %v, expected grpc error", err)
	case st.Code() != errCode:
		t.Errorf("got error code %q, want %q", st.Code(), errCode)
	}
	_ = resp
}