I am trying to write a Swagger config for the AWS API Gateway deployment, and came up with this for a sample (mostly copied from the documentation):
{
"swagger": "2.0",
"info": {
"description": "desc",
"title": "TestAPI",
"version": "1.0"
},
"schemes": [
"https"
],
"paths": {
"/": {
"get": {
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"responses": {
"200": {
"description": "test",
"headers": {
"Content-type": {
"type": "string"
}
}
}
},
"x-amazon-apigateway-integration" : {
"type" : "aws",
"uri" : "[REDACTED]",
"credentials" : "[REDACTED]",
"httpMethod" : "POST",
"requestTemplates" : {
"application/json" : "#set ($root=$input.path('$')) { \"stage\": \"$root.name\", \"user-id\": \"$root.key\" }"
},
"requestParameters" : {
"integration.request.querystring.stage" : "method.request.querystring.version",
"integration.request.querystring.provider" : "method.request.querystring.vendor"
},
"responses" : {
"2\\d{2}" : {
"statusCode" : "200",
"responseParameters" : {
"method.response.header.requestId" : "integration.response.header.cid"
},
"responseTemplates" : {
"application/json" : "#set ($root=$input.path('$')) { \"stage\": \"$root.name\", \"user-id\": \"$root.key\" }"
}
},
"302" : {
"statusCode" : "302",
"responseParameters" : {
"method.response.header.Location" : "integration.response.body.redirect.url"
}
},
"default" : {
"statusCode" : "400",
"responseParameters" : {
"method.response.header.test-method-response-header" : "'static value'"
}
}
}
}
}
}
}
}
But the problem is that
aws apigateway import-rest-api --body 'file://deploy/api.json' --region eu-west-1
Outputs the following:
An error occurred (BadRequestException) when calling the ImportRestApi operation: Errors found during import:
Unable to put integration on 'GET' for resource at path '/': Invalid mapping expression specified: Validation Result: warnings : [], errors : [Invalid mapping expression parameter specified: method.request.querystring.version]
That part is taken directly from the documentation, so this is really confusing to me. Any ideas what to do? Documentation seems lacking in many ways, so it is hard to solve many problems with it.
Have you tried defining version and vendor as parameters to the method?
{
"swagger": "2.0",
"info": {
"description": "desc",
"title": "TestAPI",
"version": "1.0"
},
"schemes": [
"https"
],
"paths": {
"/": {
"get": {
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"parameters" : [
{
"name" : "version",
"in" : "query",
"required" : true,
"type" : "string"
"description" : "The version requested"
},
{
"name" : "vendor",
"in" : "query",
"required" : true,
"type" : "string"
"description" : "The vendor being queried"
}
],
"responses": {
"200": {
"description": "test",
"headers": {
"Content-type": {
"type": "string"
}
}
}
},
"x-amazon-apigateway-integration" : {
"type" : "aws",
"uri" : "[REDACTED]",
"credentials" : "[REDACTED]",
"httpMethod" : "POST",
"requestTemplates" : {
"application/json" : "#set ($root=$input.path('$')) { \"stage\": \"$root.name\", \"user-id\": \"$root.key\" }"
},
"requestParameters" : {
"integration.request.querystring.stage" : "method.request.querystring.version",
"integration.request.querystring.provider" : "method.request.querystring.vendor"
},
"responses" : {
"2\\d{2}" : {
"statusCode" : "200",
"responseParameters" : {
"method.response.header.requestId" : "integration.response.header.cid"
},
"responseTemplates" : {
"application/json" : "#set ($root=$input.path('$')) { \"stage\": \"$root.name\", \"user-id\": \"$root.key\" }"
}
},
"302" : {
"statusCode" : "302",
"responseParameters" : {
"method.response.header.Location" : "integration.response.body.redirect.url"
}
},
"default" : {
"statusCode" : "400",
"responseParameters" : {
"method.response.header.test-method-response-header" : "'static value'"
}
}
}
}
}
}
}
}
API Gateway requires method request parameters to be defined before being referenced
Related
We have an API gateway with many routes managed by terraform API gateway resources. As the number of routes increased, it was difficult to manage them from the resources. Hence, we are trying to move it to the body method with merge so that the new routes can be added by importing openapi spec with amazon extensions. But the custom lambda authorizer is not getting attached to the method request.
OpenAPI Spec:
openapi_config = {
openapi = "3.0.1"
info = {
title = "im-dev-api-gateway"
version = "1.0"
}
paths = {
"/v1/api/go/data" : {
"post" : {
"parameters" : [{
"name" : "proxy",
"in" : "path",
"required" : true,
"schema" : {
"type" : "string"
}
}],
"responses" : {
"200" : {
"description" : "200 response",
"headers" : {
"Access-Control-Allow-Origin" : {
"schema" : {
"type" : "string"
}
}
},
"content" : {
"application/json" : {
"schema" : {
"$ref" : "#/components/schemas/Empty"
}
}
}
}
},
"security" : [{
"im-dev-lambda-authorizer" : []
}],
"x-amazon-apigateway-integration" : {
"httpMethod" : "POST",
"uri" : "https://$${stageVariables.LoadBalancerURL}/v1/api/go/data",
"requestParameters" : {
"integration.request.header.X-Auth-Client-ID" : "context.authorizer.x-auth-client-id",
"integration.request.path.proxy" : "method.request.path.proxy",
"integration.request.header.X-Request-ID" : "context.authorizer.x-request-id"
},
"passthroughBehavior" : "when_no_match",
"timeoutInMillis" : 29000,
"type" : "http_proxy"
}
},
"options" : {
"responses" : {
"200" : {
"description" : "200 response",
"headers" : {
"Access-Control-Allow-Origin" : {
"schema" : {
"type" : "string"
}
},
"Access-Control-Allow-Methods" : {
"schema" : {
"type" : "string"
}
},
"Access-Control-Allow-Headers" : {
"schema" : {
"type" : "string"
}
}
},
"content" : {
"application/json" : {
"schema" : {
"$ref" : "#/components/schemas/Empty"
}
}
}
}
},
"x-amazon-apigateway-integration" : {
"responses" : {
"default" : {
"statusCode" : "200",
"responseParameters" : {
"method.response.header.Access-Control-Allow-Methods" : "'GET,OPTIONS,POST,PUT'",
"method.response.header.Access-Control-Allow-Headers" : "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Auth-Client-ID,X-Request-ID'",
"method.response.header.Access-Control-Allow-Origin" : "'*'"
}
}
},
"requestTemplates" : {
"application/json" : "{\"statusCode\": 200}"
},
"passthroughBehavior" : "never",
"timeoutInMillis" : 29000,
"type" : "mock"
}
}
},
"components" : {
"schemas" : {
"Empty" : {
"title" : "Empty Schema",
"type" : "object"
}
},
"securitySchemes" : {
"im-dev-lambda-authorizer" : {
"type" : "apiKey",
"name" : "Unused",
"in" : "header",
"x-amazon-apigateway-authtype" : "custom",
"x-amazon-apigateway-authorizer" : {
"authorizerUri" : "arn:aws:apigateway:ap-south-1:lambda:path/2015-03-31/functions/arn:aws:lambda:ap-south-1:999999999:function:im-dev-authorizer/invocations",
"authorizerCredentials" : "arn:aws:iam::999999999:role/im-dev-api-gateway-auth-invocation",
"authorizerResultTtlInSeconds" : 0,
"identitySource" : "context.$context.requestId",
"type" : "request"
}
}
}
},
}
}
}
I need to call an API to load data on an ongoing basis. API returns different properties for each event. When I create the swagger file it has properties from sample return, but in the long run there will be more properties which can be added by the source system and they will not be in swagger file.
Is there any way to recreate swagger file before data load dynamically with additional properties?
Swagger file is generated by Informatica Cloud based on a sample return while testing the connection.
Properties list has a different number of entries based on event type.
swagger file:
{"swagger" : "2.0",
"info" : {
"description" : null,
"version" : "1.0.0",
"title" : null,
"termsOfService" : null,
"contact" : null,
"license" : null
},
"host" : "<host>.com",
"basePath" : "/api",
"schemes" : [ "https" ],
"paths" : {
"/2.0" : {
"post" : {
"tags" : [ "events" ],
"summary" : null,
"description" : null,
"operationId" : "events",
"produces" : [ "application/json" ],
"consumes" : [ "application/json" ],
"parameters" : [ {
"name" : "script",
"in" : "query",
"description" : null,
"required" : false,
"type" : "string"
}, {
"name" : "Authorization",
"in" : "header",
"description" : null,
"required" : false,
"type" : "string"
} ],
"responses" : {
"200" : {
"description" : "successful operation",
"schema" : {
"$ref" : "#/definitions/events"
}
}
}
}
}
},
"definitions" : {
"events##properties" : {
"properties" : {
"$app_build_number" : {
"type" : "string"
},
"$app_version_string" : {
"type" : "string"
},
"$carrier" : {
"type" : "string"
},
"$lib_version" : {
"type" : "string"
},
"$manufacturer" : {
"type" : "string"
},
"$model" : {
"type" : "string"
},
"$os" : {
"type" : "string"
},
"$os_version" : {
"type" : "string"
},
"$radio" : {
"type" : "string"
},
"$region" : {
"type" : "string"
},
"$screen_height" : {
"type" : "number",
"format" : "int32"
},
"$screen_width" : {
"type" : "number",
"format" : "int32"
},
"Home Step Enabled" : {
"type" : "string"
},
"Number Of Lifetime Logins" : {
"type" : "number",
"format" : "int32"
},
"Sessions" : {
"type" : "number",
"format" : "int32"
},
"mp_country_code" : {
"type" : "string"
},
"mp_lib" : {
"type" : "string"
}
}
},
"events" : {
"properties" : {
"name" : {
"type" : "string"
},
"distinct_id" : {
"type" : "string"
},
"labels" : {
"type" : "string"
},
"time" : {
"type" : "number",
"format" : "int64"
},
"sampling_factor" : {
"type" : "number",
"format" : "int32"
},
"dataset" : {
"type" : "string"
},
"properties" : {
"$ref" : "#/definitions/events##properties"
}
}
}
}
}
sample return:
"name": "Session",
"distinct_id": "1234567890",
"labels": [],
"time": 1520072505000,
"sampling_factor": 1,
"dataset": "$event_data_set",
"properties": {
"$app_build_number": "900",
"$app_version_string": "1.9",
"$carrier": "AT&T",
"$lib_version": "2.0.1",
"$manufacturer": "Apple",
"$model": "iPhone10,6",
"$os": "iOS",
"$os_version": "11.2.6",
"$radio": "LTE",
"$region": "Florida",
"$screen_height": 667,
"$screen_width": 375,
"Number Of Lifetime Logins": 2,
"Session Length": "00h:00m:08s",
"Sessions": 43,
"mp_country_code": "US",
"mp_lib": "swift"
}
}
To troubleshoot this error I have tried with Elasticsearch 2.x and 5.x, but it doesn't work in either of them.
I have lots of logs saved in my Elasticsearch instance. They have a field called timestamp whose format is "YYYY-MM-dd HH-mm-ss.SSS" (for example, "2017-11-02 00:00:00.000"). When I try to send a query via POSTMAN which is this:
{
"query": {
"range": {
"timestamp": {
"gte": "2017-10-21 00:00:00.000",
"lte": "2017-10-27 00:00:00.000"
}
}
}
}
I receive nothing, and I know there are more than 500 logs in that range. What am I doing wrong?
EDIT:
My index (loganalyzer):
{
"loganalyzer" : {
"aliases" : { },
"mappings" : {
"logs" : {
"properties" : {
"entireLog" : {
"type" : "string"
},
"formattedMessage" : {
"type" : "string"
},
"id" : {
"type" : "string"
},
"level" : {
"type" : "string"
},
"loggerName" : {
"type" : "string"
},
"testNo" : {
"type" : "string"
},
"threadName" : {
"type" : "string"
},
"timestamp" : {
"type" : "string"
}
}
}
},
"settings" : {
"index" : {
"refresh_interval" : "1s",
"number_of_shards" : "5",
"creation_date" : "1507415366223",
"store" : {
"type" : "fs"
},
"number_of_replicas" : "1",
"uuid" : "9w3QQQc0S0K0NcKtOERtTw",
"version" : {
"created" : "2040699"
}
}
},
"warmers" : { }
}
}
What I receive sending the request:
{
"took": 429,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 0,
"max_score": null,
"hits": []
}
}
And status 200 (OK).
Your edit with the mappings indicates the problem. The reason you aren't getting any result is because it's attempting to find a "range" for the string you're providing against the values of the field in your index, which are also treated as a string.
"timestamp" : {
"type" : "string"
}
Here's the elastic documentation on that mapping type
You need to apply a date mapping to that field before indexing, or reindex to a new index that has that mapping applied prior to ingestion.
Here is what the mapping request could look like, conforming to your timestamp format:
PUT loganalyzer
{
"mappings": {
"logs": {
"properties": {
"timestamp": {
"type": "date",
"format": "YYYY-MM-dd HH-mm-ss.SSS"
}
}
}
}
}
I am indexing a data stream to Elasticsearch and I cannot figure out how to normalize incoming data to make it index without error. I have a mapping type "getdatavalues" which is a meta-data query. This meta-data query can return very different looking responses but I'm not seeing the difference. The error I get:
{"index":{"_index":"ens_event-2016.03.11","_type":"getdatavalues","_id":"865800029798177_2016_03_11_03_18_12_100037","status":400,"error":"MapperParsingException[object mapping for [getdatavalues] tried to parse field [output] as object, but got EOF, has a concrete value been provided to it?]"}}
when performing:
curl -XPUT 'http://192.168.99.100:80/es/ens_event-2016.03.11/getdatavalues/865800029798177_2016_03_11_03_18_12_100037' -d '{
"type": "getDataValues",
"input": {
"deviceID": {
"IMEI": "865800029798177",
"serial-number": "64180258"
},
"handle": 644,
"exprCode": "200000010300140000080001005f00a700000000000000",
"noRollHandle": "478669308-578452",
"transactionID": 290
},
"timestamp": "2016-03-11T03:18:12.000Z",
"handle": 644,
"output": {
"noRollPubSessHandle": "478669308-578740",
"publishSessHandle": 1195,
"status": true,
"matchFilter": {
"prefix": "publicExpr.operatorDefined.commercialIdentifier.FoodSvcs.Restaurant.\"A&C Kabul Curry\".\"Rooster Street\"",
"argValues": {
"event": "InternationalEvent",
"hasEvent": "anyEvent"
}
},
"transactionID": 290,
"validFor": 50
}
}'
Here's what Elasticsearch has for the mapping:
"getdatavalues" : {
"dynamic_templates" : [ {
"strings" : {
"mapping" : {
"index" : "not_analyzed",
"type" : "string"
},
"match_mapping_type" : "string"
}
} ],
"properties" : {
"handle" : {
"type" : "long"
},
"input" : {
"properties" : {
"deviceID" : {
"properties" : {
"IMEI" : {
"type" : "string",
"index" : "not_analyzed"
},
"serial-number" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"exprCode" : {
"type" : "string",
"index" : "not_analyzed"
},
"handle" : {
"type" : "long"
},
"noRollHandle" : {
"type" : "string",
"index" : "not_analyzed"
},
"serviceVersion" : {
"type" : "string",
"index" : "not_analyzed"
},
"transactionID" : {
"type" : "long"
}
}
},
"output" : {
"properties" : {
"matchFilter" : {
"properties" : {
"argValues" : {
"properties" : {
"Interests" : {
"type" : "object"
},
"MerchantId" : {
"type" : "string",
"index" : "not_analyzed"
},
"Queue" : {
"type" : "string",
"index" : "not_analyzed"
},
"Vibe" : {
"type" : "string",
"index" : "not_analyzed"
},
"event" : {
"properties" : {
"event" : {
"type" : "string",
"index" : "not_analyzed"
},
"hasEvent" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"hasEvent" : {
"type" : "string",
"index" : "not_analyzed"
},
"interests" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"prefix" : {
"type" : "string",
"index" : "not_analyzed"
},
"transactionID" : {
"type" : "long"
},
"validFor" : {
"type" : "long"
}
}
},
"noRollPubSessHandle" : {
"type" : "string",
"index" : "not_analyzed"
},
"publishSessHandle" : {
"type" : "long"
},
"status" : {
"type" : "boolean"
},
"transactionID" : {
"type" : "long"
},
"validFor" : {
"type" : "long"
}
}
},
"timestamp" : {
"type" : "date",
"format" : "dateOptionalTime"
},
"type" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
Looks like the argValues object doesn't quite agree with your mapping:
"argValues": {
"event": "InternationalEvent",
"hasEvent": "anyEvent"
}
Either this:
"argValues": {
"event": {
"event": "InternationalEvent"
},
"hasEvent": "anyEvent"
}
Or this:
"argValues": {
"event": {
"event": "InternationalEvent"
"hasEvent": "anyEvent"
}
}
Would both seem to be valid.
I have some documents stored in ES (by logstash). and the results, when querying ES, do not look right:
The first query (see the queries and the results below) is supposed(meant) to return only documents that do not contain region field.
Even further, based on the result of the first query , obviously there is a document that contains field region, however, the results for second query which should (at least) return a document with region=IN, contains no documents.
Is something wrong with my queries?
How can I investigate where the problem is? (The ES logs do not have anything related to these queries)
Here is the query:
curl -X GET 'http://localhost:9200/logstash*/_search?pretty' -d '{
"query" : {
"match_all" : {}
},
filter : {
"and" : [
{ "term" : { "type" : "xsys" } },
{ "missing" : { "field" : "region" } }
]
}, size: 2
}'
And the result:
{
"took" : 40,
"timed_out" : false,
"_shards" : {
"total" : 90,
"successful" : 90,
"failed" : 0
},
"hits" : {
"total" : 5747,
"max_score" : 1.0,
"hits" : [ {
"_index" : "logstash-2013.09.28",
"_type" : "logs",
"_id" : "UMrz9bwKQgCq__TwBT0WmQ",
"_score" : 1.0,
"_source" : {
.....
"type":"xsys",
....
"region":"IN",
}
}, { ....
} ]
}
}
Furthermore, the result for the following query:
curl -X GET 'http://localhost:9200/logstash*/_search?pretty' -d '{
"query" : { "match_all" : {} },
filter : { "term" : { "region" : "IN" } },
size: 1
}'
is:
{
"took" : 55,
"timed_out" : false,
"_shards" : {
"total" : 90,
"successful" : 90,
"failed" : 0
},
"hits" : {
"total" : 0,
"max_score" : null,
"hits" : [ ]
}
The following mapping is used:
curl -XPUT http://localhost:9200/_template/logstash_per_index -d '
{
"template": "logstash*",
"settings": {
"index.query.default_field": "message",
"index.cache.field.type": "soft",
"index.store.compress.stored": true
},
"mappings": {
"_default_": {
"_all": { "enabled": false },
"properties": {
"message": { "type": "string", "index": "analyzed" },
"#version": { "type": "string", "index": "not_analyzed" },
"#timestamp": { "type": "date", "index": "not_analyzed" },
"type": { "type": "string", "index": "not_analyzed" },
....
"region": { "type": "string", "index": "not_analyzed" },
...
}
}
}
}'
Mapping (what ES has returned - curl -XGET 'http://localhost:9200/logstash-2013.09.28/_mapping):
{
"logstash-2013.09.28":{
"logs":{
"_all":{
"enabled":false
},
"properties":{
"#timestamp":{
"type":"date",
"format":"dateOptionalTime"
},
"#version":{
"type":"string",
"index":"not_analyzed",
"omit_norms":true,
"index_options":"docs"
},
"message":{
"type":"string"
},
"region":{
"type":"string"
},
"type":{
"type":"string",
"index":"not_analyzed",
"omit_norms":true,
"index_options":"docs"
}
}
},
"_default_":{
"_all":{
"enabled":false
},
"properties":{
"#timestamp":{
"type":"date",
"format":"dateOptionalTime"
},
"#version":{
"type":"string",
"index":"not_analyzed",
"omit_norms":true,
"index_options":"docs"
},
"message":{
"type":"string"
},
"type":{
"type":"string",
"index":"not_analyzed",
"omit_norms":true,
"index_options":"docs"
}
}
}
}
}