I can't trigger my second action on Google Assistant - actions-on-google

I have been playing around with the Actions SDK and it seems to work, but only for my main intent. I added a second intent and it never triggers.
Here is my action.json:
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "conversation_1"
      },
      "intent": {
        "name": "actions.intent.MAIN"
      }
    },
    {
      "name": "add",
      "intent": {
        "name": "myintent.ADD",
        "parameters": [
          {
            "name": "somenumber",
            "type": "SchemaOrg_Number"
          }
        ],
        "trigger": {
          "queryPatterns": [
            "add $SchemaOrg_Number:somenumber",
            "add"
          ]
        }
      },
      "fulfillment": {
        "conversationName": "add"
      }
    }
  ],
  "conversations": {
    "conversation_1": {
      "name": "conversation_1",
      "url": "https://myaddress/sayNumber",
      "fulfillmentApiVersion": 2
    },
    "add": {
      "name": "add",
      "url": "https://myaddress/sayNumber",
      "fulfillmentApiVersion": 2
    }
  }
}
And here is my index.js:
'use strict';

process.env.DEBUG = 'actions-on-google:*';
const ActionsSdkApp = require('actions-on-google').ActionsSdkApp;
const functions = require('firebase-functions');

const NO_INPUTS = [
  'I didn\'t hear that.',
  'If you\'re still there, say that again.',
  'We can stop here. See you soon.'
];

exports.sayNumber = functions.https.onRequest((request, response) => {
  const app = new ActionsSdkApp({request, response});

  function mainIntent (app) {
    console.log('mainIntent');
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can read out an ordinal like ' +
      '<say-as interpret-as="ordinal">123</say-as>. Say a number.</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function addIntent (app) {
    console.log('addIntent');
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can add.</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function rawInput (app) {
    console.log('rawInput');
    if (app.getRawInput() === 'bye') {
      app.tell('Goodbye!');
    } else {
      let inputPrompt = app.buildInputPrompt(true, '<speak>You said, <say-as interpret-as="ordinal">' +
        app.getRawInput() + '</say-as>' + app.getIntent() + '</speak>', NO_INPUTS);
      app.ask(inputPrompt);
    }
  }

  let actionMap = new Map();
  actionMap.set(app.StandardIntents.MAIN, mainIntent);
  actionMap.set(app.StandardIntents.TEXT, rawInput);
  actionMap.set("myintent.ADD", addIntent);

  app.handleRequest(actionMap);
});
I can say "talk to my action name", and then everything I say after that gets handled as raw input, even if I use the "add" keywords. What am I doing wrong?

That is correct. The action package (action.json) only defines how users can start a conversation with your Action. Once the conversation has started, you are passed TEXT (or OPTION) intents and you are expected to handle the natural language processing yourself. Additional intents can be used for speech biasing, but they aren't used to parse the response.
This is different from how some other voice agents handle language parsing. The Actions SDK is primarily intended for developers who already have their own NLP.
If you don't, you are probably better off using something like Dialogflow or Converse.AI.
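For example, a minimal sketch of what handling it yourself could look like inside the rawInput handler from the question (the regular expression and responses are illustrative, not part of the asker's code):
// Hypothetical sketch: parse "add <number>" out of the raw input yourself,
// since extra action.json intents are not matched once the conversation has started.
function rawInput (app) {
  const raw = app.getRawInput();
  const addMatch = /^add\s+(\d+)/i.exec(raw); // extremely naive "NLP"
  if (addMatch) {
    const someNumber = parseInt(addMatch[1], 10);
    app.ask('<speak>You asked me to add ' + someNumber + '.</speak>');
  } else if (raw === 'bye') {
    app.tell('Goodbye!');
  } else {
    app.ask('You said ' + raw);
  }
}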

Related

"How to get username by userID in google assistant action?"

I connected my chatbot to a Google Assistant action. It only gives the userID; how can I get the username using this userID?
You can get the username without knowing the userID by asking for it through the permissions flow, described in the documentation here. You can take a look at this sample code.
Or you can use the account linking feature.
Tip: for the userID, you can check out this doc.
For Python:
There is no official library for developing a Google Action using Python, but you can add the permission intent to the possibleIntents array. So your Actions SDK JSON will be:
{
  "expectUserResponse": true,
  "expectedInputs": [
    {
      "inputPrompt": {
        "richInitialPrompt": {
          "items": [
            {
              "simpleResponse": {
                "textToSpeech": "PLACEHOLDER"
              }
            }
          ]
        }
      },
      "possibleIntents": [
        {
          "intent": "actions.intent.PERMISSION",
          "inputValueData": {
            "@type": "type.googleapis.com/google.actions.v2.PermissionValueSpec",
            "optContext": "To address you by name and know your location",
            "permissions": [
              "NAME",
              "DEVICE_PRECISE_LOCATION"
            ]
          }
        }
      ]
    }
  ],
  "conversationToken": "{\"data\":{}}",
  "userStorage": "{\"data\":{}}"
}
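If you are able to use the official Node.js client library instead of raw JSON, the same request can be made with its Permission helper, and the name can be read back on the follow-up request. A minimal sketch, assuming the actions-on-google v2 library and an Actions SDK project (this is not the asker's Python connector):
// Sketch only: ask for the NAME permission, then read the result.
const { actionssdk, Permission } = require('actions-on-google');
const app = actionssdk();

app.intent('actions.intent.MAIN', (conv) => {
  // Ask the user for permission to use their name.
  conv.ask(new Permission({
    context: 'To address you by name',
    permissions: ['NAME'],
  }));
});

app.intent('actions.intent.PERMISSION', (conv, input, granted) => {
  // "granted" is true when the user accepted the permission prompt.
  if (granted && conv.user.name.display) {
    conv.ask(`Nice to meet you, ${conv.user.name.display}. What can I do for you?`);
  } else {
    conv.ask(`No problem. What can I do for you?`);
  }
});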
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "welcome"
      },
      "intent": {
        "name": "actions.intent.MAIN",
        "trigger": {
          "queryPatterns": ["talk to Mr Bot"]
        }
      }
    },
    {
      "description": "Rasa Intent",
      "name": "TEXT",
      "fulfillment": {
        "conversationName": "rasa_intent"
      },
      "intent": {
        "name": "actions.intent.TEXT",
        "trigger": {
          "queryPatterns": []
        }
      }
    }
  ],
  "conversations": {
    "welcome": {
      "name": "welcome",
      "url": "https://ac752bb0.ngrok.io/webhooks/google_home/webhook",
      "fulfillmentApiVersion": 2
    },
    "rasa_intent": {
      "name": "rasa_intent",
      "url": "https://ac752bb0.ngrok.io/webhooks/google_home/webhook",
      "fulfillmentApiVersion": 2
    }
  }
}
This is my action.json.
class GoogleConnector(InputChannel):
    @classmethod
    def name(cls):
        return "google_home"

    # def __init__(self):
    #     self.out_channel = CustomOutput(url, access_token)

    def blueprint(self, on_new_message):
        google_webhook = Blueprint('google_webhook', __name__)

        @google_webhook.route("/", methods=['GET'])
        def health():
            return jsonify({"status": "ok"})

        @google_webhook.route("/webhook", methods=['POST'])
        def receive():
            payload = json.loads(request.data)
            sender_id = payload['user']['userId']
            intent = payload['inputs'][0]['intent']
            text = payload['inputs'][0]['rawInputs'][0]['query']

            if intent == 'actions.intent.MAIN':
                message = "<speak>Hello! <break time=\"1\"/> Welcome to the Rasa-powered Google Assistant skill. You can start by saying hi.</speak>"
            else:
                out = CollectingOutputChannel()
                on_new_message(UserMessage(text, out, sender_id))
                responses = [m["text"] for m in out.messages]
                message = responses[0]

            r = json.dumps({
                "conversationToken": "{\"state\":null,\"data\":{}}",
                "expectUserResponse": 'true',
                "expectedInputs": [
                    {
                        "inputPrompt": {
                            "initialPrompts": [
                                {
                                    "ssml": message
                                }
                            ]
                        },
                        "possibleIntents": [
                            {
                                "intent": "actions.intent.TEXT"
                            }
                        ]
                    }
                ]
            })
            return r

        return google_webhook
This is my Google connector Python code.
How do I modify this for account sign-in?

Google Actions: conversation exit using Actions SDK does not invoke actions.intent.CANCEL

I am using the Actions SDK to build fulfilments and Google Cloud Functions to host them. I have the following in the action.json:
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "App"
      },
      "intent": {
        "name": "actions.intent.MAIN",
        "trigger": {
          "queryPatterns": [
            . . .
          ]
        }
      }
    }
  ],
  "conversations": {
    "App": {
      "name": " ... ",
      "url": " ...",
      "fulfillmentApiVersion": 2
    }
  },
  "locale": "en"
}
In the function code I notice that the handler for the actions.intent.CANCEL intent is not getting called when the user says or types "exit" or "goodbye". In the emulator only the <earcon> appears. The JS code is as below:
app.intent('actions.intent.MAIN', (conv) => {
  conv.ask('Welcome to ...');
});

app.intent('actions.intent.TEXT', (conv, input) => {
  // the main logic of the application is here
});

app.intent('actions.intent.CANCEL', (conv) => {
  conv.close(`Okay, let's try this again later.`);
  // this code does not get called
});
Does something need to be set in action.json for the cancel intent to work?
Yes, you need to add something to your action.json for it to send you the CANCEL Intent. In your existing conversations object, add an inDialogIntents attribute with an array of objects giving the name of the CANCEL Intent. Something like this:
"conversations": {
  "App": {
    "name": "...",
    "url": "...",
    "fulfillmentApiVersion": 2,
    "inDialogIntents": [
      {
        "name": "actions.intent.CANCEL"
      }
    ]
  }
}

Storing data "across conversations" in Google Action

I'm a bit confused by the Google Actions documentation about storing data and am hoping someone can help clarify...
The docs state that data in the conv.user.storage object will be saved "across conversations". I took this to mean that if the user exited the conversation these values would be persisted and available the next time they interact with my action. Is that understanding correct?
The reason I ask is that I can't get this behaviour to work in my action.
I have built a simple action fulfilment service (using Actions on Google NodeJS library v2.4.0 and Koa v2.5.3). The fulfilment is triggered from an intent defined in Dialogflow (after an account has been linked with Google Sign In) and stores a value in conversation storage. The code is as follows:
server.js (base server - loads actions dynamically from the local ./actions/ dir)
/* Load the environment */
const dotenv = require('dotenv');
const path = require('path');
const packageJson = require('./package.json');

dotenv.config({
  silent: true,
  path: process.env.ENV_FILE!=undefined && process.env.ENV_FILE.trim()!='' ? path.normalize(process.env.ENV_FILE) : path.join(__dirname, './.env')
});

const SERVER_NAME = process.env.NAME || packageJson.name;
const SERVER_PORT = process.env.PORT||'8080';
const SERVER_HOST = process.env.HOST||'0.0.0.0';
const HANDLERS_PATH = './actions/';

/* Load the dependencies */
const logger = require('utils-general').logger('google-server');
const Koa = require('koa');
const KoaBody = require('koa-body');
const KoaActionsOnGoogle = require('koa-aog');
const fs = require('fs');
const { dialogflow } = require('actions-on-google');

/* Load and initialise the Google Assistant actions */
//Initialise DialogFlow
const action = dialogflow({ debug: process.env.ACTIONS_DEBUG==='true', clientId: process.env.GOOGLE_CLIENT_ID });

//Load the action intent handlers
const handlers = [];
let handlerFiles = fs.readdirSync(HANDLERS_PATH);
handlerFiles.forEach(function loadHandlers(file) {
  let handlerImpl = require(HANDLERS_PATH+file);
  let handler = {};
  handler[handlerImpl.intent] = handlerImpl.action;
  handlers.push(handler);
});

//Add the actions intent handlers to DialogFlow
handlers.forEach(item => {
  let key = Object.keys(item)[0];
  logger.info(`Adding handler for action intent ${key}`);
  action.intent(key, item[key]);
});

/* Create the application server to handle fulfilment requests */
logger.info(`Initialising the ${SERVER_NAME} server (port: ${SERVER_PORT}, host: ${SERVER_HOST})`);

//Create the server
const app = new Koa();

//Add default error handler middleware
app.on('error', function handleAppError(err) {
  logger.error(`Unhandled ${err.name||'Error'}: ${err.message || JSON.stringify(err)}`);
});

//Add body parsing middleware
app.use(KoaBody({ jsonLimit: '50kb' }));

//Log the request/response
app.use(async (ctx, next) => {
  logger.trace(`REQUEST ${ctx.method} ${ctx.path} ${JSON.stringify(ctx.request.body)}`);
  await next();
  logger.trace(`RESPONSE (${ctx.response.status}) ${ctx.response.body ? JSON.stringify(ctx.response.body) : ''}`);
});

//Make the action fulfilment endpoint available on the server
app.use(KoaActionsOnGoogle({ action: action }));

/* Start server on the specified port */
app.listen(SERVER_PORT, SERVER_HOST, function () {
  logger.info(`${SERVER_NAME} server started at ${new Date().toISOString()} and listening for requests on port ${SERVER_PORT}`);
});

module.exports = app;
storage-read.js (fulfilment for the "STORAGE_READ" intent - reads stored uuid from conversation storage):
const logger = require('utils-general').logger('google-action-storage-read');
const { SimpleResponse } = require('actions-on-google');
const { getUserId } = require('../utils/assistant-util');
const _get = require('lodash.get');

module.exports = {
  intent: 'STORAGE_READ',
  action: async function (conv, input) {
    logger.debug(`Processing STORAGE_READ intent request: ${JSON.stringify(conv)}`, { traceid: getUserId(conv) });
    let storedId = _get(conv, 'user.storage.uuid', undefined);
    logger.debug(`User storage UUID is ${storedId}`);
    conv.close(new SimpleResponse((storedId!=undefined ? `This conversation contains stored data` : `There is no stored data for this conversation`)));
  }
}
storage-write.js (fulfils the "STORAGE_WRITE" intent - writes a UUID to conversation storage):
const logger = require('utils-general').logger('google-action-storage-read');
const { SimpleResponse } = require('actions-on-google');
const { getUserId } = require('../utils/assistant-util');
const _set = require('lodash.set');
const uuid = require('uuid/v4');

module.exports = {
  intent: 'STORAGE_WRITE',
  action: async function (conv, input) {
    logger.debug(`Processing STORAGE_WRITE intent request`, { traceid: getUserId(conv) });
    let newId = uuid();
    logger.debug(`Writing new UUID to conversation storage: ${newId}`);
    _set(conv, 'user.storage.uuid', newId);
    conv.close(new SimpleResponse(`OK, I've written a new UUID to conversation storage`));
  }
}
This "STORAGE_WRITE" fulfilment stores the data and makes it available between turns in the same conversation (i.e. another intent triggered in the same conversation can read the stored data). However, when the conversation is closed, subsequent (new) conversations with the same user are unable to read the data (i.e. when the "STORAGE_READ" intent is fulfilled) - the conv.user.storage object is always empty.
I have voice match set up on the Google account/Home Mini I'm using, but I can't see how to determine in the action whether the voice is matched (it seems to be, since my linked account is used when I start a new conversation). I'm also getting the same behaviour in the simulator.
Sample request/ responses (when using the simulator) are as follows:
STORAGE_WRITE request:
{
  "user": {
    "userId": "AB_Hidden_EWVzx3q",
    "locale": "en-US",
    "lastSeen": "2018-10-18T12:52:01Z",
    "idToken": "eyMyHiddenTokenId"
  },
  "conversation": {
    "conversationId": "ABwppHFrP5DIKzykGIfK5mNS42yVzuunzOfFUhyPctG0h0xM8p6u0E9suX8OIvaaGdlYydTl60ih-WJ5kkqV4acS5Zd1OkRJ5pnE",
    "type": "NEW"
  },
  "inputs": [
    {
      "intent": "actions.intent.MAIN",
      "rawInputs": [
        {
          "inputType": "KEYBOARD",
          "query": "ask my pathfinder to write something to conversation storage"
        }
      ],
      "arguments": [
        {
          "name": "trigger_query",
          "rawText": "write something to conversation storage",
          "textValue": "write something to conversation storage"
        }
      ]
    }
  ],
  "surface": {
    "capabilities": [
      {
        "name": "actions.capability.WEB_BROWSER"
      },
      {
        "name": "actions.capability.AUDIO_OUTPUT"
      },
      {
        "name": "actions.capability.SCREEN_OUTPUT"
      },
      {
        "name": "actions.capability.MEDIA_RESPONSE_AUDIO"
      }
    ]
  },
  "isInSandbox": true,
  "availableSurfaces": [
    {
      "capabilities": [
        {
          "name": "actions.capability.WEB_BROWSER"
        },
        {
          "name": "actions.capability.AUDIO_OUTPUT"
        },
        {
          "name": "actions.capability.SCREEN_OUTPUT"
        }
      ]
    }
  ],
  "requestType": "SIMULATOR"
}
STORAGE_WRITE response:
{
  "conversationToken": "[]",
  "finalResponse": {
    "richResponse": {
      "items": [
        {
          "simpleResponse": {
            "textToSpeech": "OK, I've written a new UUID to conversation storage"
          }
        }
      ]
    }
  },
  "responseMetadata": {
    "status": {
      "message": "Success (200)"
    },
    "queryMatchInfo": {
      "queryMatched": true,
      "intent": "a7e54fcf-8ff1-4690-a311-e4c6a8d1bfd7"
    }
  },
  "userStorage": "{\"data\":{\"uuid\":\"7dc835fa-0470-4028-b8ed-3374ed65ac7c\"}}"
}
Subsequent STORAGE_READ request:
{
  "user": {
    "userId": "AB_Hidden_EWVzx3q",
    "locale": "en-US",
    "lastSeen": "2018-10-18T12:52:47Z",
    "idToken": "eyMyHiddenTokenId"
  },
  "conversation": {
    "conversationId": "ABwppHHVvp810VEfa4BhBJPf1NIfKUGzyvw9JCw7kKq9YBd_F8w0VYjJiSuzGLrHcXHGc9pC6ukuMB62XVkzkZOaC24pEbXWLQX5",
    "type": "NEW"
  },
  "inputs": [
    {
      "intent": "STORAGE_READ",
      "rawInputs": [
        {
          "inputType": "KEYBOARD",
          "query": "ask my pathfinder what is in conversation storage"
        }
      ],
      "arguments": [
        {
          "name": "trigger_query",
          "rawText": "what is in conversation storage",
          "textValue": "what is in conversation storage"
        }
      ]
    }
  ],
  "surface": {
    "capabilities": [
      {
        "name": "actions.capability.WEB_BROWSER"
      },
      {
        "name": "actions.capability.AUDIO_OUTPUT"
      },
      {
        "name": "actions.capability.SCREEN_OUTPUT"
      },
      {
        "name": "actions.capability.MEDIA_RESPONSE_AUDIO"
      }
    ]
  },
  "isInSandbox": true,
  "availableSurfaces": [
    {
      "capabilities": [
        {
          "name": "actions.capability.WEB_BROWSER"
        },
        {
          "name": "actions.capability.AUDIO_OUTPUT"
        },
        {
          "name": "actions.capability.SCREEN_OUTPUT"
        }
      ]
    }
  ],
  "requestType": "SIMULATOR"
}
STORAGE_READ response:
{
  "conversationToken": "[]",
  "finalResponse": {
    "richResponse": {
      "items": [
        {
          "simpleResponse": {
            "textToSpeech": "There is no stored data for this conversation"
          }
        }
      ]
    }
  },
  "responseMetadata": {
    "status": {
      "message": "Success (200)"
    },
    "queryMatchInfo": {
      "queryMatched": true,
      "intent": "368d08d3-fe0c-4481-aa8e-b0bdfa659eeb"
    }
  }
}
Can someone straighten me out on whether I'm misinterpreting the docs, or do I have a bug somewhere?
Thanks!
My suspicion is that personal results are turned off in your case.
You mentioned you're testing on a Home Mini, and Prisoner was able to reproduce this on a device (in the comments).
Shared devices like smart speakers (Home, Mini) and Smart Displays have personal results disabled by default. Check this documentation to enable them:
Open Settings on your Android phone
Under "Assistant devices," select your device (e.g. Mini)
Turn Personal results on
Beware that this means personal results like Calendar entries can be accessed through the device.
To check whether userStorage will persist, you can use the GUEST/VERIFIED flag; see the documentation here.
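For example, a small guard along these lines in the fulfilment would skip storage for unverified users (a sketch only, reusing the shape of the asker's STORAGE_WRITE handler; conv.user.verification comes from the actions-on-google v2 library):
// Sketch: only rely on userStorage for VERIFIED users; for GUEST users
// it is cleared when the conversation ends.
module.exports = {
  intent: 'STORAGE_WRITE',
  action: async function (conv) {
    if (conv.user.verification !== 'VERIFIED') {
      conv.close(`I can't remember things on this device because you aren't a verified user.`);
      return;
    }
    conv.user.storage.uuid = require('uuid/v4')();
    conv.close(`OK, I've written a new UUID to conversation storage`);
  }
};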

Simulator does not invoke other intents, like TEXT and other custom intents, for Actions SDK

I am using the Actions SDK and have tried different configurations to get the simulator to invoke my custom intent. It seems that the simulator refuses to trigger any action other than MAIN, not even TEXT. Below is my action.json:
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "SHOPPING"
      },
      "intent": {
        "name": "actions.intent.MAIN",
        "trigger": {
          "queryPatterns": [
            "Talk to stroller shopping expert"
          ]
        }
      }
    },
    {
      "description": "Listing strollers for a specified age group",
      "name": "SHOPPING",
      "fulfillment": {
        "conversationName": "SHOPPING"
      },
      "intent": {
        "name": "SHOPPING",
        "trigger": {
          "queryPatterns": [
            "I am looking for a jogging stroller",
            "I am shopping for a jogging stroller"
          ]
        }
      }
    }
  ],
  "conversations": {
    "SHOPPING": {
      "name": "SHOPPING",
      "url": "SOME_URL (I have a valid URL BTW)",
      "fulfillmentApiVersion": 2
    }
  }
}
I am using Firebase, and in the Firebase log I cannot see any logs from my custom or TEXT intent. Here is part of my index.js code:
'use strict';

process.env.DEBUG = 'actions-on-google:*';
const ActionsSdkApp = require('actions-on-google').ActionsSdkApp;
const functions = require('firebase-functions');

const NO_INPUTS = [
  'Pardon me, I didn\'t hear that.',
  'If you\'re still there, would you please say that again.',
  'We can stop here. Good luck with your shopping.'
];
const SHOPPING_INTENT = 'SHOPPING';

exports.shopStrollers = functions.https.onRequest((request, response) => {
  const app = new ActionsSdkApp({request, response});

  function handleMainInput(app) {
    console.log('mainIntent is invoked!');
    console.log("The input is %s", app.getRawInput());
    console.log("It seems that %s is never invoked!", app.StandardIntents.TEXT);
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can help with finding strollers. How old is your baby?</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function handleTextInput(app) {
    console.log('TEXT is invoked!');
    console.log("The input is %s", app.getRawInput());
    console.log("Finally TEXT HANDLER got invoked");
    if (app.getRawInput() === 'bye') {
      app.tell('Hope you found the service helpful and best of luck with your shopping, please come back again, goodbye!');
    } else {
      let inputPrompt = app.buildInputPrompt(true, '<speak>Here is a list of top' +
        ' <say-as interpret-as="ordinal">10</say-as> strollers' +
        ', say next for the next batch</speak>', NO_INPUTS);
      app.ask(inputPrompt);
    }
  }

  let actionMap = new Map();
  actionMap.set(SHOPPING_INTENT, handleTextInput);
  actionMap.set(app.StandardIntents.MAIN, handleMainInput);
  actionMap.set(app.StandardIntents.TEXT, handleTextInput);

  app.handleRequest(actionMap);
});
Does anybody have a clue what might be wrong? I would appreciate any help.
Problem solved. I had to enable web history.
I was using a business email and had to go through a convoluted configuration process to "turn on web history"! Now everything works fine through ngrok and my Express app. I would imagine it should work on Firebase as well. So action.json and index.js were good; the issue was incomplete configuration for the business email.
Here are the steps I followed: https://productforums.google.com/forum/#!msg/apps/-52VibOcvrY/wUow1QOJ3VQJ

askWithList on Actions on Google

I'm following the sample code for Actions on Google responses at the following link:
https://developers.google.com/actions/assistant/responses
I want the list response to appear when the user initiates the TEXT intent, but all I get is "Your App isn’t responding right now. Try again soon." Here is the code that I'm using (it's copied and pasted, for the most part, from the link):
function textIntent(app) {
  app.askWithList(app.buildRichResponse()
    .addSimpleResponse('Alright')
    .addSuggestions(
      ['Basic Card', 'List', 'Carousel', 'Suggestions']),
    // Build a list
    app.buildList('Things to learn about')
      // Add the first item to the list
      .addItems(app.buildOptionItem('MATH_AND_PRIME',
        ['math', 'math and prime', 'prime numbers', 'prime'])
        .setTitle('Math & prime numbers')
        .setDescription('42 is an abundant number because the sum of its ' +
          'proper divisors 54 is greater…')
      )
      // Add the second item to the list
      .addItems(app.buildOptionItem('EGYPT',
        ['religion', 'egypt', 'ancient egyptian'])
        .setTitle('Ancient Egyptian religion')
        .setDescription('42 gods who ruled on the fate of the dead in the ' +
          'afterworld. Throughout the under…')
      )
      // Add third item to the list
      .addItems(app.buildOptionItem('RECIPES',
        ['recipes', 'recipe', '42 recipes'])
        .setTitle('42 recipes with 42 ingredients')
        .setDescription('Here\'s a beautifully simple recipe that\'s full ' +
          'of flavor! All you need is some ginger and…')
      )
  );
}

let actionMap = new Map();
actionMap.set(app.StandardIntents.MAIN, mainIntent);
actionMap.set(app.StandardIntents.TEXT, textIntent);
app.handleRequest(actionMap);
Here is my action.json:
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "welcome"
      },
      "intent": {
        "name": "actions.intent.MAIN"
      }
    }
  ],
  "conversations": {
    "welcome": {
      "name": "welcome",
      "url": "https://example.com"
    }
  }
}
Any help would be much appreciated! Thank you in advance.
You're using Actions version 2 features, but the action package (action.json) isn't specifying a version, so it defaults to version 1.
The "conversations" section of action.json should look something like:
"conversations": {
  "welcome": {
    "name": "welcome",
    "url": "https://example.com",
    "fulfillmentApiVersion": 2
  }
}
}