Adding custom JSON payloads for Telegram in Dialogflow to get users to share their phone number - dialogflow-es-fulfillment

The function below is supposed to provide the fulfillment for the share_your_phone_number intent.
When the intent is invoked, the share-your-phone-number keyboard should be displayed to the user in Telegram.
function share_your_phone_number(agent) {
  agent.add(`Welcome.`);
  agent.add(new Payload("telegram", {
    "text": "Please click on button below to share your number",
    "reply_markup": {
      "one_time_keyboard": true,
      "resize_keyboard": true,
      "keyboard": [
        [
          {
            "text": "Share my phone number",
            "callback_data": "phone",
            "request_contact": true
          }
        ],
        [
          {
            "text": "Cancel",
            "callback_data": "Cancel"
          }
        ]
      ]
    }
  }));
}
When I deploy the fulfillment from the inline editor, only the "Welcome" string is returned in the Telegram bot chat; the keyboard buttons are not displayed.
I need a clue to fix this.

When constructing a Payload object, as documented here (https://dialogflow.com/docs/reference/fulfillment-library/rich-responses#new_payloadplatform_payload), the platform and payload parameters are required:
new Payload(platform, payload)
The platform parameter is a property of the WebhookClient object and should be referenced as such (agent.SLACK, agent.TELEGRAM, etc.), assuming the WebhookClient instance is stored in agent.
Examples:
agent.add(new Payload(agent.ACTIONS_ON_GOOGLE, {/* your Google payload here */}));
agent.add(new Payload(agent.SLACK, {/* your Slack payload here */}));
agent.add(new Payload(agent.TELEGRAM, {/* your Telegram payload here */}));
ref: https://blog.dialogflow.com/post/fulfillment-library-beta/.
For my use case outlined in the question, this is my full solution:
// See https://github.com/dialogflow/dialogflow-fulfillment-nodejs
// for Dialogflow fulfillment library docs, samples, and to report issues
'use strict';

const functions = require('firebase-functions');
const {WebhookClient} = require('dialogflow-fulfillment');
const {Text, Card, Image, Suggestion, Payload} = require('dialogflow-fulfillment');

process.env.DEBUG = 'dialogflow:debug'; // enables lib debugging statements

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
  const agent = new WebhookClient({ request, response });
  console.log('Dialogflow Request headers: ' + JSON.stringify(request.headers));
  console.log('Dialogflow Request body: ' + JSON.stringify(request.body));

  function welcome(agent) {
    agent.add(new Payload(agent.TELEGRAM, {
      "text": "Please click on button below to share your number",
      "reply_markup": {
        "one_time_keyboard": true,
        "resize_keyboard": true,
        "keyboard": [
          [
            {
              "text": "Share my phone number",
              "callback_data": "phone",
              "request_contact": true
            }
          ],
          [
            {
              "text": "Cancel",
              "callback_data": "Cancel"
            }
          ]
        ]
      }
    }));
  }

  // Run the proper function handler based on the matched Dialogflow intent name
  let intentMap = new Map();
  intentMap.set('Default Welcome Intent', welcome);
  agent.handleRequest(intentMap);
});
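Once the user taps the button, Telegram sends back a contact message rather than plain text. Below is a rough sketch of a follow-up handler; the intent name and the exact path into originalDetectIntentRequest.payload are assumptions on my part, so verify them against the request body your webhook actually logs:

// Hypothetical follow-up handler for when the user shares their contact.
// The path into the forwarded Telegram update is an assumption -- check
// request.body.originalDetectIntentRequest in your own webhook logs.
function capture_phone_number(agent) {
  const telegramPayload = agent.request_.body.originalDetectIntentRequest.payload || {};
  const message = telegramPayload.data && telegramPayload.data.message;
  const contact = message && message.contact;
  if (contact && contact.phone_number) {
    agent.add(`Thanks, I have your number: ${contact.phone_number}`);
  } else {
    agent.add(`Sorry, I didn't receive a phone number.`);
  }
}

// Register it alongside the welcome handler (intent name is hypothetical):
// intentMap.set('share_your_phone_number.contact', capture_phone_number);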

Here is my result:
function welcome(agent) {
  const payload = {
    "text": "Pick a color",
    "reply_markup": {
      "inline_keyboard": [
        [
          {
            "text": "Yellow",
            "callback_data": "Yellow"
          }
        ],
        [
          {
            "text": "Blue",
            "callback_data": "Blue"
          }
        ]
      ]
    }
  };
  console.log('queryText ' + JSON.stringify(agent.request_.body.queryResult.queryText));
  console.log('displayName ' + JSON.stringify(agent.request_.body.queryResult.intent.displayName));
  agent.add(
    new Payload(agent.TELEGRAM, payload, {rawPayload: false, sendAsMessage: true})
  );
}
Also, you must update the version of dialogflow-fulfillment in package.json to the latest release. I currently have "dialogflow-fulfillment": "^0.6.1".
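In the inline editor that means editing the dependencies block of package.json (the other entries can stay as the editor generated them); a minimal sketch:

"dependencies": {
  "dialogflow-fulfillment": "^0.6.1"
}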

Related

Issue in making a phone call within the Google Assistant App

I tried to implement a button in a basic card and got an error:
API Version 2: Failed to parse JSON response string with 'INVALID_ARGUMENT' error: "(expected_inputs[0].input_prompt.rich_initial_prompt.items[1].basic_card.buttons[0].open_url_action) versions: Cannot find field." HTTP Status Code: 200.
conv.ask(new BasicCard({
  text: `This is a basic card. Text in a basic card can include "quotes" and`,
  subtitle: 'This is a subtitle',
  buttons: [
    {
      "title": "Call",
      "openUrlAction": {
        "url": "tel:+91123456789",
        "androidApp": {
          "packageName": "com.android.phone"
        },
        "versions": []
      }
    },
  ],
  "formattedText": "Some text",
  image: new Image({
    url: 'https://storage.googleapis.com/actionsresources/logo_assistant_2x_64dp.png',
    alt: 'Image alternate text',
  }),
  "title": "Card Title"
}));
I tried various things; adding another empty object to the buttons array gives the same error:
buttons: [
  {
    "title": "Call",
    "openUrlAction": {
      "url": "tel:+91123456789",
      "androidApp": {
        "packageName": "com.android.phone"
      },
      "versions": []
    }
  },
  {}
],
I also tried the code below and got this error:
expected_inputs[0].input_prompt.rich_initial_prompt.items[1].basic_card.buttons[0].open_url_action: the protocol must be http or https.
buttons: new Button({
  "title": "button text",
  'url': "tel:+91123456789",
  "androidApp": {
    "packageName": "com.android.phone"
  }
})
and got this response in the Actions console simulator:
"buttons": [
{
"title": "button text",
"openUrlAction": {
"url": "tel:+919177723773"
}
}
]
You cannot open a tel: URL through the Assistant.
As the error noted:
expected_inputs[0].input_prompt.rich_initial_prompt.items[1].basic_card.buttons[0].open_url_action: the protocol must be http or https.
Keep in mind that the Assistant runs on devices that do not support tel: URLs, even if they support https: URLs.
The URL property you are using includes a phone number, but the link property can only point to a web page. This is also what the error message tells you:
"the protocol must be http or https."
So if you want to fix the error, you need to put an http or https link in your button instead of a phone number.
As for calling a phone number from the Google Assistant app, I'm not sure that feature is supported at the moment.
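If the goal is simply to give the user something to tap, one hedged workaround is to point the button at an https page (for example a contact page that shows the number). A sketch with the actions-on-google v2 library follows; the intent name and URL are made up for illustration:

// Sketch only: same BasicCard, but the button opens an https page instead of tel:.
const { dialogflow, BasicCard, Button, Image } = require('actions-on-google');

const app = dialogflow();

app.intent('contact_us', (conv) => {
  conv.ask('Here is how to reach us.');
  conv.ask(new BasicCard({
    title: 'Card Title',
    subtitle: 'This is a subtitle',
    text: 'Tap the button to open our contact page.',
    image: new Image({
      url: 'https://storage.googleapis.com/actionsresources/logo_assistant_2x_64dp.png',
      alt: 'Image alternate text',
    }),
    buttons: new Button({
      title: 'Contact page',
      url: 'https://example.com/contact', // must be http or https
    }),
  }));
});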

Storing data "across conversations" in Google Action

I'm a bit confused by the Google Actions documentation about storing data and hoped someone could help clarify...
The docs state that data in the conv.user.storage object will be saved "across conversations". I took this to mean that if the user exited the conversation these values would be persisted and available the next time they interact with my action. Is that understanding correct?
The reason I ask is that I can't get this behaviour to work in my action.
I have built a simple action fulfilment service (using Actions on Google NodeJS library v2.4.0 and Koa v2.5.3). The fulfilment is triggered from an intent defined in Dialogflow (after an account has been linked with Google Sign In) and stores a value in conversation storage. The code is as follows:
server.js (base server - loads actions dynamically from the local ./actions/ dir)
/* Load the environment */
const dotenv = require('dotenv');
const path = require('path');
const packageJson = require('./package.json');

dotenv.config({
  silent: true,
  path: process.env.ENV_FILE != undefined && process.env.ENV_FILE.trim() != '' ? path.normalize(process.env.ENV_FILE) : path.join(__dirname, './.env')
});

const SERVER_NAME = process.env.NAME || packageJson.name;
const SERVER_PORT = process.env.PORT || '8080';
const SERVER_HOST = process.env.HOST || '0.0.0.0';
const HANDLERS_PATH = './actions/';

/* Load the dependencies */
const logger = require('utils-general').logger('google-server');
const Koa = require('koa');
const KoaBody = require('koa-body');
const KoaActionsOnGoogle = require('koa-aog');
const fs = require('fs');
const { dialogflow } = require('actions-on-google');

/* Load and initialise the Google Assistant actions */
//Initialise DialogFlow
const action = dialogflow({ debug: process.env.ACTIONS_DEBUG === 'true', clientId: process.env.GOOGLE_CLIENT_ID });

//Load the action intent handlers
const handlers = [];
let handlerFiles = fs.readdirSync(HANDLERS_PATH);
handlerFiles.forEach(function loadHandlers(file) {
  let handlerImpl = require(HANDLERS_PATH + file);
  let handler = {};
  handler[handlerImpl.intent] = handlerImpl.action;
  handlers.push(handler);
});

//Add the actions intent handlers to DialogFlow
handlers.forEach(item => {
  let key = Object.keys(item)[0];
  logger.info(`Adding handler for action intent ${key}`);
  action.intent(key, item[key]);
});

/* Create the application server to handle fulfilment requests */
logger.info(`Initialising the ${SERVER_NAME} server (port: ${SERVER_PORT}, host: ${SERVER_HOST})`);

//Create the server
const app = new Koa();

//Add default error handler middleware
app.on('error', function handleAppError(err) {
  logger.error(`Unhandled ${err.name || 'Error'}: ${err.message || JSON.stringify(err)}`);
});

//Add body parsing middleware
app.use(KoaBody({ jsonLimit: '50kb' }));

//Log the request/ response
app.use(async (ctx, next) => {
  logger.trace(`REQUEST ${ctx.method} ${ctx.path} ${JSON.stringify(ctx.request.body)}`);
  await next();
  logger.trace(`RESPONSE (${ctx.response.status}) ${ctx.response.body ? JSON.stringify(ctx.response.body) : ''}`);
});

//Make the action fulfilment endpoint available on the server
app.use(KoaActionsOnGoogle({ action: action }));

/* Start server on the specified port */
app.listen(SERVER_PORT, SERVER_HOST, function () {
  logger.info(`${SERVER_NAME} server started at ${new Date().toISOString()} and listening for requests on port ${SERVER_PORT}`);
});

module.exports = app;
storage-read.js (fulfilment for the "STORAGE_READ" intent - reads stored uuid from conversation storage):
const logger = require('utils-general').logger('google-action-storage-read');
const { SimpleResponse } = require('actions-on-google');
const { getUserId } = require('../utils/assistant-util');
const _get = require('lodash.get');

module.exports = {
  intent: 'STORAGE_READ',
  action: async function (conv, input) {
    logger.debug(`Processing STORAGE_READ intent request: ${JSON.stringify(conv)}`, { traceid: getUserId(conv) });
    let storedId = _get(conv, 'user.storage.uuid', undefined);
    logger.debug(`User storage UUID is ${storedId}`);
    conv.close(new SimpleResponse((storedId != undefined ? `This conversation contains stored data` : `There is no stored data for this conversation`)));
  }
}
storage-write.js (fulfils the "STORAGE_WRITE" intent - writes a UUID to conversation storage):
const logger = require('utils-general').logger('google-action-storage-read');
const { SimpleResponse } = require('actions-on-google');
const { getUserId } = require('../utils/assistant-util');
const _set = require('lodash.set');
const uuid = require('uuid/v4');

module.exports = {
  intent: 'STORAGE_WRITE',
  action: async function (conv, input) {
    logger.debug(`Processing STORAGE_WRITE intent request`, { traceid: getUserId(conv) });
    let newId = uuid();
    logger.debug(`Writing new UUID to conversation storage: ${newId}`);
    _set(conv, 'user.storage.uuid', newId);
    conv.close(new SimpleResponse(`OK, I've written a new UUID to conversation storage`));
  }
}
This "STORAGE_WRITE" fulfilment stores the data and makes it available between turns in the same conversation (i.e. another intent triggered in the same conversation can read the stored data). However, when the conversation is closed, subsequent (new) conversations with the same user are unable to read the data (i.e. when the "STORAGE_READ" intent is fulfilled) - the conv.user.storage object is always empty.
I have Voice Match set up on the Google account / Home Mini I'm using, but I can't see how to determine in the action whether the voice was matched (although it seems to be, since my linked account is used when I start a new conversation). I'm also getting the same behaviour in the simulator.
Sample request/ responses (when using the simulator) are as follows:
STORAGE_WRITE request:
{
  "user": {
    "userId": "AB_Hidden_EWVzx3q",
    "locale": "en-US",
    "lastSeen": "2018-10-18T12:52:01Z",
    "idToken": "eyMyHiddenTokenId"
  },
  "conversation": {
    "conversationId": "ABwppHFrP5DIKzykGIfK5mNS42yVzuunzOfFUhyPctG0h0xM8p6u0E9suX8OIvaaGdlYydTl60ih-WJ5kkqV4acS5Zd1OkRJ5pnE",
    "type": "NEW"
  },
  "inputs": [
    {
      "intent": "actions.intent.MAIN",
      "rawInputs": [
        { "inputType": "KEYBOARD", "query": "ask my pathfinder to write something to conversation storage" }
      ],
      "arguments": [
        { "name": "trigger_query", "rawText": "write something to conversation storage", "textValue": "write something to conversation storage" }
      ]
    }
  ],
  "surface": {
    "capabilities": [
      { "name": "actions.capability.WEB_BROWSER" },
      { "name": "actions.capability.AUDIO_OUTPUT" },
      { "name": "actions.capability.SCREEN_OUTPUT" },
      { "name": "actions.capability.MEDIA_RESPONSE_AUDIO" }
    ]
  },
  "isInSandbox": true,
  "availableSurfaces": [
    {
      "capabilities": [
        { "name": "actions.capability.WEB_BROWSER" },
        { "name": "actions.capability.AUDIO_OUTPUT" },
        { "name": "actions.capability.SCREEN_OUTPUT" }
      ]
    }
  ],
  "requestType": "SIMULATOR"
}
STORAGE_WRITE response:
{
  "conversationToken": "[]",
  "finalResponse": {
    "richResponse": {
      "items": [
        { "simpleResponse": { "textToSpeech": "OK, I've written a new UUID to conversation storage" } }
      ]
    }
  },
  "responseMetadata": {
    "status": { "message": "Success (200)" },
    "queryMatchInfo": { "queryMatched": true, "intent": "a7e54fcf-8ff1-4690-a311-e4c6a8d1bfd7" }
  },
  "userStorage": "{\"data\":{\"uuid\":\"7dc835fa-0470-4028-b8ed-3374ed65ac7c\"}}"
}
Subsequent STORAGE_READ request:
{
  "user": {
    "userId": "AB_Hidden_EWVzx3q",
    "locale": "en-US",
    "lastSeen": "2018-10-18T12:52:47Z",
    "idToken": "eyMyHiddenTokenId"
  },
  "conversation": {
    "conversationId": "ABwppHHVvp810VEfa4BhBJPf1NIfKUGzyvw9JCw7kKq9YBd_F8w0VYjJiSuzGLrHcXHGc9pC6ukuMB62XVkzkZOaC24pEbXWLQX5",
    "type": "NEW"
  },
  "inputs": [
    {
      "intent": "STORAGE_READ",
      "rawInputs": [
        { "inputType": "KEYBOARD", "query": "ask my pathfinder what is in conversation storage" }
      ],
      "arguments": [
        { "name": "trigger_query", "rawText": "what is in conversation storage", "textValue": "what is in conversation storage" }
      ]
    }
  ],
  "surface": {
    "capabilities": [
      { "name": "actions.capability.WEB_BROWSER" },
      { "name": "actions.capability.AUDIO_OUTPUT" },
      { "name": "actions.capability.SCREEN_OUTPUT" },
      { "name": "actions.capability.MEDIA_RESPONSE_AUDIO" }
    ]
  },
  "isInSandbox": true,
  "availableSurfaces": [
    {
      "capabilities": [
        { "name": "actions.capability.WEB_BROWSER" },
        { "name": "actions.capability.AUDIO_OUTPUT" },
        { "name": "actions.capability.SCREEN_OUTPUT" }
      ]
    }
  ],
  "requestType": "SIMULATOR"
}
STORAGE_READ response:
{
  "conversationToken": "[]",
  "finalResponse": {
    "richResponse": {
      "items": [
        { "simpleResponse": { "textToSpeech": "There is no stored data for this conversation" } }
      ]
    }
  },
  "responseMetadata": {
    "status": { "message": "Success (200)" },
    "queryMatchInfo": { "queryMatched": true, "intent": "368d08d3-fe0c-4481-aa8e-b0bdfa659eeb" }
  }
}
Can someone straighten me out on whether I'm misinterpreting the docs, or whether I have a bug somewhere?
Thanks!
My suspicion is that personal results are turned off in your case.
You mentioned you're testing on a Home Mini, and Prisoner was able to reproduce this on a device (in the comments).
Shared devices like smart speakers (Home, Mini) and Smart Displays have personal results disabled by default. Check this documentation to enable them:
Open Settings on your Android phone
Under "Assistant devices," select your device (e.g. Mini)
Turn Personal results on
Beware that this means personal results like Calendar entries can be accessed through the device.
To check whether userStorage will persist, you can use the GUEST/VERIFIED flag; see the documentation here.
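As a sketch (assuming the actions-on-google v2 library, where conv.user.verification is either 'VERIFIED' or 'GUEST'), the STORAGE_READ handler above could guard on that flag before trusting the storage:

// Sketch: only rely on conv.user.storage for verified users; for guest users
// anything written there is dropped at the end of the conversation.
module.exports = {
  intent: 'STORAGE_READ',
  action: async function (conv) {
    if (conv.user.verification !== 'VERIFIED') {
      conv.close('I can only remember things for verified users on this device.');
      return;
    }
    const storedId = conv.user.storage.uuid;
    conv.close(storedId
      ? 'This conversation contains stored data'
      : 'There is no stored data for this conversation');
  }
};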

Simulator does not invoke other intents, like TEXT and other custom intents, for the Actions SDK

I am using the Actions SDK and have tried different configurations to get the simulator to invoke my custom intent. It seems that the simulator refuses to trigger any action other than MAIN, not even TEXT. Below is my action.json:
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "SHOPPING"
      },
      "intent": {
        "name": "actions.intent.MAIN",
        "trigger": {
          "queryPatterns": [
            "Talk to stroller shopping expert"
          ]
        }
      }
    },
    {
      "description": "Listing strollers for a specified age group",
      "name": "SHOPPING",
      "fulfillment": {
        "conversationName": "SHOPPING"
      },
      "intent": {
        "name": "SHOPPING",
        "trigger": {
          "queryPatterns": [
            "I am looking for a jogging stroller",
            "I am shopping for a jogging stroller"
          ]
        }
      }
    }
  ],
  "conversations": {
    "SHOPPING": {
      "name": "SHOPPING",
      "url": "SOME_URL (I have a valid URL BTW)",
      "fulfillmentApiVersion": 2
    }
  }
}
I am using Firebase, and in the Firebase log I cannot see any output from my custom or TEXT intents. Here is part of my index.js code:
'use strict';

process.env.DEBUG = 'actions-on-google:*';

const ActionsSdkApp = require('actions-on-google').ActionsSdkApp;
const functions = require('firebase-functions');

const NO_INPUTS = [
  'Pardon me, I didn\'t hear that.',
  'If you\'re still there, would you please say that again.',
  'We can stop here. Good luck with your shopping.'
];
const SHOPPING_INTENT = 'SHOPPING';

exports.shopStrollers = functions.https.onRequest((request, response) => {
  const app = new ActionsSdkApp({request, response});

  function handleMainInput(app) {
    console.log('mainIntent is invoked!');
    console.log("The input is %s", app.getRawInput());
    console.log("It seems that %s is never invoked!", app.StandardIntents.TEXT);
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can help with finding strollers. How old is your baby?</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function handleTextInput(app) {
    console.log('TEXT is invoked!');
    console.log("The input is %s", app.getRawInput());
    console.log("Finally TEXT HANDLER got invoked");
    if (app.getRawInput() === 'bye') {
      app.tell('Hope you found the service helpful and best of luck with your shopping, please come back again, goodbye!');
    } else {
      let inputPrompt = app.buildInputPrompt(true, '<speak>Here is a list of top' +
        ' <say-as interpret-as="ordinal">10</say-as>strollers' +
        ', say next for the next batch</speak>', NO_INPUTS);
      app.ask(inputPrompt);
    }
  }

  let actionMap = new Map();
  actionMap.set(SHOPPING_INTENT, handleTextInput);
  actionMap.set(app.StandardIntents.MAIN, handleMainInput);
  actionMap.set(app.StandardIntents.TEXT, handleTextInput);
  app.handleRequest(actionMap);
});
Does anybody have a clue what might be wrong? I would appreciate any help.
Problem solved. I had to enable web history.
I was using a business email and had to go through a convoluted configuration process to "turn on web history". Now everything works fine through ngrok and my Express app, and I would imagine it should work on Firebase as well. So action.json and index.js were fine; the issue was the incomplete configuration of the business email.
Here are the steps I followed: https://productforums.google.com/forum/#!msg/apps/-52VibOcvrY/wUow1QOJ3VQJ

Is it possible to play an audio file or stream?

Is it possible to play an audio file or stream using the actions-on-google-nodejs library?
Using SSML, you can return an audio clip of up to 120 seconds:
<speak>
  <audio src="https://actions.google.com/sounds/v1/animals/cat_purr_close.ogg">
    <desc>a cat purring</desc>
    PURR (sound didn't load)
  </audio>
</speak>
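With the actions-on-google Node.js library (v2 style) that SSML can be passed straight to conv.ask as a string; a minimal sketch with a made-up intent name:

// Sketch: returning the SSML audio clip from an intent handler (v2 library).
app.intent('play_sound', (conv) => {
  conv.ask(
    '<speak>' +
      '<audio src="https://actions.google.com/sounds/v1/animals/cat_purr_close.ogg">' +
        '<desc>a cat purring</desc>' +
        'PURR (sound didn\'t load)' +
      '</audio>' +
    '</speak>'
  );
});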
Edit
If you want to play an audio file longer than 120 seconds (for example an MP3), you need to use a Media Response:
// MediaObject and Image come from the actions-on-google v2 library
const { MediaObject, Image } = require('actions-on-google');

if (!conv.surface.capabilities.has('actions.capability.MEDIA_RESPONSE_AUDIO')) {
  conv.ask('Sorry, this device does not support audio playback.');
  return;
}

conv.ask(new MediaObject({
  name: 'Jazz in Paris',
  url: 'https://storage.googleapis.com/automotive-media/Jazz_In_Paris.mp3',
  description: 'A funky Jazz tune',
  icon: new Image({
    url: 'https://storage.googleapis.com/automotive-media/album_art.jpg',
    alt: 'Ocean view',
  }),
}));
To add one more point to Nick's answer, you can also build a Media Response, which will allow you to play a long audio file (I'm currently playing a 50-minute album in my app).
You can find it in Google's docs here.
A short example in Node.js could be:
const richResponse = app.buildRichResponse()
  .addSimpleResponse("Here's song one.")
  .addMediaResponse(app.buildMediaResponse()
    .addMediaObjects([
      app.buildMediaObject("Song One", "https://....mp3")
        .setDescription("Song One with description and large image.") // Optional
        .setImage("https://....jpg", app.Media.ImageType.LARGE)
        // Optional. Use app.Media.ImageType.ICON if displaying icon.
    ])
  )
  .addSuggestions(["other songs"]);
And then you can just do
app.ask(richResponse)
UPDATE:
As per a comment request, here is the JSON response sent by my app for a mediaResponse:
{
  "conversationToken": "[\"_actions_on_google\"]",
  "expectUserResponse": true,
  "expectedInputs": [
    {
      "inputPrompt": {
        "richInitialPrompt": {
          "items": [
            {
              "simpleResponse": {
                "textToSpeech": "Here is my favorite album."
              }
            },
            {
              "mediaResponse": {
                "mediaType": "AUDIO",
                "mediaObjects": [
                  {
                    "name": my_name,
                    "description": my_descr,
                    "largeImage": {
                      "url": my_url
                    },
                    "contentUrl": my_contentURL
                  }
                ]
              }
            }
          ],
          "suggestions": [
            {
              "title": my_suggestion
            }
          ]
        }
      },
      "possibleIntents": [
        {
          "intent": "assistant.intent.action.TEXT"
        }
      ]
    }
  ],
  "responseMetadata": {
    "status": {
      "message": "Success (200)"
    },
    "queryMatchInfo": {
      "queryMatched": true,
      "intent": "0a3c14f8-87ca-47e7-a211-4e0a8968e3c5",
      "parameterNames": [
        my_param_name
      ]
    }
  },
  "userStorage": "{\"data\":{}}"
}

I can't trigger my second action on Google Assistant

I have been playing around with the Actions SDK and it seems to work, but only for my main intent. I added a second intent and it never triggers.
Here is my action.json:
{
  "actions": [
    {
      "description": "Default Welcome Intent",
      "name": "MAIN",
      "fulfillment": {
        "conversationName": "conversation_1"
      },
      "intent": {
        "name": "actions.intent.MAIN"
      }
    },
    {
      "name": "add",
      "intent": {
        "name": "myintent.ADD",
        "parameters": [
          {
            "name": "somenumber",
            "type": "SchemaOrg_Number"
          }
        ],
        "trigger": {
          "queryPatterns": [
            "add $SchemaOrg_Number:somenumber",
            "add"
          ]
        }
      },
      "fulfillment": {
        "conversationName": "add"
      }
    }
  ],
  "conversations": {
    "conversation_1": {
      "name": "conversation_1",
      "url": "https://myaddress/sayNumber",
      "fulfillmentApiVersion": 2
    },
    "add": {
      "name": "add",
      "url": "https://myaddress/sayNumber",
      "fulfillmentApiVersion": 2
    }
  }
}
And here is my index.js:
'use strict';

process.env.DEBUG = 'actions-on-google:*';

const ActionsSdkApp = require('actions-on-google').ActionsSdkApp;
const functions = require('firebase-functions');

const NO_INPUTS = [
  'I didn\'t hear that.',
  'If you\'re still there, say that again.',
  'We can stop here. See you soon.'
];

exports.sayNumber = functions.https.onRequest((request, response) => {
  const app = new ActionsSdkApp({request, response});

  function mainIntent (app) {
    console.log('mainIntent');
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can read out an ordinal like ' +
      '<say-as interpret-as="ordinal">123</say-as>. Say a number.</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function addIntent (app) {
    console.log('addIntent');
    let inputPrompt = app.buildInputPrompt(true, '<speak>Hi! <break time="1"/> ' +
      'I can add.</speak>', NO_INPUTS);
    app.ask(inputPrompt);
  }

  function rawInput (app) {
    console.log('rawInput');
    if (app.getRawInput() === 'bye') {
      app.tell('Goodbye!');
    } else {
      let inputPrompt = app.buildInputPrompt(true, '<speak>You said, <say-as interpret-as="ordinal">' +
        app.getRawInput() + '</say-as>' + app.getIntent() + '</speak>', NO_INPUTS);
      app.ask(inputPrompt);
    }
  }

  let actionMap = new Map();
  actionMap.set(app.StandardIntents.MAIN, mainIntent);
  actionMap.set(app.StandardIntents.TEXT, rawInput);
  actionMap.set("myintent.ADD", addIntent);
  app.handleRequest(actionMap);
});
I can say "talk to my action name", and then everything I say after that gets handled as raw input, even if I use the add keywords. What am I doing wrong?
That is correct. The action.json file only defines how users can start a conversation with your Action. Once the conversation has started, you are passed TEXT (or OPTION) intents, and you are expected to handle the natural language processing yourself. Additional intents can be used for speech biasing, but they aren't used to parse the response.
This is different from how some other voice agents handle language parsing. The Actions SDK is primarily intended for cases where you already have your own NLP.
If you don't, you are probably better off using something like Dialogflow or Converse.AI.
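In practice that means the routing has to live inside your TEXT handler. A rough sketch against the question's own index.js (replacing its rawInput function, with trivial keyword matching only) might look like this:

// Sketch: everything after the welcome turn arrives as a TEXT intent, so the
// raw-input handler has to do its own (here: very basic) language parsing.
function rawInput (app) {
  const raw = (app.getRawInput() || '').toLowerCase();
  if (raw.startsWith('add')) {
    // hand off to the same logic the myintent.ADD handler would have run
    return addIntent(app);
  }
  if (raw === 'bye') {
    return app.tell('Goodbye!');
  }
  app.ask(app.buildInputPrompt(false,
    'Say "add" followed by a number, or "bye" to finish.', NO_INPUTS));
}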