CentOS 8: podman exiting all containers (139)

Whatever image I try to run, the behavior is always the same: "Exited (139)".
OS: CentOS 8 with podman, running inside an Azure VM. The CentOS image is the one provided by Azure when creating a VM.
VM: Azure B2S Gen 2 | 2 vCPU(s) | 4 GiB RAM | 8 GiB SSD
Below is the exact extract from my terminal:
pull
$ podman pull fedora
Trying to pull registry.access.redhat.com/fedora...
name unknown: Repo not found
Trying to pull registry.redhat.io/fedora...
unable to retrieve auth token: invalid username/password: unauthorized: Please login to the Red Hat Registry using your Customer Portal credentials. Further instructions can be found here: https://access.redhat.com/RegistryAuthentication
Trying to pull docker.io/library/fedora...
Getting image source signatures
Copying blob ae7b613df528 done
Copying config b3048463dc done
Writing manifest to image destination
Storing signatures
b3048463dcefbe4920ef2ae1af43171c9695e2077f315b2bc12ed0f6f67c86c7
run
$ podman run --rm fedora /bin/echo "Hello Geeks! Welcome to Podman"
ps
$ podman ps -a
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
feb43e01e777 docker.io/library/ubuntu:latest bash 3 minutes ago Exited (139) 3 minutes ago magical_carson
inspect
$ podman inspect feb43e01e777
[
{
"Id": "feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac",
"Created": "2020-12-10T11:35:16.863809294Z",
"Path": "bash",
"Args": [
"bash"
],
"State": {
"OciVersion": "1.0.2-dev",
"Status": "exited",
"Running": false,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 0,
"ExitCode": 139,
"Error": "",
"StartedAt": "2020-12-10T11:35:17.280743295Z",
"FinishedAt": "2020-12-10T11:35:17.280874897Z",
"Healthcheck": {
"Status": "",
"FailingStreak": 0,
"Log": null
}
},
"Image": "f643c72bc25212974c16f3348b3a898b1ec1eb13ec1539e10a103e6e217eb2f1",
"ImageName": "docker.io/library/ubuntu:latest",
"Rootfs": "",
"Pod": "",
"ResolvConfPath": "/run/user/1000/containers/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata/resolv.conf",
"HostnamePath": "/run/user/1000/containers/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata/hostname",
"HostsPath": "/run/user/1000/containers/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata/hosts",
"StaticDir": "/home/brais/.local/share/containers/storage/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata",
"OCIConfigPath": "/home/brais/.local/share/containers/storage/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata/config.json",
"OCIRuntime": "runc",
"LogPath": "/home/brais/.local/share/containers/storage/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata/ctr.log",
"LogTag": "",
"ConmonPidFile": "/run/user/1000/containers/overlay-containers/feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac/userdata/conmon.pid",
"Name": "magical_carson",
"RestartCount": 0,
"Driver": "overlay",
"MountLabel": "system_u:object_r:container_file_t:s0:c375,c701",
"ProcessLabel": "system_u:system_r:container_t:s0:c375,c701",
"AppArmorProfile": "",
"EffectiveCaps": [
"CAP_AUDIT_WRITE",
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FOWNER",
"CAP_FSETID",
"CAP_KILL",
"CAP_MKNOD",
"CAP_NET_BIND_SERVICE",
"CAP_NET_RAW",
"CAP_SETFCAP",
"CAP_SETGID",
"CAP_SETPCAP",
"CAP_SETUID",
"CAP_SYS_CHROOT"
],
"BoundingCaps": [
"CAP_AUDIT_WRITE",
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FOWNER",
"CAP_FSETID",
"CAP_KILL",
"CAP_MKNOD",
"CAP_NET_BIND_SERVICE",
"CAP_NET_RAW",
"CAP_SETFCAP",
"CAP_SETGID",
"CAP_SETPCAP",
"CAP_SETUID",
"CAP_SYS_CHROOT"
],
"ExecIDs": [],
"GraphDriver": {
"Name": "overlay",
"Data": {
"LowerDir": "/home/brais/.local/share/containers/storage/overlay/6581dd55e4fe0935a32a688d74513db86632efb162fd41431e7d69318802dfae/diff:/home/brais/.local/share/containers/storage/overlay/1bd27dc7c1c2e7a36c599becda69d0cd905f4f1a122f2b7a95c81a78abc452ec/diff:/home/brais/.local/share/containers/storage/overlay/bacd3af13903e13a43fe87b6944acd1ff21024132aad6e74b4452d984fb1a99a/diff",
"UpperDir": "/home/brais/.local/share/containers/storage/overlay/ccc5801aaacb05d0ed1e64cee2e38f7b4dd8a29890e6fdf780887d296a1c9696/diff",
"WorkDir": "/home/brais/.local/share/containers/storage/overlay/ccc5801aaacb05d0ed1e64cee2e38f7b4dd8a29890e6fdf780887d296a1c9696/work"
}
},
"Mounts": [],
"Dependencies": [],
"NetworkSettings": {
"EndpointID": "",
"Gateway": "",
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": "",
"Bridge": "",
"SandboxID": "",
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"Ports": {},
"SandboxKey": ""
},
"ExitCommand": [
"/usr/bin/podman",
"--root",
"/home/brais/.local/share/containers/storage",
"--runroot",
"/run/user/1000/containers",
"--log-level",
"error",
"--cgroup-manager",
"cgroupfs",
"--tmpdir",
"/run/user/1000/libpod/tmp",
"--runtime",
"runc",
"--storage-driver",
"overlay",
"--storage-opt",
"overlay.mount_program=/usr/bin/fuse-overlayfs",
"--events-backend",
"file",
"container",
"cleanup",
"feb43e01e7771ca0a5a1b4cdf5a7b2587341493f1ecd7b2723d1ad5a45076aac"
],
"Namespace": "",
"IsInfra": false,
"Config": {
"Hostname": "feb43e01e777",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": true,
"OpenStdin": true,
"StdinOnce": false,
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"TERM=xterm",
"container=podman",
"HOSTNAME=feb43e01e777",
"HOME=/root"
],
"Cmd": [
"bash"
],
"Image": "docker.io/library/ubuntu:latest",
"Volumes": null,
"WorkingDir": "/",
"Entrypoint": "",
"OnBuild": null,
"Labels": null,
"Annotations": {
"io.container.manager": "libpod",
"io.kubernetes.cri-o.Created": "2020-12-10T11:35:16.863809294Z",
"io.kubernetes.cri-o.TTY": "true",
"io.podman.annotations.autoremove": "FALSE",
"io.podman.annotations.init": "FALSE",
"io.podman.annotations.privileged": "FALSE",
"io.podman.annotations.publish-all": "FALSE",
"org.opencontainers.image.stopSignal": "15"
},
"StopSignal": 15,
"CreateCommand": [
"podman",
"run",
"-it",
"ubuntu",
"bash"
]
},
"HostConfig": {
"Binds": [],
"CgroupMode": "host",
"ContainerIDFile": "",
"LogConfig": {
"Type": "k8s-file",
"Config": null
},
"NetworkMode": "slirp4netns",
"PortBindings": {},
"RestartPolicy": {
"Name": "",
"MaximumRetryCount": 0
},
"AutoRemove": false,
"VolumeDriver": "",
"VolumesFrom": null,
"CapAdd": [],
"CapDrop": [],
"Dns": [],
"DnsOptions": [],
"DnsSearch": [],
"ExtraHosts": [],
"GroupAdd": [],
"IpcMode": "private",
"Cgroup": "",
"Cgroups": "default",
"Links": null,
"OomScoreAdj": 0,
"PidMode": "private",
"Privileged": false,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"SecurityOpt": [],
"Tmpfs": {},
"UTSMode": "private",
"UsernsMode": "",
"ShmSize": 65536000,
"Runtime": "oci",
"ConsoleSize": [
0,
0
],
"Isolation": "",
"CpuShares": 0,
"Memory": 0,
"NanoCpus": 0,
"CgroupParent": "",
"BlkioWeight": 0,
"BlkioWeightDevice": null,
"BlkioDeviceReadBps": null,
"BlkioDeviceWriteBps": null,
"BlkioDeviceReadIOps": null,
"BlkioDeviceWriteIOps": null,
"CpuPeriod": 0,
"CpuQuota": 0,
"CpuRealtimePeriod": 0,
"CpuRealtimeRuntime": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": [],
"DiskQuota": 0,
"KernelMemory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"MemorySwappiness": 0,
"OomKillDisable": false,
"PidsLimit": 0,
"Ulimits": [],
"CpuCount": 0,
"CpuPercent": 0,
"IOMaximumIOps": 0,
"IOMaximumBandwidth": 0
}
}
]
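For reference, exit code 139 follows the usual 128 + signal convention, so the container's main process is being killed with SIGSEGV (signal 11) almost immediately after start. To gather more detail I can re-run with verbose logging and look at the per-container log that conmon writes (a sketch using standard podman flags; the container ID is the one from the ps output above):
# Re-run the failing command with podman's global debug logging enabled
$ podman --log-level=debug run --rm docker.io/library/fedora /bin/echo "hello"
# Locate the log file conmon wrote for the already-exited container
$ podman inspect --format '{{.LogPath}}' feb43e01e777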
podman info
$ podman info
host:
arch: amd64
buildahVersion: 1.15.1
cgroupVersion: v1
conmon:
package: conmon-2.0.20-2.module_el8.3.0+475+c50ce30b.x86_64
path: /usr/bin/conmon
version: 'conmon version 2.0.20, commit: 1019ecdeda3936be22162bb1cca308192145de53'
cpus: 2
distribution:
distribution: '"centos"'
version: "8"
eventLogger: file
hostname: vm-test1
idMappings:
gidmap:
- container_id: 0
host_id: 1000
size: 1
- container_id: 1
host_id: 100000
size: 65536
uidmap:
- container_id: 0
host_id: 1000
size: 1
- container_id: 1
host_id: 100000
size: 65536
kernel: 4.18.0-193.28.1.el8_2.x86_64
linkmode: dynamic
memFree: 247398400
memTotal: 4129382400
ociRuntime:
name: runc
package: runc-1.0.0-68.rc92.module_el8.3.0+475+c50ce30b.x86_64
path: /usr/bin/runc
version: 'runc version spec: 1.0.2-dev'
os: linux
remoteSocket:
path: /run/user/1000/podman/podman.sock
rootless: true
slirp4netns:
executable: /usr/bin/slirp4netns
package: slirp4netns-1.1.4-2.module_el8.3.0+475+c50ce30b.x86_64
version: |-
slirp4netns version 1.1.4
commit: b66ffa8e262507e37fca689822d23430f3357fe8
libslirp: 4.3.1
SLIRP_CONFIG_VERSION_MAX: 3
swapFree: 0
swapTotal: 0
uptime: 17h 48m 18.07s (Approximately 0.71 days)
registries:
search:
- registry.access.redhat.com
- registry.redhat.io
- docker.io
store:
configFile: /home/brais/.config/containers/storage.conf
containerStore:
number: 1
paused: 0
running: 0
stopped: 1
graphDriverName: overlay
graphOptions:
overlay.mount_program:
Executable: /usr/bin/fuse-overlayfs
Package: fuse-overlayfs-1.1.2-3.module_el8.3.0+507+aa0970ae.x86_64
Version: |-
fuse-overlayfs: version 1.1.0
FUSE library version 3.2.1
using FUSE kernel interface version 7.26
graphRoot: /home/brais/.local/share/containers/storage
graphStatus:
Backing Filesystem: xfs
Native Overlay Diff: "false"
Supports d_type: "true"
Using metacopy: "false"
imageStore:
number: 8
runRoot: /run/user/1000/containers
volumePath: /home/brais/.local/share/containers/storage/volumes
version:
APIVersion: 1
Built: 1600970293
BuiltTime: Thu Sep 24 17:58:13 2020
GitCommit: ""
GoVersion: go1.14.7
OsArch: linux/amd64
Version: 2.0.5
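podman info confirms a rootless setup on kernel 4.18.0-193 with cgroups v1, fuse-overlayfs storage and runc 1.0.0-rc92 as the OCI runtime. Since every image fails the same way, one way to narrow it down is to swap the OCI runtime for a single run (a sketch; it assumes the crun package is available from the CentOS 8 container-tools repositories, which I have not verified):
# Install an alternative OCI runtime (package availability assumed)
$ sudo dnf install -y crun
# Tell podman to use it for this run only, instead of the default runc
$ podman --runtime /usr/bin/crun run --rm docker.io/library/fedora /bin/echo "hello"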

Related

'Create service' for a cluster with EC2 launch type is giving an error in the AWS console

I am trying out a sample ECS setup with the EC2 launch type in the AWS free tier.
I created a cluster for an EC2 instance.
Then I created a task definition for EC2 resources with image URI public.ecr.aws/ubuntu/nginx:latest, OS Linux/X86_64, and a t2.micro instance.
While creating/deploying the service, I get an error after selecting the task definition I created.
There was an error deploying nginx-service
Resource handler returned message: "Error occurred during operation 'ECS Deployment Circuit Breaker was triggered'." (RequestToken: 1ab71394-b41e-190a-df10-6a87d62a7915, HandlerErrorCode: GeneralServiceException)
task-def-json
{
"taskDefinitionArn": "arn:aws:ecs:ap-northeast-1:930446195568:task-definition/ecs-task-def:1",
"containerDefinitions": [
{
"name": "nginx",
"image": "public.ecr.aws/ubuntu/nginx:latest",
"cpu": 0,
"portMappings": [
{
"name": "nginx-80-tcp",
"containerPort": 80,
"hostPort": 80,
"protocol": "tcp",
"appProtocol": "http"
}
],
"essential": true,
"environment": [],
"environmentFiles": [],
"mountPoints": [],
"volumesFrom": []
}
],
"family": "ecs-task-def",
"executionRoleArn": "arn:aws:iam::930446195568:role/ecsTaskExecutionRole",
"networkMode": "bridge",
"revision": 1,
"volumes": [],
"status": "ACTIVE",
"placementConstraints": [],
"compatibilities": [
"EC2"
],
"requiresCompatibilities": [
"EC2"
],
"cpu": "512",
"memory": "1024",
"runtimePlatform": {
"cpuArchitecture": "X86_64",
"operatingSystemFamily": "LINUX"
},
"registeredAt": "2023-02-15T17:11:45.596Z",
"registeredBy": "arn:aws:iam::930446195568:user/admin_user",
"tags": []
}
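The circuit-breaker message itself does not say why the deployment failed; the stopped tasks and the service event log usually do. A sketch of how to dig out the underlying reason with the AWS CLI (my-cluster is a placeholder for the cluster name, nginx-service is the service named in the error above, and <task-arn> comes from the list-tasks output):
# Recent service events often name the real failure (no container instances, port conflicts, etc.)
$ aws ecs describe-services --cluster my-cluster --services nginx-service --query 'services[0].events[0:5].message'
# Stopped tasks carry a stoppedReason explaining why the container or placement failed
$ aws ecs list-tasks --cluster my-cluster --desired-status STOPPED
$ aws ecs describe-tasks --cluster my-cluster --tasks <task-arn> --query 'tasks[].[stoppedReason,containers[].reason]'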

Ansible AWX: playbook runs successfully to create a directory on localhost, but when I check that location the directory is not there

I am new to Ansible AWX. I wanted to create a directory on my localhost, so I created the playbook given below. When I run the playbook it shows a success message and "changed" on localhost, but when I go to that location the directory isn't available there.
playbook:
---
- hosts: localhost
  tasks:
    - name: Create Directory
      file:
        path: ~/newDir1
        mode: "0755"
        state: directory
output:
TASK [Create Directory] ********************************************************
changed: [localhost]
PLAY RECAP *********************************************************************
localhost : ok=2 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
The location I got from the output:
{
"path": "/var/lib/awx/newDir1",
"changed": true,
"diff": {
"before": {
"path": "/var/lib/awx/newDir1",
"state": "absent"
},
"after": {
"path": "/var/lib/awx/newDir1",
"state": "directory"
}
},
"uid": 975,
"gid": 975,
"owner": "awx",
"group": "awx",
"mode": "0755",
"state": "directory",
"secontext": "system_u:object_r:tmp_t:s0",
"size": 6,
"invocation": {
"module_args": {
"path": "/var/lib/awx/newDir1",
"mode": "0755",
"state": "directory",
"recurse": false,
"force": false,
"follow": true,
"modification_time_format": "%Y%m%d%H%M.%S",
"access_time_format": "%Y%m%d%H%M.%S",
"_original_basename": null,
"_diff_peek": null,
"src": null,
"modification_time": null,
"access_time": null,
"owner": null,
"group": null,
"seuser": null,
"serole": null,
"selevel": null,
"setype": null,
"attributes": null,
"content": null,
"backup": null,
"remote_src": null,
"regexp": null,
"delimiter": null,
"directory_mode": null,
"unsafe_writes": null
}
},
"_ansible_no_log": false
}
If you are using the AWX Docker image, then you need to check for the created directory inside that container.
Get inside the container and search. :)
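For example (a sketch; awx_task is the task container name used by the Docker-based AWX install and is an assumption on my part, adjust it to whatever docker ps shows):
# Find the AWX containers running on the host
$ docker ps --format '{{.Names}}'
# The job ran as the awx user inside the task container, so the directory should be there
$ docker exec -it awx_task ls -ld /var/lib/awx/newDir1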

Azure DevOps - minimum code coverage on pull request

I have an Angular 9 project and I am trying to get the minimum code coverage check working on a pull request in Azure DevOps according to the documentation. However, the minimum code coverage check isn't working; I am probably missing a step...
Steps to reproduce:
Create a new Angular 9 project: "ng new DefaultWebsite"
Create a build pipeline and edit the Karma and Protractor configs according to Microsoft's "Build, test, and deploy JavaScript and Node.js apps" documentation
Add an "azurepipelines-coverage.yml" in the root of my project to enable the code coverage check on a pull request according to Microsoft's "Code coverage for pull requests" documentation
Disable some tests in the app.spec.ts file so the code coverage isn't 100% any more; it is now 77%. Changed the minimum code coverage in the yml file to 95% so the pull request cannot be completed; in theory this should give a "Coverage status check failed" error according to the Microsoft documentation.
However, when the pull request is started there is a code coverage check below the 'Status' section. When the build (with unit and e2e tests) is done, there is no code coverage error, which I expect to see below the 'Status' section.
Screenshots: pull request with code coverage check; pull request build completed.
When I look at the build there are test results and code coverage results.
Screenshots: build test result; build code coverage result.
When I look at the code coverage result I see a line coverage of 75%, which should be a minimum of 90% according to the yml file.
Karma config file
// Karma configuration file, see link for more information
// https://karma-runner.github.io/1.0/config/configuration-file.html
module.exports = function (config) {
const process = require('process');
process.env.CHROME_BIN = require('puppeteer').executablePath();
config.set({
basePath: '',
frameworks: ['jasmine', '@angular-devkit/build-angular'],
plugins: [
require('karma-jasmine'),
require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'),
require('karma-coverage-istanbul-reporter'),
require('@angular-devkit/build-angular/plugins/karma'),
require('karma-junit-reporter')
],
client: {
clearContext: false // leave Jasmine Spec Runner output visible in browser
},
coverageIstanbulReporter: {
dir: require('path').join(__dirname, './coverage'),
reports: ['html', 'lcovonly', 'text-summary', 'cobertura'],
fixWebpackSourcePaths: true
},
coverageReporter: {
type : 'html',
dir : 'coverage/'
},
junitReporter: {
outputDir: './coverage', // results will be saved as $outputDir/$browserName.xml
outputFile: 'junit.xml', // if included, results will be saved as $outputDir/$browserName/$outputFile
suite: '', // suite will become the package name attribute in xml testsuite element
useBrowserName: true, // add browser name to report and classes names
nameFormatter: undefined, // function (browser, result) to customize the name attribute in xml testcase element
classNameFormatter: undefined, // function (browser, result) to customize the classname attribute in xml testcase element
properties: {}, // key value pair of properties to add to the <properties> section of the report
xmlVersion: null // use '1' if reporting to be per SonarQube 6.2 XML format
},
reporters: ['progress', 'kjhtml','junit'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['ChromeHeadless'],
singleRun: false,
restartOnFileChange: true
});
};
Protractor config file:
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/lib/config.ts
const { SpecReporter } = require('jasmine-spec-reporter');
const { JUnitXmlReporter } = require('jasmine-reporters');
process.env.CHROME_BIN = process.env.CHROME_BIN || require("puppeteer").executablePath();
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./src/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome',
chromeOptions: {
args: ["--headless", "--disable-gpu", "--window-size=1200,900"],
binary: process.env.CHROME_BIN
}
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function () { }
},
onPrepare() {
require('ts-node').register({
project: require('path').join(__dirname, './tsconfig.json')
});
jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } }));
var junitReporter = new JUnitXmlReporter({
savePath: require('path').join(__dirname, './junit'),
consolidateAll: true
});
jasmine.getEnv().addReporter(junitReporter);
}
};
azurepipelines-coverage.yml
coverage:
  status:       # Code coverage status will be posted to pull requests based on targets defined below.
    diff:       # diff coverage is code coverage only for the lines changed in a pull request.
      target: 95%   # set this to a desired %. Default is 70%.
Azure Build pipeline steps:
"steps": [
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "npm install",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "fe47e961-9fa8-4106-8639-368c022d43ad",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"command": "install",
"workingDir": "Project\\Frontend\\DefaultWebsite",
"verbose": "false",
"customCommand": "",
"customRegistry": "useNpmrc",
"customFeed": "",
"customEndpoint": "",
"publishRegistry": "useExternalRegistry",
"publishFeed": "",
"publishPackageMetadata": "true",
"publishEndpoint": ""
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "npm custom - test ",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "fe47e961-9fa8-4106-8639-368c022d43ad",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"command": "custom",
"workingDir": "Project\\Frontend\\DefaultWebsite",
"verbose": "false",
"customCommand": "run test",
"customRegistry": "useNpmrc",
"customFeed": "",
"customEndpoint": "",
"publishRegistry": "useExternalRegistry",
"publishFeed": "",
"publishPackageMetadata": "true",
"publishEndpoint": ""
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish code coverage from Project\\Frontend\\DefaultWebsite\\coverage\\cobertura-coverage.xml",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "2a7ebc54-c13e-490e-81a5-d7561ab7cd97",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"codeCoverageTool": "Cobertura",
"summaryFileLocation": "Project\\Frontend\\DefaultWebsite\\coverage\\cobertura-coverage.xml",
"pathToSources": "",
"reportDirectory": "",
"additionalCodeCoverageFiles": "",
"failIfCoverageEmpty": "false"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Test Results Project\\Frontend\\DefaultWebsite\\**\\junit.xml copy",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "0b0f01ed-7dde-43ff-9cbb-e48954daf9b1",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"testRunner": "JUnit",
"testResultsFiles": "Project\\Frontend\\DefaultWebsite\\**\\junit.xml",
"searchFolder": "$(System.DefaultWorkingDirectory)",
"mergeTestResults": "false",
"failTaskOnFailedTests": "false",
"testRunTitle": "",
"platform": "",
"configuration": "",
"publishRunAttachments": "true"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "npm custom - e2e",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "fe47e961-9fa8-4106-8639-368c022d43ad",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"command": "custom",
"workingDir": "Project\\Frontend\\DefaultWebsite",
"verbose": "false",
"customCommand": "run e2e",
"customRegistry": "useNpmrc",
"customFeed": "",
"customEndpoint": "",
"publishRegistry": "useExternalRegistry",
"publishFeed": "",
"publishPackageMetadata": "true",
"publishEndpoint": ""
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Test Results Project\\Frontend\\DefaultWebsite\\e2e\\**\\junitresults.xml",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "0b0f01ed-7dde-43ff-9cbb-e48954daf9b1",
"versionSpec": "2.*",
"definitionType": "task"
},
"inputs": {
"testRunner": "JUnit",
"testResultsFiles": "Project\\Frontend\\DefaultWebsite\\e2e\\**\\junitresults.xml",
"searchFolder": "$(System.DefaultWorkingDirectory)",
"mergeTestResults": "false",
"failTaskOnFailedTests": "false",
"testRunTitle": "",
"platform": "",
"configuration": "",
"publishRunAttachments": "true"
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "npm custom - prodBuild",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "fe47e961-9fa8-4106-8639-368c022d43ad",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"command": "custom",
"workingDir": "Project\\Frontend\\DefaultWebsite",
"verbose": "false",
"customCommand": "run prodBuild",
"customRegistry": "useNpmrc",
"customFeed": "",
"customEndpoint": "",
"publishRegistry": "useExternalRegistry",
"publishFeed": "",
"publishPackageMetadata": "true",
"publishEndpoint": ""
}
},
{
"environment": {},
"enabled": true,
"continueOnError": false,
"alwaysRun": false,
"displayName": "Publish Artifact: app",
"timeoutInMinutes": 0,
"condition": "succeeded()",
"task": {
"id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe",
"versionSpec": "1.*",
"definitionType": "task"
},
"inputs": {
"PathtoPublish": "Project\\Frontend\\DefaultWebsite\\dist",
"ArtifactName": "app",
"ArtifactType": "Container",
"TargetPath": "",
"Parallel": "false",
"ParallelCount": "8",
"FileCopyOptions": ""
}
}
],
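Before digging into the pull request policy itself, it can help to confirm locally that the test run actually produces the Cobertura file the publish step expects (a sketch; it assumes the npm "test" script maps to ng test and uses the standard Angular CLI flags):
# Run the unit tests once with coverage, the same thing the "npm custom - test" step does
$ npm run test -- --watch=false --code-coverage
# The publish-coverage step points at exactly this file
$ ls coverage/cobertura-coverage.xml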

Why isn't Istio Passthrough example working as expected?

I tried the passthrough example from https://istio.io/docs/tasks/traffic-management/ingress/ingress-sni-passthrough/ but it does not seem to be working.
I'm getting a 404 with the logs below:
2020-01-29T17:21:12.551246Z info accesslog.instance.istio-system {"apiClaims": "", "apiKey": "", "clientTraceId": "", "connection_security_policy": "unknown", "destinationApp": "", "destinationIp": "0.0.0.0", "destinationName": "unknown", "destinationNamespace": "default", "destinationOwner": "unknown", "destinationPrincipal": "", "destinationServiceHost": "nginx.foo.bar.us", "destinationWorkload": "unknown", "grpcMessage": "", "grpcStatus": "", "httpAuthority": "nginx.foo.bar.us", "latency": "146.827µs", "method": "GET", "permissiveResponseCode": "none", "permissiveResponsePolicyID": "none", "protocol": "http", "receivedBytes": 243, "referer": "", "reporter": "source", "requestId": "dd1b7b2b-10b1-9c36-ae64-cce9825773d2", "requestSize": 0, "requestedServerName": "", "responseCode": 404, "responseFlags": "NR", "responseSize": 0, "responseTimestamp": "2020-01-29T17:21:12.551334Z", "sentBytes": 60, "sourceApp": "istio-ingressgateway", "sourceIp": "100.108.31.87", "sourceName": "istio-ingressgateway-5b794cc7c9-82z4h", "sourceNamespace": "istio-system", "sourceOwner": "kubernetes://apis/apps/v1/namespaces/istio-system/deployments/istio-ingressgateway", "sourcePrincipal": "", "sourceWorkload": "istio-ingressgateway", "url": "/", "userAgent": "curl/7.64.1", "xForwardedFor": "192.168.0.1,172.31.0.156"}
I've deployed Istio on Kubernetes 1.15.8 with the following version:
❯ istioctl version
client version: 1.4.0
control plane version: 1.4.0
data plane version: 1.4.0 (5 proxies)
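For reference, the access log above shows protocol "http", an empty requestedServerName and responseFlags "NR" (no route found), while the SNI passthrough task routes purely on the TLS SNI of an HTTPS request. The task's documented way of exercising the example looks roughly like this (a sketch; $INGRESS_HOST and $SECURE_INGRESS_PORT are the gateway address variables the Istio ingress docs define, and nginx.crt stands for the certificate created in the task):
# Resolve the host to the ingress gateway and send an HTTPS request so SNI is set
$ curl -v --resolve "nginx.foo.bar.us:$SECURE_INGRESS_PORT:$INGRESS_HOST" --cacert nginx.crt "https://nginx.foo.bar.us:$SECURE_INGRESS_PORT"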
Am I missing something?
Appreciate your help.
Cheers,
-Ajit

get json key value using powershell

I have the following json output string:
{
"meta": {
"limit": 20,
"next": null,
"offset": 0,
"previous": null,
"total_count": 1
},
"objects": [{
"bcontext": "/api/v2.0/buildercontext/2/",
"bugs": [],
"build": {
"bldtype": "obj",
"branch": "main",
"buildstatus": [{
"build": "/api/v2.0/build/2140634/",
"failurereason": "_checkfailures (seen: FAIL - /testrun/18647678/ - area[4769] AIM-SANITY)",
"id": "1294397",
"lastupdate": "2015-03-31T14:30:18",
"overridden": false,
"overridedesc": "",
"overrideuser": null,
"recommended": false,
"resource_uri": "/api/v2.0/buildstatus/1294397/",
"slatype": {
"id": "26",
"name": "VA_Bats",
"resource_uri": "/api/v2.0/sla/26/"
}
}],
"changeset": "494625",
"coverage": false,
"deliverables": ["/api/v2.0/deliverable/4296455/", "/api/v2.0/deliverable/4296956/", "/api/v2.0/deliverable/4296959/", "/api/v2.0/deliverable/4296986/", "/api/v2.0/deliverable/4296992/", "/api/v2.0/deliverable/4296995/", "/api/v2.0/deliverable/4297034/", "/api/v2.0/deliverable/4297058/"],
"git_host": null,
"git_repo": null,
"id": "2140634",
"p4host": {
"id": "10",
"p4port": "perforce-rhino.eng.com:1800",
"p4weburl": "http://p4web.eng.com:1800",
"resource_uri": "/api/v2.0/perforceserver/10/"
},
"resource_uri": "/api/v2.0/build/2140634/",
"site": "/api/v2.0/site/25/",
"site_name": "mbu",
"slastested": ["/api/v2.0/sla/26/"],
"submit_time": "2015-03-31T05:40:21",
"submit_user": "haharonof"
},
"builder": "/api/v2.0/builder/1423/",
"clean": true,
"componentbuilds": "vcops-vsphere-solution-pak=sb-5242047,vrops=sb-5242013,vscm=sb-5242025,vsutilities=sb-5242029;parentbuilder=1410",
"deleted": false,
"endtime": "2015-03-31T06:20:58",
"helpzillas": [],
"id": "4296956",
"location": {
"httpserver": "sc-prd-cat-services001.eng.com",
"id": "1",
"name": "PA",
"nfsserver": "cat-results.eng.com",
"pxedir": "/mts/builder-pxe",
"resource_uri": "/api/v2.0/location/1/",
"resultspath": "/results"
},
"nfsserver": "build-storage60",
"p4client": "vmktestdevnanny-builder-1423",
"path": "/storage60/release/sb-5242148",
"ready": true,
"resource_uri": "/api/v2.0/deliverable/4296956/",
"result": "PASS",
"sbbuildid": 5242148,
"sbjobid": 5242148,
"sbuser": "arajamanickam",
"starttime": "2015-03-31T06:16:50",
"targetchangeset": "494625",
"targets": "vcopssuitevm",
"triagetime": null,
"vmodl": null
}]
}
I want to get sbbuildid using PowerShell. How can I do this?
By converting your JSON to an object using the ConvertFrom-Json cmdlet (assuming $jsonString contains the JSON above):
$jsonObj = $jsonString | ConvertFrom-Json
$jsonObj.objects.sbbuildid
Alternatively, put the whole JSON string in $build_info and extract the value by position:
$sb_build_id = $build_info.Substring($build_info.IndexOf("sbbuildid") + 11, 8).trim()