We have a website that uses enhanced analytics. On the page where a list of products is viewed the tracking code looks like this:
// Google Analytics command-queue stub: buffers ga(...) calls until
// analytics.js finishes loading, then records the load timestamp.
window.ga = window.ga || function () { (ga.q = ga.q || []).push(arguments); };
ga.l = +new Date(); // single timestamp (the duplicate `ga.l = +new Date;` line was removed)

// Fix: `anonymizeIp` is not a valid field for the create command — the GA
// debug log reports "This field cannot be set in a create method. Please use
// ga('set', 'anonymizeIp', true);" — so it is set separately here.
ga('create', 'xxxxxx', 'auto');
ga('set', 'anonymizeIp', true);

// Enhanced Ecommerce plugin; currency applies to all ec hits on this page.
ga('require', 'ec');
ga('set', 'currencyCode', 'EUR');

// Product impressions for the list page. Note that dimension1/dimension2 are
// product-scoped custom dimensions: they only attach to the hit they are sent
// with, so in reporting they are only usable with the Impressions metric.
var impressions = [
  { id: '5', name: 'Lorem Ipsum', category: 'Lorem', list: 'lorem-list', dimension1: '15', dimension2: 'Lorem' },
  { id: '6', name: 'Lorem Ipsum', category: 'Lorem', list: 'lorem-list', dimension1: '16', dimension2: 'Lorem' },
  { id: '7', name: 'Lorem Ipsum', category: 'Lorem', list: 'lorem-list', dimension1: '17', dimension2: 'Lorem' }
];
impressions.forEach(function (product) {
  ga('ec:addImpression', product);
});

// The queued impression payload is sent along with the pageview hit.
ga('send', 'pageview');
The impressions are measured correctly (id, name, category, list etc) but the custom dimensions (dimension1 and dimension2) seem to be null for all measurements (been gathering for 3 weeks now).
As far as I can see the code is exactly as stated in the documentation: https://developers.google.com/analytics/devguides/collection/analyticsjs/enhanced-ecommerce#measuring-activities
What am I overlooking?
Update: As requested the (anonymised) debug log using chrome GA:debug where it seems the dimensions are provided correctly:
analytics_debug.js:10 Running analytics_debug.js. This script is intended for testing and debugging only.
log # analytics_debug.js:10
J # analytics_debug.js:9
(anonymous) # analytics_debug.js:76
(anonymous) # analytics_debug.js:76
analytics_debug.js:10 Initializing Google Analytics.
analytics_debug.js:10 Loading resource for plugin: ec
analytics_debug.js:10 Loading script: "https://www.google-analytics.com/plugins/ua/ec.js"
analytics_debug.js:10 Running command: ga("create", "UA-xxx-1", "auto", {anonymizeIp: true})
analytics_debug.js:10 Creating new tracker: t0
analytics_debug.js:10 Auto cookieDomain found: "myclient.nl"
analytics_debug.js:10 This field cannot be set in a create method. Please use ga("set", "anonymizeIp", true);
log # analytics_debug.js:10
J # analytics_debug.js:9
Td # analytics_debug.js:13
ad # analytics_debug.js:51
Z.create # analytics_debug.js:69
Y.b.(anonymous function) # analytics_debug.js:38
jf.R # analytics_debug.js:68
jf.H # analytics_debug.js:66
Z.P # analytics_debug.js:73
cd # analytics_debug.js:59
A # analytics_debug.js:59
(anonymous) # analytics_debug.js:76
(anonymous) # analytics_debug.js:76
analytics_debug.js:10 Running command: ga("require", "ec")
analytics_debug.js:10 Waiting on require of "ec" to be fulfilled.
analytics_debug.js:10 Registered new plugin: ga(provide, "render", Function)
analytics_debug.js:10 Running command: ga("require", "ec")
analytics_debug.js:10 Waiting on require of "ec" to be fulfilled.
analytics_debug.js:10 Executing Google Analytics commands.
analytics_debug.js:10 Registered new plugin: ga(provide, "ec", Function)
analytics_debug.js:10 Running command: ga("require", "ec")
analytics_debug.js:10 Plugin "ec" intialized on tracker "t0".
analytics_debug.js:10 Running command: ga("set", "currencyCode", "EUR")
analytics_debug.js:10 Running command: ga("ec:addImpression", {id: "1", name: "xxxx", category: "xxx", list: "xxxx", dimension1: "7", dimension2: "xxx"})
analytics_debug.js:10 Running command: ga("ec:addImpression", {id: "2", name: "xxxx", category: "xxx", list: "xxxx", dimension1: "7", dimension2: "xxx"})
analytics_debug.js:10 Running command: ga("ec:addImpression", {id: "3", name: "xxxx", category: "xxx", list: "xxxx", dimension1: "7", dimension2: "xxx"})
analytics_debug.js:10 Running command: ga("ec:addImpression", {id: "7", name: "xxxx", category: "xxx", list: "xxxx", dimension1: "7", dimension2: "xxx"})
analytics_debug.js:10 Running command: ga("ec:addImpression", {id: "8", name: "xxxx", category: "xxx", list: "xxxx", dimension1: "7", dimension2: "xxx"})
analytics_debug.js:10 Running command: ga("send", "pageview")
analytics_debug.js:10
Sent beacon:
analytics_debug.js:10 _j1 (&jid)
analytics_debug.js:10 adSenseId (&a) xxxxxxxxx
analytics_debug.js:10 apiVersion (&v) 1
analytics_debug.js:10 clientId (&cid) xxxxxxxxx
analytics_debug.js:10 currencyCode (&cu) EUR
analytics_debug.js:10 ec:impression list "1" name (&il1nm) xxx
analytics_debug.js:10 ec:impression list "1" product "1" category (&il1pi1ca) xxx
analytics_debug.js:10 ec:impression list "1" product "1" dimension "1" (&il1pi1cd1) 7
analytics_debug.js:10 ec:impression list "1" product "1" dimension "2" (&il1pi1cd2) xxx
analytics_debug.js:10 ec:impression list "1" product "1" id (&il1pi1id) 1
analytics_debug.js:10 ec:impression list "1" product "1" name (&il1pi1nm) xxx
analytics_debug.js:10 ec:impression list "1" product "2" category (&il1pi2ca) xxx
analytics_debug.js:10 ec:impression list "1" product "2" dimension "1" (&il1pi2cd1) 7
analytics_debug.js:10 ec:impression list "1" product "2" dimension "2" (&il1pi2cd2) xxx
analytics_debug.js:10 ec:impression list "1" product "2" id (&il1pi2id) 2
analytics_debug.js:10 ec:impression list "1" product "2" name (&il1pi2nm) xxx
analytics_debug.js:10 ec:impression list "1" product "3" category (&il1pi3ca) xxx
analytics_debug.js:10 ec:impression list "1" product "3" dimension "1" (&il1pi3cd1) 7
analytics_debug.js:10 ec:impression list "1" product "3" dimension "2" (&il1pi3cd2) xxx
analytics_debug.js:10 ec:impression list "1" product "3" id (&il1pi3id) 3
analytics_debug.js:10 ec:impression list "1" product "3" name (&il1pi3nm) xxx
analytics_debug.js:10 ec:impression list "1" product "4" category (&il1pi4ca) xxx
analytics_debug.js:10 ec:impression list "1" product "4" dimension "1" (&il1pi4cd1) 7
analytics_debug.js:10 ec:impression list "1" product "4" dimension "2" (&il1pi4cd2) xxx
analytics_debug.js:10 ec:impression list "1" product "4" id (&il1pi4id) 7
analytics_debug.js:10 ec:impression list "1" product "4" name (&il1pi4nm) xxx
analytics_debug.js:10 ec:impression list "1" product "5" category (&il1pi5ca) xxx
analytics_debug.js:10 ec:impression list "1" product "5" dimension "1" (&il1pi5cd1) 7
analytics_debug.js:10 ec:impression list "1" product "5" dimension "2" (&il1pi5cd2) xxx
analytics_debug.js:10 ec:impression list "1" product "5" id (&il1pi5id) 8
analytics_debug.js:10 ec:impression list "1" product "5" name (&il1pi5nm) xxx
analytics_debug.js:10 encoding (&de) UTF-8
analytics_debug.js:10 hitType (&t) pageview
analytics_debug.js:10 javaEnabled (&je) 0
analytics_debug.js:10 language (&ul) en-us
analytics_debug.js:10 location (&dl) https://www.myclient.nl/xxxx
analytics_debug.js:10 screenColors (&sd) 24-bit
analytics_debug.js:10 screenResolution (&sr) 1680x1050
analytics_debug.js:10 title (&dt) xxxxx - myclient
analytics_debug.js:10 trackingId (&tid) UA-xxxx-1
analytics_debug.js:10 viewportSize (&vp) 1610x494
My assumption is that there are two problems.
1) You are using product scoped custom dimensions.
As your custom dimensions are product scoped, it is important to note that these dimensions do not attribute to the ID through all stages of Enhanced Ecommerce. This means that the custom dimension will only have data relevant to the hit in which it is sent. In this case, the only metric you will be able to retrieve is 'Impressions'. If you want to view this custom dimensions with other relevant metrics, then you need to include it in each Enhanced Ecommerce hit.
Link for more Enhanced Ecommerce attribution info: https://support.google.com/analytics/answer/6014841?hl=en#product_attribution
Which leads us to:
2) You might be using the wrong metrics with your custom dimensions.
Because these dimensions are only relevant at the point of which they are sent, and given the example you have provided, the only report which can be created is:
Dimensions:
Dimension 1, Dimension 2
Metrics:
Impressions
In the provided custom report example (with author/pageviews), the metric being used is pageviews. As your dimensions are not hit level dimensions, sent along with the page view hit, there is no 'pageview' metrics available for these custom dimensions, leading to why they are empty.
Related
I have setup a Keycloak Cluster in GKE with NGINX as Ingress Controller. I have use the Codecentrics Helm Chart: [https://github.com/codecentric/helm-charts/tree/master/charts/keycloak][Keycloak Helm Chart]
I am using JDBC_PING for JGroups and have the following cli script and Ingress config. I have replicas set to 2. When I kill a pod the session is still usable and everything is working fine, I can navigate in the keycloak admin interface and do everything. But when I hit F5 to reload the page I receive an 502 Bad Gateway error. Sometimes it does recover and I can just reload and everything is just fine, but sometimes I have to delete the cookies completely to make it work again.
I am not sure where the issue is coming from.
Cookies in Browser:
MySQL Table JGROUPSPING:
Ingress Annotations:
# NGINX ingress annotations for the Keycloak service.
annotations:
kubernetes.io/ingress.class: "nginx"
kubernetes.io/tls-acme: "true"
nginx.ingress.kubernetes.io/force-ssl-redirect: "false"
# Rate limiting — NOTE(review): verify units against the ingress-nginx docs
# (limit-rate is per-connection bandwidth, limit-rps is requests per second).
nginx.ingress.kubernetes.io/limit-rate: "150"
nginx.ingress.kubernetes.io/limit-rps: "150"
# Cookie-based sticky sessions: the "route" cookie pins a browser to one pod
# and is reissued when the backing pod fails.
nginx.ingress.kubernetes.io/session-cookie-change-on-failure: "true"
nginx.ingress.kubernetes.io/affinity: "cookie"
nginx.ingress.kubernetes.io/session-cookie-name: "route"
# Affinity cookie lifetime: 21600 s = 6 hours.
nginx.ingress.kubernetes.io/session-cookie-expires: "21600"
nginx.ingress.kubernetes.io/session-cookie-max-age: "21600"
# Block external access to the master realm's metrics endpoint.
nginx.ingress.kubernetes.io/server-snippet: |
location /auth/realms/master/metrics {
return 403;
}
extra envs:
# Additional environment variables for Keycloak
# Rendered into the Keycloak pod spec; '{{ .Values.* }}' placeholders are
# filled in by Helm templating.
extraEnv: |
- name: KEYCLOAK_STATISTICS
value: all
# Required behind a reverse proxy/ingress so Keycloak trusts X-Forwarded-* headers.
- name: PROXY_ADDRESS_FORWARDING
value: "true"
- name: KEYCLOAK_USER
value: '{{ .Values.ADMIN_USER }}'
- name: KEYCLOAK_PASSWORD
value: '{{ .Values.ADMIN_PASS }}'
- name: JAVA_OPTS
value: >-
-XX:+UseContainerSupport
-XX:MaxRAMPercentage=50.0
-Djava.net.preferIPv4Stack=true
-Djboss.modules.system.pkgs=$JBOSS_MODULES_SYSTEM_PKGS
-Djava.awt.headless=true
# Database-backed JGroups member discovery (no multicast needed).
- name: JGROUPS_DISCOVERY_PROTOCOL
value: JDBC_PING
# Keep two owners per cache entry so session data survives the loss of one pod.
- name: CACHE_OWNERS_COUNT
value: "2"
- name: CACHE_OWNERS_AUTH_SESSIONS_COUNT
value: "2"
# Database connection — 127.0.0.1 suggests a sidecar proxy (e.g. Cloud SQL
# Proxy); NOTE(review): confirm against the deployment.
- name: DB_VENDOR
value: mysql
- name: DB_ADDR
value: "127.0.0.1"
- name: DB_PORT
value: "3306"
- name: DB_DATABASE
value: keycloak_prod
- name: DB_USER
value: '{{ .Values.SQL_USER }}'
- name: DB_PASSWORD
value: '{{ .Values.SQL_PASS }}'
Keycloak CLI script:
# Offline JBoss CLI script: starts an embedded server against standalone-ha.xml
# and reconfigures it for Keycloak clustering on Kubernetes (JDBC_PING
# discovery over TCP + replicated Infinispan caches), then shuts down.
embed-server --server-config=standalone-ha.xml --std-out=echo
batch
echo Configuring node identifier
## Sets the node identifier to the node name (= pod name). Node identifiers have to be unique. They can have a
## maximum length of 23 characters. Thus, the chart's fullname template truncates its length accordingly.
/subsystem=transactions:write-attribute(name=node-identifier, value=${jboss.node.name})
echo NodeName: ${jboss.node.name}
echo Finished configuring node identifier
echo CUSTOM_CONFIG: executing CONFIG FOR K8S Failover Support
echo "------------------------------------------------------------------------------------------------------------"
echo "---------------------------------CUSTOM STARTUP CONFIG------------------------------------------------------"
echo "------------------------------------------------------------------------------------------------------------"
## JDBC PING
# Raise the number of cache owners (defaults to env CACHE_OWNERS_COUNT, falling
# back to 2) so cached sessions survive the loss of one pod.
# NOTE(review): these target distributed-cache resources that are removed and
# re-created as replicated caches later in the same batch — confirm intended.
/subsystem=infinispan/cache-container=keycloak/distributed-cache=sessions:write-attribute(name=owners, value=${env.CACHE_OWNERS_COUNT:2})
/subsystem=infinispan/cache-container=keycloak/distributed-cache=authenticationSessions:write-attribute(name=owners, value=${env.CACHE_OWNERS_COUNT:2})
/subsystem=infinispan/cache-container=keycloak/distributed-cache=offlineSessions:write-attribute(name=owners, value=${env.CACHE_OWNERS_COUNT:2})
/subsystem=infinispan/cache-container=keycloak/distributed-cache=loginFailures:write-attribute(name=owners, value=${env.CACHE_OWNERS_COUNT:2})
# Rebuild the `tcp` JGroups stack from scratch so JDBC_PING (member discovery
# via a shared database table) replaces the default discovery protocol.
/subsystem=jgroups/stack=tcp:remove()
/subsystem=jgroups/stack=tcp:add()
/subsystem=jgroups/stack=tcp/transport=TCP:add(socket-binding="jgroups-tcp")
/subsystem=jgroups/stack=tcp/protocol=JDBC_PING:add()
/subsystem=jgroups/stack=tcp/protocol=JDBC_PING/property=datasource_jndi_name:add(value=java:jboss/datasources/KeycloakDS)
# Auto-create the discovery table (matches the JGROUPSPING table shown above).
/subsystem=jgroups/stack=tcp/protocol=JDBC_PING/property=initialize_sql:add(value="CREATE TABLE IF NOT EXISTS JGROUPSPING (own_addr varchar(200) NOT NULL, cluster_name varchar(200) NOT NULL, updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, ping_data varbinary(5000) DEFAULT NULL, PRIMARY KEY (own_addr, cluster_name)) ENGINE=InnoDB DEFAULT CHARSET=utf8")
# Remaining protocols of the stack, added in order (order is significant in
# JGroups stacks).
/subsystem=jgroups/stack=tcp/protocol=MERGE3:add()
/subsystem=jgroups/stack=tcp/protocol=FD_SOCK:add(socket-binding="jgroups-tcp-fd")
/subsystem=jgroups/stack=tcp/protocol=FD:add()
/subsystem=jgroups/stack=tcp/protocol=VERIFY_SUSPECT:add()
/subsystem=jgroups/stack=tcp/protocol=pbcast.NAKACK2:add()
/subsystem=jgroups/stack=tcp/protocol=UNICAST3:add()
/subsystem=jgroups/stack=tcp/protocol=pbcast.STABLE:add()
/subsystem=jgroups/stack=tcp/protocol=pbcast.GMS:add()
/subsystem=jgroups/stack=tcp/protocol=pbcast.GMS/property=max_join_attempts:add(value=5)
/subsystem=jgroups/stack=tcp/protocol=MFC:add()
/subsystem=jgroups/stack=tcp/protocol=FRAG3:add()
# Drop the UDP stack and the multicast binding (multicast is typically
# unavailable on Kubernetes networks) and point the ee channel at tcp.
/subsystem=jgroups/stack=udp:remove()
/subsystem=jgroups/channel=ee:write-attribute(name=stack, value=tcp)
/socket-binding-group=standard-sockets/socket-binding=jgroups-mping:remove()
## Cache Setup for Failover
# Replace the session-related distributed caches with replicated caches so
# every cluster member holds a full copy.
/subsystem=infinispan/cache-container=keycloak/distributed-cache=sessions:remove()
/subsystem=infinispan/cache-container=keycloak/distributed-cache=authenticationSessions:remove()
/subsystem=infinispan/cache-container=keycloak/distributed-cache=offlineSessions:remove()
/subsystem=infinispan/cache-container=keycloak/distributed-cache=clientSessions:remove()
/subsystem=infinispan/cache-container=keycloak/distributed-cache=offlineClientSessions:remove()
/subsystem=infinispan/cache-container=keycloak/distributed-cache=loginFailures:remove()
/subsystem=infinispan/cache-container=keycloak/replicated-cache=sessions:add()
/subsystem=infinispan/cache-container=keycloak/replicated-cache=authenticationSessions:add()
/subsystem=infinispan/cache-container=keycloak/replicated-cache=offlineSessions:add()
/subsystem=infinispan/cache-container=keycloak/replicated-cache=clientSessions:add()
/subsystem=infinispan/cache-container=keycloak/replicated-cache=offlineClientSessions:add()
/subsystem=infinispan/cache-container=keycloak/replicated-cache=loginFailures:add()
echo "------------------------------------------------------------------------------------------------------------"
echo "---------------------------------CUSTOM STARTUP CONFIG DONE!------------------------------------------------"
echo "------------------------------------------------------------------------------------------------------------"
run-batch
# Best-effort (outside the batch): advertise an external address when
# JGROUPS_DISCOVERY_EXTERNAL_IP is set; resolve-expression fails if it is
# unset, and the catch turns that into a log message instead of an error.
try
:resolve-expression(expression=${env.JGROUPS_DISCOVERY_EXTERNAL_IP})
/subsystem=jgroups/stack=tcp/transport=TCP/property=external_addr/:add(value=${env.JGROUPS_DISCOVERY_EXTERNAL_IP})
catch
echo "JGROUPS_DISCOVERY_EXTERNAL_IP maybe not set."
end-try
stop-embedded-server
Log of the restarted Pod:
log-restarted-pod.txt
Log of the still running pod:
log-still-running-pod.txt
I managed to figure out this issue, we need to add below annotation to our ingress.yaml file.
nginx.ingress.kubernetes.io/proxy-buffer-size: "12k"
I'm looking for a simple way to create a list of the keys of a dictionary, as one does in Python with the keys method, but it seems this doesn't exist in Ansible. For example, below is the output of the setup module on a host.
"ansible_lvm": {
...
"pvs": {
"/dev/sda2": {
"free_g": "0",
"size_g": "8.81",
"vg": "rhel"
},
"/dev/sdb1": {
"free_g": "1.50",
"size_g": "3.00",
"vg": "bob"
}
},
The result I would like to achieve is to have the devices, /dev/sda2 ..., in a list so that I can easily test whether a device is in the list of pvs, or perform other operations on the list of keys. I know there are workarounds for about every case I have come up with, but I feel like I'm missing something, since it seems like something rather fundamental.
Simply transform your dict to a list and you will get its keys as in the following example:
---
# Demo playbook: piping a dict through the `list` filter yields a list of its
# keys (the Jinja2 equivalent of Python's dict.keys()).
- name: List of keys demo
hosts: localhost
gather_facts: false
# Sample data copied from the ansible_lvm.pvs fact shown in the question.
vars:
"pvs": {
"/dev/sda2": {
"free_g": "0",
"size_g": "8.81",
"vg": "rhel"
},
"/dev/sdb1": {
"free_g": "1.50",
"size_g": "3.00",
"vg": "bob"
}
}
tasks:
- name: "List keys"
debug:
# Iterating a mapping in Jinja2 yields its keys, so `pvs | list`
# produces ["/dev/sda2", "/dev/sdb1"].
msg: "{{ pvs | list }}"
which gives
PLAY [List of keys demo] ********************************************************************************************************************************************************************************************************************
TASK [List keys] ****************************************************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": [
"/dev/sda2",
"/dev/sdb1"
]
}
PLAY RECAP **********************************************************************************************************************************************************************************************************************************
localhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
My gcloud firebase test android run command is stuck uploading the app-debug-androidTest.apk. What is an example of the output for this command once it gets past the following point where it's stuck for me?
FirebaseTestLabPlayground[master]15:40:36 gcloud firebase test android run \
> --project locuslabs-android-sdk \
> --app app/build/outputs/apk/debug/app-debug.apk \
> --test app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk \
> --device model=Pixel2,version=27,locale=en_US,orientation=portrait \
> --verbosity debug
INFO: Test Service endpoint: [None]
INFO: Tool Results endpoint: [None]
DEBUG: Running [gcloud.firebase.test.android.run] with arguments: [--app: "app/build/outputs/apk/debug/app-debug.apk", --device: "[OrderedDict([(u'model', u'Pixel2'), (u'version', u'27'), (u'locale', u'en_US'), (u'orientation', u'portrait')])]", --project: "locuslabs-android-sdk", --test: "app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk", --verbosity: "debug"]
Have questions, feedback, or issues? Get support by visiting:
https://firebase.google.com/support/
DEBUG: Applying default auto_google_login: True
DEBUG: Applying default performance_metrics: True
DEBUG: Applying default num_flaky_test_attempts: 0
DEBUG: Applying default record_video: True
DEBUG: Applying default timeout: 900
DEBUG: Applying default async: False
INFO: Raw results root path is: [gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/]
Uploading [app/build/outputs/apk/debug/app-debug.apk] to Firebase Test Lab...
Uploading [app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk] to Firebase Test Lab...
What will likely come next?
Here is the rest of the transcript in case it helps anyone else who's stuck trying gcloud firebase test android run for the first time:
FirebaseTestLabPlayground[master]15:40:36 gcloud firebase test android run \
> --project locuslabs-android-sdk \
> --app app/build/outputs/apk/debug/app-debug.apk \
> --test app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk \
> --device model=Pixel2,version=27,locale=en_US,orientation=portrait \
> --verbosity debug
INFO: Test Service endpoint: [None]
INFO: Tool Results endpoint: [None]
DEBUG: Running [gcloud.firebase.test.android.run] with arguments: [--app: "app/build/outputs/apk/debug/app-debug.apk", --device: "[OrderedDict([(u'model', u'Pixel2'), (u'version', u'27'), (u'locale', u'en_US'), (u'orientation', u'portrait')])]", --project: "locuslabs-android-sdk", --test: "app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk", --verbosity: "debug"]
Have questions, feedback, or issues? Get support by visiting:
https://firebase.google.com/support/
DEBUG: Applying default auto_google_login: True
DEBUG: Applying default performance_metrics: True
DEBUG: Applying default num_flaky_test_attempts: 0
DEBUG: Applying default record_video: True
DEBUG: Applying default timeout: 900
DEBUG: Applying default async: False
INFO: Raw results root path is: [gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/]
Uploading [app/build/outputs/apk/debug/app-debug.apk] to Firebase Test Lab...
Uploading [app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk] to Firebase Test Lab...
Raw results will be stored in your GCS bucket at [https://console.developers.google.com/storage/browser/test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/]
DEBUG: TestMatrices.Create request:
<TestingProjectsTestMatricesCreateRequest
projectId: u'locuslabs-android-sdk'
requestId: '3c76ca4e247d4b38bf102ffcdbaa637b'
testMatrix: <TestMatrix
clientInfo: <ClientInfo
clientInfoDetails: [<ClientInfoDetail
key: u'Cloud SDK Version'
value: '242.0.0'>, <ClientInfoDetail
key: u'Release Track'
value: 'GA'>]
name: u'gcloud'>
environmentMatrix: <EnvironmentMatrix
androidDeviceList: <AndroidDeviceList
androidDevices: [<AndroidDevice
androidModelId: u'Pixel2'
androidVersionId: u'27'
locale: u'en_US'
orientation: u'portrait'>]>>
flakyTestAttempts: 0
resultStorage: <ResultStorage
googleCloudStorage: <GoogleCloudStorage
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/'>
toolResultsHistory: <ToolResultsHistory
projectId: u'locuslabs-android-sdk'>>
testExecutions: []
testSpecification: <TestSpecification
androidInstrumentationTest: <AndroidInstrumentationTest
appApk: <FileReference
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/app-debug.apk'>
orchestratorOption: OrchestratorOptionValueValuesEnum(ORCHESTRATOR_OPTION_UNSPECIFIED, 0)
testApk: <FileReference
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/app-debug-androidTest.apk'>
testTargets: []>
disablePerformanceMetrics: False
disableVideoRecording: False
testSetup: <TestSetup
account: <Account
googleAuto: <GoogleAuto>>
additionalApks: []
directoriesToPull: []
environmentVariables: []
filesToPush: []>
testTimeout: u'900s'>>>
DEBUG: TestMatrices.Create response:
<TestMatrix
clientInfo: <ClientInfo
clientInfoDetails: [<ClientInfoDetail
key: u'Cloud SDK Version'
value: u'242.0.0'>, <ClientInfoDetail
key: u'Release Track'
value: u'GA'>]
name: u'gcloud'>
environmentMatrix: <EnvironmentMatrix
androidDeviceList: <AndroidDeviceList
androidDevices: [<AndroidDevice
androidModelId: u'Pixel2'
androidVersionId: u'27'
locale: u'en_US'
orientation: u'portrait'>]>>
projectId: u'locuslabs-android-sdk'
resultStorage: <ResultStorage
googleCloudStorage: <GoogleCloudStorage
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/'>
toolResultsHistory: <ToolResultsHistory
projectId: u'locuslabs-android-sdk'>>
state: StateValueValuesEnum(VALIDATING, 1)
testExecutions: [<TestExecution
environment: <Environment
androidDevice: <AndroidDevice
androidModelId: u'Pixel2'
androidVersionId: u'27'
locale: u'en_US'
orientation: u'portrait'>>
id: u'matrix-fq9ojlzvta35a_execution-2kcgdj0bkm22a'
matrixId: u'matrix-fq9ojlzvta35a'
projectId: u'locuslabs-android-sdk'
state: StateValueValuesEnum(VALIDATING, 1)
testSpecification: <TestSpecification
androidInstrumentationTest: <AndroidInstrumentationTest
appApk: <FileReference
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/app-debug.apk'>
testApk: <FileReference
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/app-debug-androidTest.apk'>
testTargets: []>
testSetup: <TestSetup
account: <Account
googleAuto: <GoogleAuto>>
additionalApks: []
directoriesToPull: []
environmentVariables: []
filesToPush: []>
testTimeout: u'900s'>
timestamp: u'2019-04-19T08:42:36.638Z'>]
testMatrixId: u'matrix-fq9ojlzvta35a'
testSpecification: <TestSpecification
androidInstrumentationTest: <AndroidInstrumentationTest
appApk: <FileReference
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/app-debug.apk'>
testApk: <FileReference
gcsPath: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/app-debug-androidTest.apk'>
testTargets: []>
testSetup: <TestSetup
account: <Account
googleAuto: <GoogleAuto>>
additionalApks: []
directoriesToPull: []
environmentVariables: []
filesToPush: []>
testTimeout: u'900s'>
timestamp: u'2019-04-19T08:42:36.638Z'>
Test [matrix-fq9ojlzvta35a] has been created in the Google Cloud.
Firebase Test Lab will execute your instrumentation test on 1 device(s).
Creating individual test executions...done.
Test results will be streamed to [https://console.firebase.google.com/project/locuslabs-android-sdk/testlab/histories/bh.f0b3cb84d82b84d2/matrices/7272098978475034799].
15:42:48 Test is Pending
15:43:11 Starting attempt 1.
15:43:11 Test is Running
15:44:07 Started logcat recording.
15:44:07 Preparing device.
15:44:38 Logging in to Google account on device.
15:44:38 Installing apps.
15:44:53 Retrieving Pre-Test Package Stats information from the device.
15:44:53 Retrieving Performance Environment information from the device.
15:44:53 Started crash detection.
15:44:53 Started crash monitoring.
15:44:53 Started performance monitoring.
15:44:53 Started video recording.
15:44:53 Starting instrumentation test.
15:45:00 Completed instrumentation test.
15:45:14 Stopped performance monitoring.
15:45:29 Stopped crash monitoring.
15:45:29 Stopped logcat recording.
15:45:29 Retrieving Post-test Package Stats information from the device.
15:45:29 Logging out of Google account on device.
15:45:29 Done. Test time = 4 (secs)
15:45:29 Starting results processing. Attempt: 1
15:45:37 Completed results processing. Time taken = 4 (secs)
15:45:37 Test is Finished
INFO: Test matrix completed in state: FINISHED
Instrumentation testing complete.
More details are available at [https://console.firebase.google.com/project/locuslabs-android-sdk/testlab/histories/bh.f0b3cb84d82b84d2/matrices/7272098978475034799].
DEBUG:
TRHistoriesExecutions.Get response:
<Execution
completionTime: <Timestamp
nanos: 674000000
seconds: 1555663532>
creationTime: <Timestamp
nanos: 31000000
seconds: 1555663361>
executionId: u'7272098978475034799'
outcome: <Outcome
summary: SummaryValueValuesEnum(success, 4)>
specification: <Specification
androidTest: <AndroidTest
androidAppInfo: <AndroidAppInfo
name: u'FirebaseTestLabPlayground'
packageName: u'com.example.firebasetestlabplayground'
versionCode: u'1'
versionName: u'1.0'>
androidInstrumentationTest: <AndroidInstrumentationTest
testPackageId: u'com.example.firebasetestlabplayground.test'
testRunnerClass: u'android.support.test.runner.AndroidJUnitRunner'
testTargets: []>
testTimeout: <Duration
seconds: 900>>>
state: StateValueValuesEnum(complete, 0)
testExecutionMatrixId: u'matrix-fq9ojlzvta35a'>
DEBUG:
ToolResultsSteps.List response:
<ListStepsResponse
steps: [<Step
completionTime: <Timestamp
nanos: 849000000
seconds: 1555663531>
creationTime: <Timestamp
nanos: 232000000
seconds: 1555663361>
description: u'all targets'
dimensionValue: [<StepDimensionValueEntry
key: u'Model'
value: u'Pixel2'>, <StepDimensionValueEntry
key: u'Version'
value: u'27'>, <StepDimensionValueEntry
key: u'Locale'
value: u'en_US'>, <StepDimensionValueEntry
key: u'Orientation'
value: u'portrait'>]
labels: []
name: u'Instrumentation test'
outcome: <Outcome
summary: SummaryValueValuesEnum(success, 4)>
runDuration: <Duration
nanos: 617000000
seconds: 170>
state: StateValueValuesEnum(complete, 0)
stepId: u'bs.b2c854c31dd1dcd1'
testExecutionStep: <TestExecutionStep
testIssues: [<TestIssue
category: CategoryValueValuesEnum(common, 0)
errorMessage: u'Test is compatible with Android Test Orchestrator.'
severity: SeverityValueValuesEnum(suggestion, 2)
type: TypeValueValuesEnum(compatibleWithOrchestrator, 2)>]
testSuiteOverviews: [<TestSuiteOverview
totalCount: 1
xmlSource: <FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/test_result_1.xml'>>]
testTiming: <TestTiming
testProcessDuration: <Duration
seconds: 4>>
toolExecution: <ToolExecution
commandLineArguments: []
toolLogs: [<FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/logcat'>]
toolOutputs: [<ToolOutputReference
output: <FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/test_cases/0000_logcat'>
testCase: <TestCaseReference
className: u'com.example.firebasetestlabplayground.ExampleInstrumentedTest'
name: u'useAppContext'>>, <ToolOutputReference
output: <FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/test_result_1.xml'>>, <ToolOutputReference
output: <FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/video.mp4'>>, <ToolOutputReference
output: <FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/bugreport.txt'>>, <ToolOutputReference
output: <FileReference
fileUri: u'gs://test-lab-bcr7j9th055js-i215tdq3ht0hw/2019-04-19_15:41:26.364106_bmag/Pixel2-27-en_US-portrait/instrumentation.results'>>]>>>]>
INFO: Display format: "
table[box](
outcome.color(red=Fail, green=Pass, yellow=Inconclusive),
axis_value:label=TEST_AXIS_VALUE,
test_details:label=TEST_DETAILS
)
"
┌─────────┬──────────────────────────┬─────────────────────┐
│ OUTCOME │ TEST_AXIS_VALUE │ TEST_DETAILS │
├─────────┼──────────────────────────┼─────────────────────┤
│ Passed │ Pixel2-27-en_US-portrait │ 1 test cases passed │
└─────────┴──────────────────────────┴─────────────────────┘
FirebaseTestLabPlayground[master]15:45:45 gcloud firebase test android run --project locuslabs-android-sdk --app app/build/outputs/apk/debug/app-debug.apk --test app/build/outputs/apk/androidTest/debug/app-debug-androidTest.apk --device model=Pixel2,version=27,locale=en_US,orientation=portrait --verbosity debug
aws dynamodb put-item --table-name mytable ^ --item '{ "CountryCode": {"S": "US" }, "MyBank": {"S": "HSBC"}}'
Error : Unknown options : {, S:, US, },, MyBank: , {S:, HSBC}}' , CountryCode:
I would like to create several node on my bigip. For that I want to do a loop on my var prompt and register each value in my variable {{node_list}}.
This is what I've tried
# Play: create BIG-IP nodes from interactively prompted "name;ip" values.
- name: node creation
hosts: F5
gather_facts: no
connection: local
vars_prompt:
## ASK NUMBER OF NODES
- name: node_nb
prompt: "number of nodes"
private: no
## ASK THE NAME AND IP WITH FORMAT NAME;IP
- name: node_list
prompt: "name and Ip of the node like that toto;1.1.1.1"
private: no
# NOTE(review): loop keywords like with_sequence are only valid on tasks,
# not on vars_prompt entries — this line has no effect, which is why the
# prompt only appears once (see the discussion below the snippet).
with_sequence: count={{ node_nb | int }}
# NOTE(review): this task appears at play level; it would normally sit
# under a `tasks:` section.
- name: Create node
bigip_node:
user: '{{ ansible_user }}'
password: '{{ ansible_password }}'
server: 'xxxxx'
# Each item is expected to be "name;ip": index 0 is the node name,
# index 1 is its IP address.
host: '{{ (item).split(";")[1] }}'
name: '{{ (item).split(";")[0] }}'
partition: 'Common'
state: present
validate_certs: false
# node_list as entered is a single string, so this iterates only once
# unless an actual list is supplied.
with_items: '{{ node_list }}'
First :
My vars_prompt doesn't loop if, for example, I specify "4" for {{ node_nb }}. The question is prompted one time, but I want it prompted 4 times.
Second:
I would like to register all the information entered each time in a list. If I want 4 nodes, I need to have 4 items in my list.
Just have them enter the list separated by spaces, since you are already using ; to separate node names from IPs. This additionally saves you the trouble of having to prompt for the count, because the count will be however many items there are in the list.
with_sequence only works with tasks.
so just keep one variable, node_list, in the vars_prompt and pass a ','-separated list ['asd;1.1.1.1','sdf;2.2.2.2'] as the value.