Redux Toolkit | createEntityAdapter userSelectors.selectAll giving Cannot read properties of undefined (reading 'map') - redux

Hi, I am using Redux Toolkit's createEntityAdapter for CRUD. When I get all users from the API and read them with userSelectors.selectAll, it gives "Cannot read properties of undefined (reading 'map')".
Let me show my API response:
{
"data": [
{
"id": 16,
"name": "Admin",
"email": "admin#admin.com",
"assigned_roles": [
"Administrator"
],
"created_at": "2022-10-06T20:08:32.000000Z"
}
],
"links": {
"first": "http://laravel-api.test/api/users?page=1",
"last": "http://laravel-api.test/api/users?page=1",
"prev": null,
"next": null
},
"meta": {
"current_page": 1,
"from": 1,
"last_page": 1,
"links": [
{
"url": null,
"label": "« Previous",
"active": false
},
{
"url": "http://laravel-api.test/api/users?page=1",
"label": "1",
"active": true
},
{
"url": null,
"label": "Next »",
"active": false
}
],
"path": "http://laravel-api.test/api/users",
"per_page": 15,
"to": 1,
"total": 1
}
}
I am using createAsyncThunk to get the data from the API. services/userService.js file:
import { createAsyncThunk } from "@reduxjs/toolkit";
import axios from "axios";
import { API_URL, ACCESS_TOKEN } from "../constants";

export const fetchUsers = createAsyncThunk(
  'user/fetchUsers',
  async (page) => {
    // axios resolves with a response object; its `data` property is the JSON body
    const response = await axios(API_URL + '/users?page=' + page, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${ACCESS_TOKEN}`
      },
    });
    return response.data;
  }
)
features/userSlice.js
import { createSlice, createEntityAdapter } from "@reduxjs/toolkit";
import { HTTP_STATUS } from "../constants";
import { fetchUsers } from "../services/userService";

const userAdapter = createEntityAdapter({
  selectId: (user) => user.id,
});

const userSlice = createSlice({
  name: "user",
  initialState: {
    loading: false,
    status: null,
    message: "",
  },
  reducers: {
    pageByNumber: (state, { payload }) => {
      state.page = payload.page
    },
    nextPage: (state, { payload }) => {
      state.page += 1 // note: `state.page = state.page++` was a no-op
    },
    previousPage: (state, { payload }) => {
      state.page -= 1
    },
    clear: (state) => {
      state.status = null
      state.message = null
    }
  },
  extraReducers: {
    [fetchUsers.pending]: (state, action) => {
      state.loading = true
      state.status = HTTP_STATUS.PENDING
    },
    [fetchUsers.fulfilled]: (state, { payload }) => {
      console.log(payload.data);
      state.loading = false
      state.page = payload.meta.current_page
      state.total_pages = Math.ceil(payload.meta.total / payload.meta.per_page)
      userAdapter.setAll(state, payload.data)
      state.status = HTTP_STATUS.FULFILLED
    },
    [fetchUsers.rejected]: (state, { payload }) => {
      state.loading = false
      state.status = HTTP_STATUS.REJECTED
    },
  },
});

export const userSelectors = userAdapter.getSelectors(
  (state) => state.user
)
export const { pageByNumber, nextPage, previousPage, clear } = userSlice.actions
export default userSlice.reducer
views/users/index.js
I do not understand why there is this 'map' error on an array.

Hi, I found the answer: I had not created my initial state through the entity adapter. Below is the updated code for the initial state.
initialState: userAdapter.getInitialState({ // initial state should be wrapped in the adapter
  loading: false,
  status: null,
  message: "",
}),
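For completeness, here is a minimal sketch of how the corrected slice and the selectors fit together, using the same fetchUsers thunk as above (the builder-callback form of extraReducers shown here is just an equivalent spelling of the object form in the question, and the component usage at the end is a hypothetical illustration):
```javascript
// Minimal sketch — assumes the fetchUsers thunk from services/userService above
import { createSlice, createEntityAdapter } from "@reduxjs/toolkit";
import { fetchUsers } from "../services/userService";

const userAdapter = createEntityAdapter({
  selectId: (user) => user.id,
});

const userSlice = createSlice({
  name: "user",
  // getInitialState provides the { ids: [], entities: {} } shape that selectAll expects
  initialState: userAdapter.getInitialState({
    loading: false,
    status: null,
    message: "",
  }),
  reducers: {},
  extraReducers: (builder) => {
    builder.addCase(fetchUsers.fulfilled, (state, { payload }) => {
      state.loading = false;
      userAdapter.setAll(state, payload.data); // payload.data is the users array from the API
    });
  },
});

export const userSelectors = userAdapter.getSelectors((state) => state.user);
export default userSlice.reducer;

// Hypothetical component usage:
// const users = useSelector(userSelectors.selectAll); // returns [] before the fetch, not undefined
```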

Related

Vue3 Pinia action is not being called

I am trying to set up a store using Pinia in Vue 3 with Apollo Client and the Composition API.
Here is my store:
import { defineStore } from "pinia";
import apolloClient from "../apollo.provider";
import { ALL_POSTS } from '@/constants/blog'

export const usePostsStore = defineStore("posts", {
  state: () => ({
    posts: []
  }),
  actions: {
    async AllPosts() {
      const { data } = await apolloClient.query({
        query: ALL_POSTS,
        fetchPolicy: 'no-cache'
      })
      console.log(data)
      this.posts = data.posts.data
    },
  },
})
and here is the response
{
"data": {
"posts": {
"data": [
{
"id": 2
},
{
"id": 3
},
{
"id": 5
},
{
"id": 4
},
{
"id": 1
}
]
}
}
}
I get an empty array with this setup. Please help me understand how to get the posts!
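For context, a component normally has to call that action explicitly before posts contains anything; a rough sketch (the component shape and the store path are hypothetical, not from the question):
```javascript
// Hypothetical component sketch — the store import path is an assumption
import { onMounted } from "vue";
import { storeToRefs } from "pinia";
import { usePostsStore } from "@/stores/posts"; // adjust to wherever the store file lives

export default {
  setup() {
    const store = usePostsStore();
    const { posts } = storeToRefs(store); // keeps reactivity when pulling state off the store

    onMounted(() => {
      store.AllPosts(); // the action only populates posts if it is actually invoked somewhere
    });

    return { posts };
  },
};
```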

How to customize Grid.js pagination with Supabase?

I'm a software engineer in Cambodia.
I want to customize Grid.js pagination with Supabase, but I've run into a problem.
I can't find the solution in the documentation.
I'm using Nuxt 3.
Please tell me how to implement this.
The code is below:
onMounted(() => {
grid.updateConfig({
columns: [
{ name: 'Avatar', id: 'avatar' },
{ name: 'name', id: 'name' },
{ name: 'gender', id: 'gender' },
{ name: 'email', id: 'email' },
{ name: 'phone', id: 'phone' },
{ name: 'address', id: 'address' },
],
pagination: {
enabled: true,
limit: 5,
server: {
url: (prev, page, limit) => `${prev}&limit=${limit}&offset=${page * limit}`
},
summary: true,
},
server: {
keepalive: true,
data: async (opt) => {
console.log(opt)
const { data: customers, error, count } = await supabase
.from('customers')
.select('id, name, gender, email, phone, address, avatar', { count: 'exact' })
.is('deleted_at', null)
.order('created_at', { ascending: false })
return {
data: customers.map((customer) => [
customer.avatar,
customer.name,
customer.gender,
customer.email,
customer.phone,
customer.address,
]),
total: count,
}
},
},
width: '100%',
search: true,
pagination: true,
fixedHeader: true,
className: {
td: 'sr-td-class',
table: 'sr-table',
},
})
grid.render(table.value)
})
I found a solution.
Grid.js configuration:
onMounted(() => {
grid.updateConfig({
columns: [
{ name: 'Avatar', id: 'avatar' },
{ name: 'name', id: 'name' },
{ name: 'gender', id: 'gender' },
{ name: 'email', id: 'email' },
{ name: 'phone', id: 'phone' },
{ name: 'address', id: 'address' },
],
pagination: {
enabled: true,
limit: 5,
server: {
url: (prev, page, limit) => `${prev}&limit=${limit}&offset=${page * limit}`
},
summary: true,
},
server: {
keepalive: true,
data: async (opt) => {
console.log(opt)
const { data: customers, error, count } = await supabase
.from('customers')
.select('id, name, gender, email, phone, address, avatar', { count: 'exact' })
.is('deleted_at', null)
.order('created_at', { ascending: false })
return {
data: customers.map((customer) => [
customer.avatar,
customer.name,
customer.gender,
customer.email,
customer.phone,
customer.address,
]),
total: count,
}
},
},
width: '100%',
fixedHeader: true,
className: {
td: 'sr-td-class',
table: 'sr-table',
},
})
grid.render(table.value)
})
Then create a server/api directory and add a customers.ts file inside it.
This is the code in customers.ts:
import { serverSupabaseUser, serverSupabaseClient } from '#supabase/server'
export default defineEventHandler(async (event) => {
const user = await serverSupabaseUser(event)
const client = serverSupabaseClient(event)
const query = useQuery(event)
const from = query.page ? parseInt(query.page) * parseInt(query.limit) : 0
const to = query.page ? from + parseInt(query.limit) : query.limit
if (!user) {
throw createError({ statusCode: 401, message: 'Unauthorized' })
}
const { data, error, count } = await client
.from('customers')
.select('id, name, gender, email, phone, address, avatar', {
count: 'exact',
})
.is('deleted_at', null)
.order('created_at', { ascending: false })
.range(from, to)
return { customers: data, count }
})
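One thing the posted Grid.js config does not show is pointing the table at that new route; assuming the /api/customers handler above, the top-level server block could look roughly like this (the field mapping is an assumption based on the selected columns, and Grid.js's `then`/`total` options are used to adapt the route's `{ customers, count }` response):
```javascript
// Sketch only (assumption): wiring Grid.js to the /api/customers route defined above
grid.updateConfig({
  pagination: {
    enabled: true,
    limit: 5,
    server: {
      url: (prev, page, limit) => `${prev}&limit=${limit}&offset=${page * limit}`,
    },
  },
  server: {
    url: '/api/customers?', // the pagination url function above appends the paging query parameters
    then: (res) =>
      res.customers.map((c) => [c.avatar, c.name, c.gender, c.email, c.phone, c.address]),
    total: (res) => res.count,
  },
});
```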

Getting a fetch error using redux toolkit and RTK-Query

I am using RTK-Query, and Redux-toolkit for this app, and I created an api-slice with createApi, as per the docs.
When I run a request to the backend, I get a "FETCH_ERROR"; however, when I run the same request using Axios, I get the data correctly from the backend, which leads me to believe I have an error in my code. I am just not sure where exactly it is.
Here is the error:
Object {
"api": Object {
"config": Object {
"focused": true,
"keepUnusedDataFor": 60,
"middlewareRegistered": true,
"online": true,
"reducerPath": "api",
"refetchOnFocus": false,
"refetchOnMountOrArgChange": false,
"refetchOnReconnect": false,
},
"mutations": Object {},
"provided": Object {},
"queries": Object {
"test(undefined)": Object {
"endpointName": "test",
"error": Object {
"error": "TypeError: Network request failed",
"status": "FETCH_ERROR",
},
"requestId": "BWOuLpOxoDKTzlUYFLW4x",
"startedTimeStamp": 1643667104869,
"status": "rejected",
},
},
"subscriptions": Object {
"test(undefined)": Object {
"QJSCV641RznGWyudGWuMb": Object {
"pollingInterval": 0,
"refetchOnFocus": undefined,
"refetchOnReconnect": undefined,
},
},
},
},
"test": Object {
"data": Array [],
},
}
Here is the test slice:
import { createSlice } from "@reduxjs/toolkit";

const testSlice = createSlice({
  name: "test",
  initialState: {
    data: [],
  },
  reducers: {
    getData: (state) => {
      state;
    },
  },
});

export const { getData } = testSlice.actions;
export default testSlice.reducer;
Here is the apiSlice:
import { createApi, fetchBaseQuery } from "@reduxjs/toolkit/query/react";

export const apiSlice = createApi({
  reducerPath: "test",
  baseQuery: fetchBaseQuery({ baseUrl: process.env.REACT_APP_backend_url }),
  endpoints: (builder) => ({
    test: builder.query({
      query: () => "/test",
    }),
  }),
});

export const { useTestQuery } = apiSlice;
I solved it by changing the backend URL in my .env file to my machine's current IPv4 address (for Expo development; otherwise just use whatever your backend URL is), then clearing the cache and restarting my app. In my case I was using Expo, so expo r -c, and it worked.
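As a rough illustration of that fix (the IP address and port below are placeholders, not values from the question):
```javascript
// Hypothetical .env entry — use the dev machine's LAN IPv4 rather than "localhost",
// since a physical device running Expo resolves localhost to the device itself:
//   REACT_APP_backend_url=http://192.168.1.23:4000

// fetchBaseQuery then picks the value up exactly as in the apiSlice above:
import { fetchBaseQuery } from "@reduxjs/toolkit/query/react";
const baseQuery = fetchBaseQuery({ baseUrl: process.env.REACT_APP_backend_url });

// After changing .env, clear the bundler cache so the new value is bundled:
//   expo r -c
```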

Why does my utterance not trigger the intent?

When I say "get me" at the end, it does not trigger the intent that I intend to trigger. My code and interaction model are below.
/* *
This sample demonstrates handling intents from an Alexa skill using the Alexa Skills Kit SDK (v2).
Please visit https://alexa.design/cookbook for additional examples on implementing slots, dialog management,
session persistence, api calls, and more.
*/
const Alexa = require('ask-sdk-core');
const msg = require("./localisation");
const constants = require("./constants");
const getSlotValues = (handlerInput) => {
return handlerInput.requestEnvelope.request.intent.slots;
}
const LaunchRequestHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'LaunchRequest';
},
handle(handlerInput) {
const speakOutput = msg.HELLO_MSG;
return handlerInput.responseBuilder
.speak(speakOutput)
.reprompt(speakOutput)
.getResponse();
}
};
// If the user says "follow up", we are going to ask another question.
const FollowUpIntentHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
&& Alexa.getIntentName(handlerInput.requestEnvelope) === 'FollowUpIntentHandler';
},
handle(handlerInput) {
const {responseBuilder, attributesManager} = handlerInput;
const slotValue = getSlotValues(handlerInput);
const sessionAttribute = attributesManager.getSessionAttributes();
sessionAttribute['follow']= slotValue.follow.value;
// const sessionAttribute = attributeManager.getSessionAttributes();
// const slotValues = getSlotValues(handlerInput);
// sessionAttribute["follow"] = slotValues.follow.value;
const speakOutput = msg.FOLLOW_UP_QUESTION_MSG;
return responseBuilder
.speak(speakOutput)
//.reprompt('add a reprompt if you want to keep the session open for the user to respond')
.getResponse();
}
};
// If the user says "new", we are going to ask another question.
const NewIntentHandler = {
canHandle(handlerInput){
const {request} = handlerInput.requestEnvelope;
return request.type === 'IntentRequest'
&& request.intent.name === "NewIntentHandler";
},
handle(handlerInput){
const {responseBuilder, attributeManager} = handlerInput;
const sessionAttribute = attributeManager.getSessionAttributes();
const slotValues = getSlotValues(handlerInput);
sessionAttribute["new"] = slotValues.new.value;
const speakOutput = msg.NEW_QUESTION_MSG;
return responseBuilder.speak(speakOutput).reprompt(speakOutput).getResponse();
}
};
const CaptureUserQuestionIntentHandler = {
canHandle(handlerInput){
const {request} = handlerInput.requestEnvelope;
const {attributesManager} = handlerInput;
const sessionAttribute = attributesManager.getSessionAttributes();
return request.type === "IntentRequest"
&& request.intent.name === "CaptureUserQuestionIntentHandler"
|| sessionAttribute['new']
|| sessionAttribute['follow'];
},
handle(handlerInput){
const slotValue = getSlotValues(handlerInput);
const {responseBuilder, attributesManager} = handlerInput;
const sessionAttribute = attributesManager.getSessionAttributes();
sessionAttribute["bodyPart"] = slotValue.bodyPart.value;
sessionAttribute["drainage"] = slotValue.drainage.value;
sessionAttribute["time"] = slotValue.time.value;
// const bodyPart = slotValue.bodyPart.value;
// const time = slotValue.time.value;
// const drainage = slotValue.drainage.value;
const speakOutput = `What is the ${sessionAttribute['drainage']} coming from? The back of the ear near the incision or the opening of your ear or ear canal?`;
return responseBuilder
.speak(speakOutput)
//.reprompt(speakOutput)
.getResponse()
}
};
const BackOrNearIncisionIntentHandler = {
canHandle(handlerInput){
const {request} = handlerInput.requestEnvelope;
const {attributesManager} = handlerInput;
const sessionAttribute = attributesManager.getSessionAttributes();
return request.type === "IntentRequest"
&& request.intent.name === "BackOrNearIncisionIntentHandler"
&& sessionAttribute["drainage"];
},
handle(handlerInput){
const slotValues = getSlotValues(handlerInput);
const {attributesManager, responseBuilder} = handlerInput;
const sessionAttribute = attributesManager.getSessionAttributes();
let speakOutput;
sessionAttribute['color'] = slotValues.color.value;
const color = sessionAttribute['color'];
if(color === constants.CLEAR){
speakOutput = msg.IMPORANT_GET_TREATED;
}else{
speakOutput = msg.SUGGEST_APPLY_MEDICINE;
}
return responseBuilder
.speak(speakOutput)
//.reprompt(speakOutput)
.getResponse()
}
};
const YesPleaseIntentHandler = {
canHandle(handlerInput){
const {request} = handlerInput.requestEnvelope;
const {attributesManager} = handlerInput;
const sessionAttribute = attributesManager.getSessionAttributes();
return request.type === "IntentRequest"
&& request.intent.name === "AMAZON.YesIntent"
&& sessionAttribute["color"] === constants.CLEAR;
},
handle(handlerInput){
const {responseBuilder} = handlerInput;
// let speakOutput = msg.YOUR_NEXT_APPOINTMENT;
let speakOutput = msg.YOUR_NEXT_APPOINTMENT;
return responseBuilder
.speak(speakOutput)
//.reprompt(speakOutput)
.getResponse()
}
};
const UserIntentHandlers = {
canHandle(handlerInput){
const {request} = handlerInput.requestEnvelope;
return request.type === "IntentRequest"
&& request.intent.name === "UserIntentHandler";
},
handle(handlerInput){
const {responseBuilder} = handlerInput;
// let speakOutput = msg.YOUR_NEXT_APPOINTMENT;
let speakOutput = msg.YOUR_NEXT_APPOINTMENT;
console.log(responseBuilder);
return responseBuilder
.speak(speakOutput)
//.reprompt(speakOutput)
.getResponse()
}
};
const HelpIntentHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
&& Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.HelpIntent';
},
handle(handlerInput) {
const speakOutput = 'You can say hello to me! How can I help?';
return handlerInput.responseBuilder
.speak(speakOutput)
.reprompt(speakOutput)
.getResponse();
}
};
const CancelAndStopIntentHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
&& (Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.CancelIntent'
|| Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.StopIntent');
},
handle(handlerInput) {
const speakOutput = 'Goodbye!';
return handlerInput.responseBuilder
.speak(speakOutput)
.getResponse();
}
};
/* *
* FallbackIntent triggers when a customer says something that doesn’t map to any intents in your skill
* It must also be defined in the language model (if the locale supports it)
* This handler can be safely added but will be ignored in locales that do not support it yet
* */
const FallbackIntentHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
&& Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.FallbackIntent';
},
handle(handlerInput) {
const speakOutput = 'Sorry, I don\'t know about that. Please try again.';
return handlerInput.responseBuilder
.speak(speakOutput)
.reprompt(speakOutput)
.getResponse();
}
};
/* *
* SessionEndedRequest notifies that a session was ended. This handler will be triggered when a currently open
* session is closed for one of the following reasons: 1) The user says "exit" or "quit". 2) The user does not
* respond or says something that does not match an intent defined in your voice model. 3) An error occurs
* */
const SessionEndedRequestHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'SessionEndedRequest';
},
handle(handlerInput) {
console.log(`~~~~ Session ended: ${JSON.stringify(handlerInput.requestEnvelope)}`);
// Any cleanup logic goes here.
return handlerInput.responseBuilder.getResponse(); // notice we send an empty response
}
};
/* *
* The intent reflector is used for interaction model testing and debugging.
* It will simply repeat the intent the user said. You can create custom handlers for your intents
* by defining them above, then also adding them to the request handler chain below
* */
const IntentReflectorHandler = {
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest';
},
handle(handlerInput) {
const intentName = Alexa.getIntentName(handlerInput.requestEnvelope);
const speakOutput = `You just triggered ${intentName}`;
return handlerInput.responseBuilder
.speak(speakOutput)
//.reprompt('add a reprompt if you want to keep the session open for the user to respond')
.getResponse();
}
};
/**
* Generic error handling to capture any syntax or routing errors. If you receive an error
* stating the request handler chain is not found, you have not implemented a handler for
* the intent being invoked or included it in the skill builder below
* */
const ErrorHandler = {
canHandle() {
return true;
},
handle(handlerInput, error) {
const speakOutput = 'Sorry, I had trouble doing what you asked. Please try again.';
console.log(`~~~~ Error handled: ${JSON.stringify(error)}`);
return handlerInput.responseBuilder
.speak(speakOutput)
.reprompt(speakOutput)
.getResponse();
}
};
/**
* This handler acts as the entry point for your skill, routing all request and response
* payloads to the handlers above. Make sure any new handlers or interceptors you've
* defined are included below. The order matters - they're processed top to bottom
* */
exports.handler = Alexa.SkillBuilders.custom()
.addRequestHandlers(
LaunchRequestHandler,
YesPleaseIntentHandler,
FollowUpIntentHandler,
BackOrNearIncisionIntentHandler,
NewIntentHandler,
CaptureUserQuestionIntentHandler,
UserIntentHandlers,
HelpIntentHandler,
CancelAndStopIntentHandler,
FallbackIntentHandler,
SessionEndedRequestHandler,
IntentReflectorHandler)
.addErrorHandlers(
ErrorHandler)
.withCustomUserAgent('sample/hello-world/v1.2')
.lambda();
{
"interactionModel": {
"languageModel": {
"invocationName": "lee healthcare",
"intents": [
{
"name": "AMAZON.CancelIntent",
"samples": []
},
{
"name": "AMAZON.HelpIntent",
"samples": []
},
{
"name": "AMAZON.StopIntent",
"samples": []
},
{
"name": "AMAZON.NavigateHomeIntent",
"samples": []
},
{
"name": "AMAZON.FallbackIntent",
"samples": []
},
{
"name": "FollowUpIntentHandler",
"slots": [
{
"name": "follow",
"type": "FOLLOW"
}
],
"samples": [
"{follow} up question",
"{follow} up",
"{follow}"
]
},
{
"name": "NewIntentHandler",
"slots": [
{
"name": "new",
"type": "NEW"
}
],
"samples": [
"{new} question",
"{new}"
]
},
{
"name": "CaptureUserQuestionIntentHandler",
"slots": [
{
"name": "bodyPart",
"type": "BODYPARTS"
},
{
"name": "time",
"type": "customDate",
"samples": [
"{time}"
]
},
{
"name": "drainage",
"type": "DRAIN",
"samples": [
"{drainage}"
]
}
],
"samples": [
"I have {bodyPart} surgery {time} and have {drainage}"
]
},
{
"name": "BackOrNearIncisionIntentHandler",
"slots": [
{
"name": "color",
"type": "AMAZON.Color",
"samples": [
"{color}"
]
}
],
"samples": [
"{color}",
"near the incision",
"back of the ear near the incision"
]
},
{
"name": "AMAZON.YesIntent",
"samples": []
},
{
"name": "UserIntentHandler",
"slots": [],
"samples": [
"get me"
]
}
],
"types": [
{
"name": "FOLLOW",
"values": [
{
"name": {
"value": "follow"
}
}
]
},
{
"name": "NEW",
"values": [
{
"name": {
"value": "new"
}
}
]
},
{
"name": "BODYPARTS",
"values": [
{
"name": {
"value": "ear"
}
}
]
},
{
"name": "DRAIN",
"values": [
{
"name": {
"value": "drainage"
}
}
]
},
{
"name": "customDate",
"values": [
{
"name": {
"value": "last week"
}
}
]
},
{
"name": "AMAZON.Color",
"values": [
{
"name": {
"value": "clear"
}
}
]
}
]
},
"dialog": {
"intents": [
{
"name": "CaptureUserQuestionIntentHandler",
"confirmationRequired": false,
"prompts": {},
"slots": [
{
"name": "bodyPart",
"type": "BODYPARTS",
"confirmationRequired": false,
"elicitationRequired": false,
"prompts": {}
},
{
"name": "time",
"type": "customDate",
"confirmationRequired": false,
"elicitationRequired": true,
"prompts": {
"elicitation": "Elicit.Slot.832794320406.761354353518"
}
},
{
"name": "drainage",
"type": "DRAIN",
"confirmationRequired": false,
"elicitationRequired": true,
"prompts": {
"elicitation": "Elicit.Slot.832794320406.873931110175"
}
}
]
},
{
"name": "BackOrNearIncisionIntentHandler",
"confirmationRequired": false,
"prompts": {},
"slots": [
{
"name": "color",
"type": "AMAZON.Color",
"confirmationRequired": false,
"elicitationRequired": true,
"prompts": {
"elicitation": "Elicit.Slot.414884367204.126479337664"
}
}
]
}
],
"delegationStrategy": "ALWAYS"
},
"prompts": [
{
"id": "Slot.Validation.544061479456.1369268390684.618525999294",
"variations": [
{
"type": "PlainText",
"value": "Can i help you with the follow question or new question?"
}
]
},
{
"id": "Elicit.Slot.832794320406.761354353518",
"variations": [
{
"type": "PlainText",
"value": "When does it happen?"
}
]
},
{
"id": "Elicit.Slot.832794320406.873931110175",
"variations": [
{
"type": "PlainText",
"value": "What else do you have ?"
}
]
},
{
"id": "Elicit.Slot.414884367204.126479337664",
"variations": [
{
"type": "PlainText",
"value": "What color is drainage?"
}
]
}
]
}
}
Are you still having this problem? When I created a new skill using your code and interaction model, "get me" successfully triggered the UserIntentHandlers. What intent is getting triggered?
In the Alexa Developer Console, in the "Build" tab for your skill, click "Intents" in the side menu. You'll see an "Evaluate Model" button near the top right. Click it and choose "Utterance Profiler". You can see what intent gets triggered when you say "get me".
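If the Utterance Profiler routes "get me" correctly but the deployed skill still does not, one way to see exactly which intent reaches your code is a request interceptor; a debugging sketch (not part of the original skill) might look like this:
```javascript
// Debugging sketch: log every incoming request so CloudWatch shows which intent actually fired
const LoggingRequestInterceptor = {
  process(handlerInput) {
    console.log(`~~~~ Incoming request: ${JSON.stringify(handlerInput.requestEnvelope.request)}`);
  }
};

// Registered alongside the existing handlers, e.g.:
// exports.handler = Alexa.SkillBuilders.custom()
//   .addRequestHandlers(/* ...handlers above... */)
//   .addRequestInterceptors(LoggingRequestInterceptor)
//   .addErrorHandlers(ErrorHandler)
//   .lambda();
```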

How to solve this in JavaScript?

> Locate the `displayBirthdate` function you initially defined, which took no parameter. Modify it to use object de-structuring to get just the 'dob' property of the parameter object it will receive
here's the code
```javascript
const displayBirthdate = () => {};
const displayPhone = () => {};   // referenced below but missing from the original snippet
const displayAddress = () => {};
const displayExtraUserInfo = (extra) => {
  document.getElementById("btn-birthdate").addEventListener("click", () => {
    displayBirthdate(extra);
  });
  document.getElementById("btn-phone").addEventListener("click", () => {
    displayPhone(extra);
  });
  document.getElementById("btn-address").addEventListener("click", () => {
    displayAddress(extra);
  });
};
```
Adding to the above question: this is the expected response passed as the parameter extra.
{
"results": [
{
"gender": "female",
"name": {
"title": "ms",
"first": "ceyhan",
"last": "dizdar"
},
"location": {
"street": "3826 şehitler cd",
"city": "tekirdağ",
"state": "bitlis",
"postcode": 11689,
"coordinates": {
"latitude": "79.1017",
"longitude": "27.1350"
},
"timezone": {
"offset": "+5:45",
"description": "Kathmandu"
}
},
"email": "ceyhan.dizdar#example.com",
"login": {
"uuid": "34eb65b2-0535-4656-bd68-4da69dc6d016",
"username": "orangefish864",
"password": "grandpa",
"salt": "vowzvAS2",
"md5": "cf4a7f3210ef97e8e72defafd80b94c8",
"sha1": "4f2af3439862b9bf25757ee73df8cd410ce201a2",
"sha256":
"1497acbca446b5fa47d4bc5ffe4e82c17818176596b66d94f213f091c8ed8077"
},
"dob": {
"date": "1979-08-10T22:03:55Z",
"age": 39
},
"registered": {
"date": "2008-05-24T13:30:20Z",
"age": 10
},
"phone": "(873)-801-4132",
"cell": "(751)-606-5317",
"id": {
"name": "",
"value": null
},
"picture": {
"large": "https://randomuser.me/api/portraits/women/59.jpg",
"medium": "https://randomuser.me/api/portraits/med/women/59.jpg",
"thumbnail":
"https://randomuser.me/api/portraits/thumb/women/59.jpg"
},
"nat": "TR"
}
],
"info": {
"seed": "008a9fe3a638239b",
"results": 1,
"page": 1,
"version": "1.2"
}
}
Now the question is this:
Write an arrow function displayBirthdate(), pass in this object (extra) as a parameter, and use destructuring to grab the "dob" property from the object (extra).
Below is how I attempted to solve the question:
const displayBirthdate = (obj) => {
  const { results } = obj;
  const [{ dob }] = results;
};
but it appears to be incorrect. Any help would be appreciated. Thank you.
const displayBirthdate = ({ dob }) => {};
const displayAddress = ({ location }) => {};
In your attempted solution you were trying to obtain the dob object from results, but const {results} = obj; evaluates results to an array. const {dob} = results[0]; in my answer gets the dob object from the first element of the results array. Cheers!
Your original attempt:
const displayBirthdate = (obj) => {
  const { results } = obj;
  const [{ dob }] = results;
};
And the corrected version:
const displayBirthdate = (obj) => {
  const { results } = obj;
  console.log(results); // this is an array with the required object at index 0
  const { dob } = results[0];
  console.log(dob); // dob is the object you are looking for
};
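If extra really is the whole response object shown above (rather than a single user), the same destructuring can also be written directly in the parameter list; a small sketch:
```javascript
// Sketch: destructuring nested inside the parameter itself,
// assuming `extra` has the { results: [ { dob, ... } ] } shape shown in the question
const displayBirthdate = ({ results: [{ dob }] }) => {
  console.log(dob.date, dob.age); // "1979-08-10T22:03:55Z", 39 for the sample response
};

// Hypothetical usage with the response object from the question:
// displayBirthdate(extra);
```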
