Skip to content

Commit

Permalink
Fixed issue #3615
Browse files Browse the repository at this point in the history
  • Loading branch information
liutng authored and humphd committed Nov 21, 2022
1 parent 2081146 commit f6192c4
Show file tree
Hide file tree
Showing 3 changed files with 135 additions and 13 deletions.
40 changes: 31 additions & 9 deletions src/api/posts/src/routes/posts.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,34 @@ const postsUrl = process.env.POSTS_URL || '/';

const posts = Router();

/**
 * Shape the response payload for a list of post ids.
 *
 * @param {string[]} ids - post ids to serialize
 * @param {boolean} [shouldExpand=false] - when true, look each post up so the
 *   payload also carries author, title, and publish date; otherwise only the
 *   id and its resource URL are returned.
 * @returns {Promise<Array<Object>>} resolves to one payload object per id
 */
const prepareData = async (ids, shouldExpand = false) => {
  if (!shouldExpand) {
    // Minimal shape: just the id and the URL where the full post lives.
    return ids.map((id) => ({ id, url: `${postsUrl}/${id}` }));
  }

  // Expanded shape: fetch every post (in parallel) so we can surface the
  // feed author's name alongside the post's own metadata.
  const lookups = ids.map(async (id) => {
    const post = await Post.byId(id);
    return {
      id,
      url: `${postsUrl}/${id}`,
      author: post.feed.author,
      title: post.title,
      publishDate: post.published,
    };
  });
  return Promise.all(lookups);
};

posts.get('/', validatePostsQuery(), async (req, res, next) => {
const defaultNumberOfPosts = process.env.MAX_POSTS_PER_PAGE || 30;
const capNumOfPosts = 100;
const page = parseInt(req.query.page || 1, 10);

const expand = req.query.expand ? parseInt(req.query.expand, 10) : 0;
let ids;
let perPage;
let postsCount;
Expand Down Expand Up @@ -60,14 +83,13 @@ posts.get('/', validatePostsQuery(), async (req, res, next) => {
first: `/posts?per_page=${perPage}&page=${1}`,
last: `/posts?per_page=${perPage}&page=${Math.floor(postsCount / perPage)}`,
});
res.json(
ids
// Return id and url for a specific post
.map((id) => ({
id,
url: `${postsUrl}/${id}`,
}))
);
try {
const data = await prepareData(ids, expand === 1);
res.json(data);
} catch (err) {
logger.error({ err }, 'Unable to get detail information of posts from Redis');
next(createError(503, 'Unable to connect to database'));
}
});

posts.get('/:id', validatePostsIdParam(), async (req, res, next) => {
Expand Down
61 changes: 57 additions & 4 deletions src/api/posts/test/posts.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,21 @@ describe('/posts', () => {
const createdItems = 150;
const nonInteger = 'test';

const posts = [...Array(createdItems).keys()].map((item) => {
// Fixture feeds, one per post; each feed's id is hashed from the same guid
// that the matching post uses, so post/feed pairs line up by index.
const feeds = Array.from({ length: createdItems }, (_, item) => {
  const guid = `http://telescope${item}.cdot.systems`;
  return {
    id: hash(guid),
    author: 'author',
    url: 'http://foo.url',
    user: 'user',
    link: `http://github.com/githubUsername`,
    etag: 'etag',
    lastModified: new Date('2009-09-07T22:23:00.544Z'),
    githubUsername: `githubUsername`,
  };
});
const posts = [...Array(createdItems).keys()].map((item, index) => {
const guid = `http://telescope${item}.cdot.systems`;
const id = hash(guid);
return {
Expand All @@ -22,13 +36,26 @@ describe('/posts', () => {
html: 'html',
updated: new Date('2009-09-07T22:23:00.544Z'),
published: new Date('2009-09-07T22:20:00.000Z'),
url: 'foo',
site: 'foo',
url: `http://localhost/v1/posts/${id}`,
site: 'http://foo.site',
feed: feeds[index].id,
};
});

beforeAll(() => posts.map((post) => addPost(post)));
// Expected shape of each item the route returns when queried with expand=1:
// the post's id/url/title plus the author from the matching feed fixture.
const expandedPosts = posts.map(({ id, url, title }, index) => ({
  id,
  url,
  author: feeds[index].author,
  title,
  publishDate: '2009-09-07T22:20:00.000Z',
}));

// Seed the datastore with all post and feed fixtures before any test runs.
// NOTE(review): addPost/addFeed appear to be async writes (the pre-existing
// beforeAll returned the mapped array, presumably so Jest could wait on it),
// but a bare array of promises is never awaited by Jest, and the block body
// here returned undefined — so tests could race the fixture writes. Return a
// single Promise.all so Jest reliably waits for every write to finish.
// Promise.all also tolerates synchronous return values, so this stays safe
// even if addPost/addFeed turn out to be sync.
beforeAll(() =>
  Promise.all([...posts.map((post) => addPost(post)), ...feeds.map((feed) => addFeed(feed))])
);
test('default number of items should be returned', async () => {
const res = await request(app).get('/');

Expand Down Expand Up @@ -69,6 +96,32 @@ describe('/posts', () => {
expect(res.body.length).toBeGreaterThan(0);
});

// expand=0 must behave exactly like omitting the param: compact id/url list.
test('request posts with a valid expand(do not expand) query param', async () => {
const res = await request(app).get('/?page=1&expand=0');
expect(res.status).toEqual(200);
// This will depend on the env value, so as long as we get back something.
expect(res.body.length).toBeGreaterThan(0);
});
// expand=1 must return the enriched shape (author, title, publishDate).
test('request posts with a valid expand query param', async () => {
const res = await request(app).get('/?page=1&expand=1');
expect(res.status).toEqual(200);
expect(res.body.length).toBeGreaterThan(0);
res.body.forEach((post) => {
// check if the returned object is the same as the generated one in this test.
expect(post).toEqual(
// We need to find the corresponding post of res.body in expandedPosts by filtering it with its id.
expect.objectContaining(expandedPosts.find((expandedPst) => expandedPst.id === post.id))
);
});
// And, conversely, every returned item must exist in the expected fixture set.
expect(expandedPosts).toEqual(expect.arrayContaining(res.body));
});
// A non-numeric expand value parses to NaN and is treated as "do not expand",
// so the request still succeeds rather than erroring.
test('request posts with an invalid expand query param', async () => {
const res = await request(app).get('/?page=1&expand=abc');
expect(res.status).toEqual(200);
// This will depend on the env value, so as long as we get back something.
expect(res.body.length).toBeGreaterThan(0);
});

test('request posts with non-integer page param', async () => {
const res = await request(app).get(`/?page=${nonInteger}`);
expect(res.status).toEqual(400);
Expand Down
47 changes: 47 additions & 0 deletions src/web/docusaurus/docs/api-services/posts.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,53 @@ Status: 200 OK
}
```

### Get posts with basic information (author, title, and publishDate)

Find any posts that are currently cached in Redis, expanded with their basic information (author, title, and publish date).

```
GET /?expand=1
```

#### Code Samples

##### Shell

```bash
curl -X GET \
http://localhost/v1/posts?expand=1
```

##### JavaScript

- With `fetch`

```js
fetch('http://localhost/v1/posts?expand=1');
```

#### Responses

##### Successful response

```
Status: 200 OK
```

```json
[
  {
    "id": "8a41a9109e",
    "url": "http://localhost/v1/posts/8a41a9109e",
    "author": "Ray Gervais",
    "title": "What Does \"Cloud Native\" Even Mean?",
    "publishDate": "2022-11-04T00:00:00.000Z"
  }
]
```

### Get a single post

Find a single post that is currently cached in redis. Response can be retrieved as html, json, or plain-text. Specify as part of the headers what you want the response to look like.
Expand Down

0 comments on commit f6192c4

Please sign in to comment.