Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor repository #149

Merged
merged 59 commits into from
Aug 2, 2020
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
59 commits
Select a commit. Hold Shift + click to select a range.
3c82f2f
fix: nits
h13i32maru Jul 31, 2020
238bfc2
refactor(StreamRepo):
h13i32maru Jul 31, 2020
138cd93
refactor(StreamRepo):
h13i32maru Jul 31, 2020
7dfa9c6
refactor(StreamRepo):
h13i32maru Aug 1, 2020
adcbd23
refactor(StreamRepo):
h13i32maru Aug 1, 2020
2bad1d3
refactor(StreamRepo):
h13i32maru Aug 1, 2020
50c1584
refactor(StreamRepo):
h13i32maru Aug 1, 2020
dc07165
refactor(StreamRepo):
h13i32maru Aug 1, 2020
1ca4bf6
refactor(StreamRepo):
h13i32maru Aug 1, 2020
c88fea0
refactor(StreamRepo):
h13i32maru Aug 1, 2020
f5081be
refactor(StreamRepo):
h13i32maru Aug 1, 2020
e57bb00
refactor(StreamRepo):
h13i32maru Aug 1, 2020
eb726e5
refactor(StreamRepo):
h13i32maru Aug 1, 2020
ae12f7d
refactor(StreamRepo):
h13i32maru Aug 1, 2020
ca4b6bb
refactor(StreamRepo):
h13i32maru Aug 1, 2020
c6c26fd
refactor(FilterHistoryRepo):
h13i32maru Aug 1, 2020
9c37bed
refactor(FilterHistoryRepo):
h13i32maru Aug 1, 2020
0df5a99
refactor(StreamRepo):
h13i32maru Aug 1, 2020
de9adf3
refactor(StreamIssueRepo):
h13i32maru Aug 1, 2020
498447d
refactor:
h13i32maru Aug 1, 2020
67f7e7c
refactor(StreamIssueRepo):
h13i32maru Aug 1, 2020
5d17553
refactor(StreamIssueRepo):
h13i32maru Aug 1, 2020
a9e1f5e
refactor(SubscriptionIssueRepo):
h13i32maru Aug 1, 2020
c9496ac
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
1ff6872
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
933087b
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
c3eee22
fix(Stream): query index calc
h13i32maru Aug 1, 2020
2e76678
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
8a9af6d
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
5b4678d
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
be72387
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
54b1f49
refactor(StreamPolling):
h13i32maru Aug 1, 2020
8bb448b
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
9d10f56
refactor(SystemStreamRepo):
h13i32maru Aug 1, 2020
e135662
refactor(IssueRepo):
h13i32maru Aug 1, 2020
efceb1f
refactor(IssueRepo):
h13i32maru Aug 1, 2020
4e4059d
refactor(IssueRepo):
h13i32maru Aug 1, 2020
2b7e4ce
refactor(IssueRepo):
h13i32maru Aug 1, 2020
30f82e5
refactor(IssueRepo):
h13i32maru Aug 1, 2020
8d245b9
refactor(IssueRepo):
h13i32maru Aug 1, 2020
f0d0d98
refactor(IssueRepo):
h13i32maru Aug 1, 2020
0730d1e
refactor(IssueRepo):
h13i32maru Aug 1, 2020
457a361
refactor(IssueRepo):
h13i32maru Aug 1, 2020
59168df
refactor(VersionRepo):
h13i32maru Aug 1, 2020
8e2c123
refactor(IssueRepo):
h13i32maru Aug 1, 2020
3d9d5d6
refactor(IssueRepo):
h13i32maru Aug 1, 2020
19e50f8
refactor(IssueRepo):
h13i32maru Aug 1, 2020
bff9d34
refactor(IssueRepo):
h13i32maru Aug 1, 2020
b44e56c
refactor: nits
h13i32maru Aug 2, 2020
cab2570
refactor(IssueRepo):
h13i32maru Aug 2, 2020
8bcf72c
refactor(IssueRepo):
h13i32maru Aug 2, 2020
5166b9f
refactor(IssueRepo):
h13i32maru Aug 2, 2020
c0852ca
refactor(IssueRepo):
h13i32maru Aug 2, 2020
d71e3e7
refactor(IssueRepo):
h13i32maru Aug 2, 2020
eedb2d0
refactor(IssueRepo):
h13i32maru Aug 2, 2020
e45dc8d
refactor(StreamIssueRepo):
h13i32maru Aug 2, 2020
992dac7
refactor(DBIPC): show slow query
h13i32maru Aug 2, 2020
13aa0f8
refactor(Repo):
h13i32maru Aug 2, 2020
f9e5f9a
refactor(IssueRepo):
h13i32maru Aug 2, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
refactor(IssueRepo):
  • Loading branch information
h13i32maru committed Aug 2, 2020
commit 8bcf72c8313fb59ad10254d2358cc85ba95e10d5
16 changes: 12 additions & 4 deletions src/Renderer/Fragment/Issues/IssuesFragment.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -195,12 +195,20 @@ export class IssuesFragment extends React.Component<any, State> {
let ids;
if (this._libraryStreamName && type === 'library' && this._libraryStreamName === streamIdOrName) {
ids = updatedIssueIds;
} else if (this._streamId !== null && type == 'system') {
const {error, issueIds} = await IssueRepo.includeIds(this._streamId, updatedIssueIds);
} else if (this._streamId !== null && this._streamId < 0) {
// todo: eventから受け取ったstreamを使えるようにする
const res = await SystemStreamRepo.getSystemStream(this._streamId);
if (res.error) return console.error(res.error);

const {error, issueIds} = await IssueRepo.includeIds(updatedIssueIds, this._streamId, res.systemStream.defaultFilter);
if (error) return console.error(error);
ids = issueIds;
} else if (this._streamId !== null && type == 'stream') {
const {error, issueIds} = await IssueRepo.includeIds(this._streamId, updatedIssueIds, this._filterQuery);
} else if (this._streamId !== null && this._streamId >= 0) {
// todo: eventから受け取ったstreamを使えるようにする
const res = await StreamRepo.getStream(this._streamId);
if (res.error) return console.error(res.error);

const {error, issueIds} = await IssueRepo.includeIds(updatedIssueIds, this._streamId, res.stream.defaultFilter, this._filterQuery);
if (error) return console.error(error);
ids = issueIds;
} else {
Expand Down
191 changes: 94 additions & 97 deletions src/Renderer/Repository/Issue/Issue.ts
Original file line number Diff line number Diff line change
@@ -1,99 +1,96 @@
import {IssueFilter} from './IssueFilter';
import {DBIPC} from '../../../IPC/DBIPC';
// class _Issue {
// async findIssues(streamId, filterQuery = null, pageNumber = 0, perPage = 30) {
// const sql = this._buildSQL();
// let issues;
// let totalCount;
// const offset = pageNumber * perPage;
//
// const extraCondition = IssueFilter.buildCondition(filterQuery);
// if (extraCondition.filter) {
// // hack
// sql.issuesQuery = sql.issuesQuery.replace('where -- replace', `where ${extraCondition.filter} and`);
// sql.countQuery = sql.countQuery.replace('where -- replace', `where ${extraCondition.filter} and`);
// }
// if (extraCondition.sort) {
// // hack
// sql.issuesQuery = sql.issuesQuery.replace(/order by\s+[\w\s]+/m, `order by ${extraCondition.sort}\n`);
// }
//
// const {row: temp} = await DBIPC.selectSingle(sql.countQuery, [streamId]);
// totalCount = temp.count;
//
// // hack: if pageNumber is negative, immediate return. because performance.
// if (pageNumber < 0) return {totalCount};
//
// const {rows} = await DBIPC.select(sql.issuesQuery + ` limit ${offset}, ${perPage}`, [streamId]);
// issues = rows;
// for (const issue of issues) {
// const value = JSON.parse(issue.value);
//
// // todo: this hack is for old github response
// // we must add value.assignee before `issue.value = value`.
// // because issue.value is setter/getter, so setter behavior is special.
// if (!value.assignees) {
// value.assignees = value.assignee ? [value.assignee] : [];
// }
//
// issue.value = value;
// }
//
// const hasNextPage = offset + perPage < totalCount;
// return {issues, totalCount, hasNextPage};
// }
//
// _buildSQL() {
// const issuesQuery = `
// select
// t1.*
// from
// issues as t1
// inner join
// streams_issues as t2 on t1.id = t2.issue_id
// where -- replace
// stream_id = ?
// and archived_at is null
// order by
// updated_at desc
// `;
//
// const countQuery = `
// select
// count(1) as count
// from
// issues as t1
// inner join
// streams_issues as t2 on t1.id = t2.issue_id
// where -- replace
// stream_id = ?
// and archived_at is null
// `;
//
// return {issuesQuery, countQuery};
// }

class _Issue {
async findIssues(streamId, filterQuery = null, pageNumber = 0, perPage = 30) {
const sql = this._buildSQL();
let issues;
let totalCount;
const offset = pageNumber * perPage;
// async includeIds(streamId, issueIds, filter = null) {
// let filterCondition = '';
// if (filter) {
// const tmp = IssueFilter.buildCondition(filter);
// filterCondition = `inner join (select id from issues where ${tmp.filter}) as t2 on t1.issue_id = t2.id`;
// }
//
// const {rows: includedIssueIds} = await DBIPC.select(`
// select
// issue_id
// from
// streams_issues as t1
// ${filterCondition}
// where
// stream_id = ? and
// issue_id in (${issueIds.join(',')})
// `, [streamId]);
//
// return includedIssueIds.map((item)=> item.issue_id);
// }
// }

const extraCondition = IssueFilter.buildCondition(filterQuery);
if (extraCondition.filter) {
// hack
sql.issuesQuery = sql.issuesQuery.replace('where -- replace', `where ${extraCondition.filter} and`);
sql.countQuery = sql.countQuery.replace('where -- replace', `where ${extraCondition.filter} and`);
}
if (extraCondition.sort) {
// hack
sql.issuesQuery = sql.issuesQuery.replace(/order by\s+[\w\s]+/m, `order by ${extraCondition.sort}\n`);
}

const {row: temp} = await DBIPC.selectSingle(sql.countQuery, [streamId]);
totalCount = temp.count;

// hack: if pageNumber is negative, immediate return. because performance.
if (pageNumber < 0) return {totalCount};

const {rows} = await DBIPC.select(sql.issuesQuery + ` limit ${offset}, ${perPage}`, [streamId]);
issues = rows;
for (const issue of issues) {
const value = JSON.parse(issue.value);

// todo: this hack is for old github response
// we must add value.assignee before `issue.value = value`.
// because issue.value is setter/getter, so setter behavior is special.
if (!value.assignees) {
value.assignees = value.assignee ? [value.assignee] : [];
}

issue.value = value;
}

const hasNextPage = offset + perPage < totalCount;
return {issues, totalCount, hasNextPage};
}

_buildSQL() {
const issuesQuery = `
select
t1.*
from
issues as t1
inner join
streams_issues as t2 on t1.id = t2.issue_id
where -- replace
stream_id = ?
and archived_at is null
order by
updated_at desc
`;

const countQuery = `
select
count(1) as count
from
issues as t1
inner join
streams_issues as t2 on t1.id = t2.issue_id
where -- replace
stream_id = ?
and archived_at is null
`;

return {issuesQuery, countQuery};
}

async includeIds(streamId, issueIds, filter = null) {
let filterCondition = '';
if (filter) {
const tmp = IssueFilter.buildCondition(filter);
filterCondition = `inner join (select id from issues where ${tmp.filter}) as t2 on t1.issue_id = t2.id`;
}

const {rows: includedIssueIds} = await DBIPC.select(`
select
issue_id
from
streams_issues as t1
${filterCondition}
where
stream_id = ? and
issue_id in (${issueIds.join(',')})
`, [streamId]);

return includedIssueIds.map((item)=> item.issue_id);
}
}

// Module-level singleton; legacy consumers import `Issue` directly.
export const Issue = new _Issue();
// export const Issue = new _Issue();
26 changes: 22 additions & 4 deletions src/Renderer/Repository/IssueRepo.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import {DBIPC} from '../../IPC/DBIPC';
import {ConfigRepo} from './ConfigRepo';
import {Issue} from './Issue/Issue';
import {LibraryIssue} from './Issue/LibraryIssue';
import {IssueEvent} from '../Event/IssueEvent';
import {IssueFilter} from './Issue/IssueFilter';
Expand Down Expand Up @@ -369,11 +368,30 @@ class _IssueRepo {
return {};
}

async includeIds(streamId: number, issueIds: number[], filter: string = null): Promise<{error?: Error; issueIds?: number[]}> {
const ids = await Issue.includeIds(streamId, issueIds, filter);
return {issueIds: ids};
async includeIds(issueIds: number[], streamId: number | null, defaultFilter: string, userFilter: string = ''): Promise<{error?: Error; issueIds?: number[]}> {
const cond = IssueFilter.buildCondition(`${userFilter} ${defaultFilter}`);
const sql = `
select
id
from
issues
where
${cond.filter}
${streamId !== null ? `and id in (select issue_id from streams_issues where stream_id = ${streamId})` : ''}
and id in (${issueIds.join(',')})
`;
const {error, rows} = await DBIPC.select<{id: number}>(sql);
if (error) return {error};

const includedIssueIds = rows.map(row => row.id);
return {issueIds: includedIssueIds};
}

// async includeIds(streamId: number, issueIds: number[], filter: string = ''): Promise<{error?: Error; issueIds?: number[]}> {
// const ids = await Issue.includeIds(streamId, issueIds, filter);
// return {issueIds: ids};
// }

private async buildSQL(streamId: number, filter: string, page: number, perPage: number): Promise<{issuesSQL: string; countSQL: string; unreadCountSQL: string}> {
const cond = IssueFilter.buildCondition(filter);
const wheres: string[] = [];
Expand Down