feat(glossary): from export to publish #1424
@@ -12,28 +12,30 @@ interface Return {
 export const generateEditorialContents = (
   documents: DocumentElasticWithSource<EditorialContentDoc>[]
 ): Return => {
-  // intro: data.intro,
-  // introWithGlossary: addGlossaryContentToMarkdown(
-  //   glossary,
-  //   data.intro ?? ""
-  // ),
-  // description: data.description,
-  // sectionDisplayMode: data.sectionDisplayMode,
-  // dismissalProcess: data.dismissalProcess,
-  //   ? {
-  //       title: block.content,
-  //     }
-  //   : {
-  //       markdown: block.content,
-  //       htmlWithGlossary: addGlossaryContentToMarkdown(
-  //         glossary,
-  //         block.content
-  //       ),
-  //     }),
-  // remove markdown et intro
-  const relatedIdsDocuments = getRelatedIdsDocuments(documents);
+  const documentsOptimized = documents.map((document: any) => {
+    const introWithGlossary = document.introWithGlossary;
+    delete document.intro;
+    delete document.introWithGlossary;
Comment on lines +17 to +18

Reviewer: I'm not a fan of the deletes; couldn't we use destructuring instead?

Author: Yeah, I had tried that, but I found it less readable. (A destructuring sketch is included after the diff.)
+    return {
+      ...document,
+      intro: introWithGlossary,
+      contents: document.contents.map((content: any) => {
+        content.blocks = content.blocks.map((block: any) => {
+          const htmlWithGlossary = block.htmlWithGlossary;
+          delete block.markdown;
+          delete block.htmlWithGlossary;
+          return {
+            ...block,
+            html: htmlWithGlossary,
+          };
+        });
+        return content;
+      }),
+    };
+  });
+  const relatedIdsDocuments = getRelatedIdsDocuments(documentsOptimized);
   return {
-    documents,
+    documents: documentsOptimized,
     relatedIdsDocuments,
   };
 };
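For reference, a minimal sketch of the destructuring alternative raised in the thread above. It mirrors the names in the diff and keeps its loose `any` typings; the wrapper name `optimize` is hypothetical and the snippet is not part of the PR:

// Sketch only: the same transformation as the diff, written with rest
// destructuring instead of `delete`.
const optimize = (documents: any[]) =>
  documents.map(({ intro, introWithGlossary, contents, ...rest }: any) => ({
    // keep everything except `intro` and `introWithGlossary`
    ...rest,
    // expose the glossary-enriched intro under `intro`
    intro: introWithGlossary,
    contents: contents.map((content: any) => ({
      ...content,
      // drop `markdown` and `htmlWithGlossary`, expose `html` instead
      blocks: content.blocks.map(
        ({ markdown, htmlWithGlossary, ...block }: any) => ({
          ...block,
          html: htmlWithGlossary,
        })
      ),
    })),
  }));

One trade-off, which may be what made this feel less readable: the discarded `intro` and `markdown` bindings are unused (some lint configurations flag that), whereas the `delete` version mutates the incoming objects in place.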
Unchanged files with check annotations
FROM elasticsearch:7.13.4
WORKDIR /usr/share/elasticsearch

ARG NODE_VERSION=20.3.1-alpine
FROM node:$NODE_VERSION AS deps
WORKDIR /app
RUN yarn workspaces focus --production alert-cli && yarn cache clean
FROM node:$NODE_VERSION
# Add git to docker image
RUN apk add --no-cache git openssh-client
USER 1000

ARG NODE_VERSION=20.3.1-alpine
FROM node:$NODE_VERSION AS deps
RUN apk add --update python3 make g++ && rm -rf /var/cache/apk/*

Check failure on line 5 in targets/export-elasticsearch/Dockerfile (GitHub Actions / Lint Dockerfile)

WORKDIR /app
RUN yarn workspaces focus --production export-elasticsearch && yarn cache clean
RUN mkdir -p /app/targets/export-elasticsearch/node_modules
FROM node:$NODE_VERSION
WORKDIR /app

ARG NODE_VERSION=20.3.1-alpine
FROM node:$NODE_VERSION AS deps
WORKDIR /app
RUN yarn workspaces focus --production frontend && yarn cache clean
RUN mkdir -p targets/frontend/node_modules
FROM node:$NODE_VERSION
WORKDIR /app
});
it("should throw error if no xlsx file found", async () => {
  const url = "https://example.com/files";
  const html = `
    <html>
      <body>

const dataJson = JSON.parse(fs.readFileSync(pathIndex, "utf8"));
const supportedCcIndexJson: Agreement[] = dataJson.map((cc: any) => {
  return {
    name: cc.title,
    num: cc.num,

  );
  const html = response.data;
  const regex = /href="([^"]*\.xlsx)"/g;
  const match = regex.exec(html);
  if (!match) {
    throw new Error("No xlsx file found");
  }
  return match[1];
}

const baseRegex = /<base href="(.*)" \/>/g;
const baseMatch = baseRegex.exec(html);
if (!baseMatch) {
  throw new Error(`xlsx file url not valid : ${match[1]}`);
}

  implements RelevantDocumentsExtractor
{
  extractReferences({
    modified,
    removed,
  }: Pick<DilaChanges, "modified" | "removed">): Promise<DocumentReferences[]> {
    return Promise.resolve([]);
  }
import payload from "./mocks/editorialContent.payload.json";

jest.mock("@shared/utils", () => {
  return {
    ...jest.requireActual("@shared/utils"),
    gqlClient: jest.fn(),
    createGetArticleReference: () =>
});

test("extractMailTemplateRef", async () => {
  const references = await extractEditorialContentTemplateRef(payload as any);

Check warning on line 23 in targets/alert-cli/src/diff/dila/extractReferences/__tests__/editorialContents.test.ts (GitHub Actions / Lint (alert-cli))

  expect(references).toMatchInlineSnapshot(`
    [
      {

import payload from "./mocks/mailTemplate.payload.json";

jest.mock("@shared/utils", () => {
  return {
    ...jest.requireActual("@shared/utils"),
    gqlClient: jest.fn(),
    createGetArticleReference: () =>
server.setErrorConfig((app) => {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  app.use(
    (err: Error, _req: Request, res: Response, _next: NextFunction) => {
      res.status(500).json({ errors: err.message });
    }
  );

    ],
  ];
  const duplicateSlugs = await getDuplicateSlugs(documents);
  expect(Object.entries(duplicateSlugs).length).toBe(0);
});
test("should return an array of duplicated slug", async () => {
    ],
  ];
  const duplicateSlugs = await getDuplicateSlugs(documents);
  expect(Object.entries(duplicateSlugs).length).toBe(1);
});
});

describe("updateContributionsAndGetIDCCs", () => {
  test("should return a list of iddc", async () => {
    const contributions: any[] = [
      { idcc: "0292" },
      { idcc: "0829" },
      { idcc: "1557" },
      { idcc: "1909" },
    ];
    const idccs = await getIDCCs(contributions);
    expect(Array.from(idccs)).toEqual([292, 829, 1557, 1909]);
  });
});

describe("getIDCCs", () => {
  it("returns unique IDCC values from old and new contributions", () => {
    const contribs: any = [{ idcc: "5678" }];
    const idccs = getIDCCs(contribs);
    expect(idccs).toEqual(new Set([5678]));
  });
describe("getInfoMessage", () => { | ||
test("getInfoMessage returns expected message for ANSWER", () => { | ||
const data: any = { | ||
contentType: "ANSWER", | ||
}; | ||
const expected = | ||
"Les informations ci-dessous sont issues de l’analyse des règles prévues par votre convention collective de branche étendue et par le Code du travail."; | ||
expect(getInfoMessage(data)).toBe(expected); | ||
}); | ||
test("getInfoMessage returns expected message for CDT", () => { | ||
const data: any = { | ||
contentType: "CDT", | ||
}; | ||
}); | ||
describe(`should return undefined`, () => { | ||
it(`with a nonexistent article ID or CID`, async () => { | ||
const articles = Articles; | ||
expect(indexedArticle(articles, "KALIARTI123456789012")).toBeUndefined(); |
): Promise<AgreementArticleWithPath[] | undefined[]> {
  const agreement = await loadAgreement(agreementIdOrIdcc);
  const rootedArticles = flatFilter(agreement as any, { type: "article" });

Check warning on line 22 in targets/ingester/src/articles/kali/agreement-articles-with-path.ts (GitHub Actions / Lint (ingester))

  if (!rootedArticles) {
    return [];
  }
  const articles = await loadArticles();
  return rootedArticles.children.map((article: AgreementArticle) => {
    const a = indexedArticle(articles, article.data.cid);
    if (!a) return;
    return {

import unistUtilFlatFilter from "unist-util-flat-filter";
import parents from "unist-util-parents";

export type NormalizedArticle = {
  id: string;
  cid: string;
  label: string;

  const tree = await loadCodeDuTravail();
  const code = parents(tree);
  const codeArticles = unistUtilFlatFilter(code as any, {

Check warning on line 32 in targets/ingester/src/articles/legi/load-labour-code-articles.ts (GitHub Actions / Lint (ingester))

    type: "article",
  });
  return codeArticles.children.map((codeArticle: CodeArticle) =>
    convertCodeArticleToArticle(codeArticle)
  );
};
console.timeEnd(` getDocuments ${pkgName}`);
console.log(` ${pkgName}: ${documents.length} documents`);
if (!args.dryRun && documents.length > 0) {
  await initDocAvailabity(documents[0].source);
  console.log(
    ` ready to ingest ${documents.length} documents from ${pkgName}`
  );

  Answer,
  GenericContributionDoc,
  CustomizedContributionDoc,
  LegalRef,
  FicheServicePublicDoc,
  GenericAnswer,
} from "@socialgouv/cdtn-types";
Reviewer: Is it optional for the fiches SP case?

Author: I think I did that for simplicity, to avoid having to modify the type by destructuring it. But yes, in theory this field does not exist. It would have required quite a bit of refactoring at the type level, though...
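To make the trade-off in this exchange concrete, here is a hypothetical TypeScript sketch; the type and field names (EditorialDoc, htmlWithGlossary) are illustrative only and are not taken from @socialgouv/cdtn-types:

// Option discussed above: widen the existing type with an optional field,
// so documents that never carry it (the fiches SP case) still type-check.
type EditorialDoc = {
  title: string;
  htmlWithGlossary?: string; // in theory absent for fiches SP
};

// Stricter alternative, at the cost of "quite a bit of refactoring at the
// type level": split the type so the field only exists where it is meaningful.
type FicheSPDoc = { title: string };
type GlossarizedDoc = { title: string; htmlWithGlossary: string };
type PublishedDoc = FicheSPDoc | GlossarizedDoc;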