From eb42bef231567fd7708e8e0f96bd653038a1bbd9 Mon Sep 17 00:00:00 2001
From: Grzegorz Godlewski
Date: Thu, 4 Jul 2024 18:30:35 +0200
Subject: [PATCH] Make hash links nicer

---
 src/LinkTranslator.ts                        |  4 +--
 src/containers/server/ServerContainer.ts     |  2 +-
 .../transform/TaskGoogleMarkdownTransform.ts |  2 +-
 .../transform/TaskLocalFileTransform.ts      | 10 +++++-
 .../transform/TransformContainer.ts          | 21 +++++++----
 src/odt/OdtToMarkdown.ts                     | 10 ++++--
 src/odt/executeOdtToMarkdown.ts              |  4 ++-
 src/odt/postprocess/postProcess.ts           |  4 ++-
 src/odt/postprocess/rewriteHeaders.ts        | 28 +++++++++++++--
 test/git/RebaseTest.ts                       |  2 --
 test/odt_md/bullets.md                       |  6 ++--
 test/odt_md/confluence.md                    | 14 ++++----
 test/odt_md/example-document.md              | 36 +++++++++----------
 test/odt_md/issue-432.md                     |  2 +-
 test/odt_md/issue-435-436.md                 |  2 +-
 test/odt_md/pre-mie.md                       |  4 +--
 test/odt_md/project-overview.md              | 22 ++++++------
 test/odt_md/strong-headers.md                |  8 ++---
 18 files changed, 113 insertions(+), 68 deletions(-)

diff --git a/src/LinkTranslator.ts b/src/LinkTranslator.ts
index 310ce343..58029ee3 100644
--- a/src/LinkTranslator.ts
+++ b/src/LinkTranslator.ts
@@ -40,7 +40,7 @@ export function convertExtension(localPath: string, mode?: LinkMode) {
   return dirName + parts.join('.');
 }
 
-export function convertToRelativeMarkDownPath(localPath, basePath) {
+export function convertToRelativeMarkDownPath(localPath: string, basePath: string) {
   if (localPath.startsWith('https://')) return localPath;
   if (localPath.startsWith('http://')) return localPath;
   if (basePath === localPath) return '.';
@@ -51,7 +51,7 @@ export function convertToRelativeMarkDownPath(localPath, basePath) {
   })));
 }
 
-export function convertToRelativeSvgPath(localPath, basePath) {
+export function convertToRelativeSvgPath(localPath: string, basePath: string) {
   if (localPath.startsWith('https://')) return localPath;
   if (localPath.startsWith('http://')) return localPath;
   if (basePath === localPath) return '.';
diff --git a/src/containers/server/ServerContainer.ts b/src/containers/server/ServerContainer.ts
index 7d59d26a..4f9df7f7 100644
--- a/src/containers/server/ServerContainer.ts
+++ b/src/containers/server/ServerContainer.ts
@@ -44,7 +44,7 @@ import {GoogleTreeProcessor} from '../google_folder/GoogleTreeProcessor.ts';
 import {initStaticDistPages} from './static.ts';
 import {initUiServer} from './vuejs.ts';
 import {initErrorHandler} from './error.ts';
-import {WebHookController} from './routes/WebHookController.js';
+import {WebHookController} from './routes/WebHookController.ts';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
diff --git a/src/containers/transform/TaskGoogleMarkdownTransform.ts b/src/containers/transform/TaskGoogleMarkdownTransform.ts
index 512032e8..37d2ca82 100644
--- a/src/containers/transform/TaskGoogleMarkdownTransform.ts
+++ b/src/containers/transform/TaskGoogleMarkdownTransform.ts
@@ -12,7 +12,7 @@ import {SvgTransform} from '../../SvgTransform.ts';
 import {generateDocumentFrontMatter} from './frontmatters/generateDocumentFrontMatter.ts';
 import {generateConflictMarkdown} from './frontmatters/generateConflictMarkdown.ts';
 import {googleMimeToExt} from './TaskLocalFileTransform.ts';
-import {getUrlHash, urlToFolderId} from '../../utils/idParsers.js';
+import {getUrlHash, urlToFolderId} from '../../utils/idParsers.ts';
 
 export class TaskGoogleMarkdownTransform extends QueueTask {
   constructor(protected logger: winston.Logger,
diff --git a/src/containers/transform/TaskLocalFileTransform.ts b/src/containers/transform/TaskLocalFileTransform.ts
index 23214da0..59309c3c 100644
--- a/src/containers/transform/TaskLocalFileTransform.ts
+++ b/src/containers/transform/TaskLocalFileTransform.ts
@@ -49,7 +49,8 @@ export class TaskLocalFileTransform extends QueueTask {
               private destinationDirectory: FileContentService,
               private localFile: LocalFile,
               private localLinks: LocalLinks,
-              private userConfig: UserConfig
+              private userConfig: UserConfig,
+              private globalHeadersMap: {[key: string]: string},
   ) {
     super(logger);
     this.retries = 0;
@@ -128,6 +129,7 @@ export class TaskLocalFileTransform extends QueueTask {
   async generateDocument(localFile: MdFile) {
     let frontMatter;
     let markdown;
+    let headersMap = {};
     let links = [];
     let errors = [];
 
@@ -163,6 +165,7 @@ export class TaskLocalFileTransform extends QueueTask {
       } else {
        converter.setPicturesDir('../' + this.realFileName.replace(/.md$/, '.assets/'), picturesDirAbsolute);
      }
+      headersMap = converter.getHeadersMap();
       markdown = await converter.convert();
       links = Array.from(converter.links);
       frontMatter = generateDocumentFrontMatter(localFile, links, this.userConfig.fm_without_version);
@@ -174,6 +177,7 @@ export class TaskLocalFileTransform extends QueueTask {
         frontMatter: string;
         markdown: string;
         errors: Array<string>;
+        headersMap: {[key: string]: string};
       }
 
       const workerResult: WorkerResult = await this.jobManagerContainer.scheduleWorker('OdtToMarkdown', {
@@ -190,6 +194,7 @@ export class TaskLocalFileTransform extends QueueTask {
       frontMatter = workerResult.frontMatter;
       markdown = workerResult.markdown;
       errors = workerResult.errors;
+      headersMap = workerResult.headersMap;
 
       this.warnings = errors.length;
     }
@@ -202,6 +207,9 @@ export class TaskLocalFileTransform extends QueueTask {
     await this.destinationDirectory.writeFile(this.realFileName, frontMatter + markdown);
 
     this.localLinks.append(localFile.id, localFile.fileName, links);
+    for (const k in headersMap) {
+      this.globalHeadersMap['gdoc:' + localFile.id + k] = 'gdoc:' + localFile.id + headersMap[k];
+    }
   }
 
   async generate(localFile: LocalFile): Promise<void> {
diff --git a/src/containers/transform/TransformContainer.ts b/src/containers/transform/TransformContainer.ts
index b9655166..16606699 100644
--- a/src/containers/transform/TransformContainer.ts
+++ b/src/containers/transform/TransformContainer.ts
@@ -9,7 +9,6 @@ import {GoogleFilesScanner} from './GoogleFilesScanner.ts';
 import {convertToRelativeMarkDownPath, convertToRelativeSvgPath} from '../../LinkTranslator.ts';
 import {LocalFilesGenerator} from './LocalFilesGenerator.ts';
 import {QueueTransformer} from './QueueTransformer.ts';
-import {NavigationHierarchy} from './generateNavigationHierarchy.ts';
 import {ConflictFile, LocalFile, RedirFile} from '../../model/LocalFile.ts';
 import {TaskLocalFileTransform} from './TaskLocalFileTransform.ts';
 import {MimeTypes} from '../../model/GoogleFile.ts';
@@ -200,7 +199,6 @@ export class TransformLog extends Transport {
 export class TransformContainer extends Container {
   private logger: winston.Logger;
   private generatedFileService: FileContentService;
-  private hierarchy: NavigationHierarchy = {};
   private localLog: LocalLog;
   private localLinks: LocalLinks;
   private filterFilesIds: FileId[];
@@ -210,6 +208,7 @@ export class TransformContainer extends Container {
   private transformLog: TransformLog;
   private isFailed = false;
   private useGoogleMarkdowns = false;
+  private globalHeadersMap: {[key: string]: string} = {};
 
   constructor(public readonly params: ContainerConfig, public readonly paramsArr: ContainerConfigArr = {}) {
     super(params, paramsArr);
@@ -299,7 +298,8 @@ export class TransformContainer extends Container {
           destinationDirectory,
           localFile,
           this.localLinks,
-          this.userConfigService.config
+          this.userConfigService.config,
+          this.globalHeadersMap
         );
         queueTransformer.addTask(task);
       } else {
@@ -443,15 +443,22 @@ export class TransformContainer extends Container {
       if (fileName.endsWith('.md') || fileName.endsWith('.svg')) {
         const content = await destinationDirectory.readFile(fileName);
 
-        const newContent = content.replace(/(gdoc:[A-Z0-9_-]+)/ig, (str: string) => {
-          const fileId = str.substring('gdoc:'.length).replace(/#.*/, '');
-          const hash = getUrlHash(str);
+        const newContent = content.replace(/(gdoc:[A-Z0-9_-]+)(#[^'")\s]*)?/ig, (str: string) => {
+          let fileId = str.substring('gdoc:'.length).replace(/#.*/, '');
+          let hash = getUrlHash(str) || '';
+          if (hash && this.globalHeadersMap[str]) {
+            const idx = this.globalHeadersMap[str].indexOf('#');
+            if (idx >= 0) {
+              fileId = this.globalHeadersMap[str].substring('gdoc:'.length, idx);
+              hash = this.globalHeadersMap[str].substring(idx);
+            }
+          }
           const lastLog = this.localLog.findLastFile(fileId);
           if (lastLog && lastLog.event !== 'removed') {
             if (fileName.endsWith('.svg')) {
               return convertToRelativeSvgPath(lastLog.filePath, destinationDirectory.getVirtualPath() + fileName);
             } else {
-              return convertToRelativeMarkDownPath(lastLog.filePath, destinationDirectory.getVirtualPath() + fileName);
+              return convertToRelativeMarkDownPath(lastLog.filePath, destinationDirectory.getVirtualPath() + fileName) + hash;
             }
           } else {
             return 'https://drive.google.com/open?id=' + fileId + hash.replace('#_', '#heading=h.');
diff --git a/src/odt/OdtToMarkdown.ts b/src/odt/OdtToMarkdown.ts
index 944bd150..80306c2d 100644
--- a/src/odt/OdtToMarkdown.ts
+++ b/src/odt/OdtToMarkdown.ts
@@ -68,6 +68,7 @@ export class OdtToMarkdown {
   private picturesDir = '';
   private picturesDirAbsolute = '';
   private rewriteRules: RewriteRule[] = [];
+  private headersMap: { [p: string]: string } = {};
 
   constructor(private document: DocumentContent, private documentStyles: DocumentStyles, private fileNameMap: StringToStringMap = {}, private xmlMap: StringToStringMap = {}) {
   }
@@ -131,7 +132,8 @@ export class OdtToMarkdown {
     // text = this.processMacros(text);
     // text = this.fixBlockMacros(text);
 
-    await postProcess(this.chunks, this.rewriteRules);
+    const { headersMap } = await postProcess(this.chunks, this.rewriteRules);
+    this.headersMap = headersMap;
 
     const markdown = this.chunks.toString();
     return this.trimBreaks(markdown);
@@ -223,7 +225,7 @@ export class OdtToMarkdown {
 
   addLink(href: string) {
     if (href && !href.startsWith('#') && href.indexOf(':') > -1) {
-      this.links.add(href);
+      this.links.add(href.replace(/#.*$/, ''));
     }
   }
 
@@ -701,4 +703,8 @@ export class OdtToMarkdown {
     this.errors.push(error);
   }
 
+  getHeadersMap() {
+    return this.headersMap;
+  }
+
 }
diff --git a/src/odt/executeOdtToMarkdown.ts b/src/odt/executeOdtToMarkdown.ts
index 2d6c71c4..de1d0348 100644
--- a/src/odt/executeOdtToMarkdown.ts
+++ b/src/odt/executeOdtToMarkdown.ts
@@ -42,5 +42,7 @@ export async function executeOdtToMarkdown(workerData) {
     fs.writeFileSync(path.join(workerData.destinationPath, workerData.realFileName.replace(/.md$/, '.debug.xml')), markdown);
   }
 
-  return { links, frontMatter, markdown, errors };
+  const headersMap = converter.getHeadersMap();
+
+  return { links, frontMatter, markdown, errors, headersMap };
 }
diff --git a/src/odt/postprocess/postProcess.ts b/src/odt/postprocess/postProcess.ts
index defd6d68..cd125669 100644
--- a/src/odt/postprocess/postProcess.ts
+++ b/src/odt/postprocess/postProcess.ts
@@ -40,7 +40,7 @@ export async function postProcess(chunks: MarkdownNodes, rewriteRules: RewriteRu
   convertMathMl(chunks);
 
   trimParagraphs(chunks);
-  await rewriteHeaders(chunks);
+  const { headersMap} = await rewriteHeaders(chunks);
   trimParagraphs(chunks);
   addEmptyLinesAfterParas(chunks);
   removeTdParas(chunks); // Requires: addEmptyLinesAfterParas
@@ -67,4 +67,6 @@ export async function postProcess(chunks: MarkdownNodes, rewriteRules: RewriteRu
   if (process.env.DEBUG_COLORS) {
     dump(chunks.body);
   }
+
+  return { headersMap };
 }
diff --git a/src/odt/postprocess/rewriteHeaders.ts b/src/odt/postprocess/rewriteHeaders.ts
index c24ad8f5..12900974 100644
--- a/src/odt/postprocess/rewriteHeaders.ts
+++ b/src/odt/postprocess/rewriteHeaders.ts
@@ -1,7 +1,10 @@
-import {walkRecursiveAsync} from '../markdownNodesUtils.ts';
+import slugify from 'slugify';
+import {extractText, walkRecursiveAsync, walkRecursiveSync} from '../markdownNodesUtils.ts';
 import {MarkdownNodes, MarkdownTextNode} from '../MarkdownNodes.ts';
 
-export async function rewriteHeaders(markdownChunks: MarkdownNodes) {
+export async function rewriteHeaders(markdownChunks: MarkdownNodes): Promise<{ headersMap: {[key: string]: string} }> {
+  const headersMap = {};
+
   let inPre = false;
   await walkRecursiveAsync(markdownChunks.body, async (chunk, ctx: { nodeIdx: number }) => {
     if (chunk.isTag && 'PRE' === chunk.tag) {
@@ -21,6 +24,9 @@ export async function rewriteHeaders(markdownChunks: MarkdownNodes) {
     }
 
     if (chunk.isTag && ['H1', 'H2', 'H3', 'H4'].includes(chunk.tag)) {
+      const innerTxt = extractText(chunk);
+      const slug = slugify(innerTxt.trim(), { replacement: '-', lower: true, remove: /[#*+~.,^()'"!:@]/g });
+
       if (chunk.children.length === 1) {
         const child = chunk.children[0];
         if (child.isTag && child.tag === 'BOOKMARK/') {
@@ -32,7 +38,11 @@ export async function rewriteHeaders(markdownChunks: MarkdownNodes) {
         const child = chunk.children[j];
         if (child.isTag && child.tag === 'BOOKMARK/') {
           const toMove = chunk.children.splice(j, 1);
-          chunk.children.splice(chunk.children.length, 0, ...toMove);
+          if (slug && !headersMap['#' + child.payload.id]) {
+            headersMap['#' + child.payload.id] = '#' + slug;
+          } else {
+            chunk.children.splice(chunk.children.length, 0, ...toMove);
+          }
           break;
         }
       }
@@ -57,4 +67,16 @@ export async function rewriteHeaders(markdownChunks: MarkdownNodes) {
       return { nodeIdx: ctx.nodeIdx + 1 };
     }
   });
+
+  if (Object.keys(headersMap).length > 0) {
+    walkRecursiveSync(markdownChunks.body, (chunk) => {
+      if (chunk.isTag === true && chunk.payload?.href) {
+        if (headersMap[chunk.payload.href]) {
+          chunk.payload.href = headersMap[chunk.payload.href];
+        }
+      }
+    });
+  }
+
+  return { headersMap };
 }
diff --git a/test/git/RebaseTest.ts b/test/git/RebaseTest.ts
index b83a5d0d..a09e79b4 100644
--- a/test/git/RebaseTest.ts
+++ b/test/git/RebaseTest.ts
@@ -382,8 +382,6 @@ describe('RebaseTest', function () {
     {
       const history = await scannerLocal.history('');
 
-      console.log('historyhistory', history);
-
       assert.equal(2, history.length);
       assert.equal('Change on second repo', history[0].message);
 
diff --git a/test/odt_md/bullets.md b/test/odt_md/bullets.md
index 7e3bc650..fa4a618a 100644
--- a/test/odt_md/bullets.md
+++ b/test/odt_md/bullets.md
@@ -1,4 +1,4 @@
-## Bullet List 
+## Bullet List
 
 * Capability to print standard leave forms from the patient portal
 * Pre-loaded standard email templates
@@ -6,7 +6,7 @@
 * Quick View Absence Management Worklist
 * Leave Types Report
 
-## Ordered List 
+## Ordered List
 
 1. Capability to print standard leave forms from the patient portal
 2. Pre-loaded standard email templates
@@ -14,7 +14,7 @@
 4. Quick View Absence Management Worklist
 5. Leave Types Report
 
-## Mixed Lists 
+## Mixed Lists
 
 Item before list
 
diff --git a/test/odt_md/confluence.md b/test/odt_md/confluence.md
index d9a81fa6..22e93b1f 100644
--- a/test/odt_md/confluence.md
+++ b/test/odt_md/confluence.md
@@ -1,14 +1,14 @@
-# Confluence to Google Docs Conversion Notes 
+# Confluence to Google Docs Conversion Notes
 
-## Goal 
+## Goal
 
 Convert Confluence Documents in to Google Documents for the purpose of using WikiGDrive to publish them.
 
-## Delivery 
+## Delivery
 
 A new github repo with a node.js script specific to this conversion.
 
-## High level Process 
+## High level Process
 
 * Scan all of the documents in a Confluence Space
 * Make google documents in a shared drive (two passes will be required so links between documents can be known as content is added).
@@ -30,13 +30,13 @@ A new github repo with a node.js script specific to this conversion.
 * Embedded Video should be converted to an image with a hyperlink
 * Formatting is not required to be converted.
 
-## Proposed Instructions 
+## Proposed Instructions
 
 ```
 confluence2google
 ```
 
-## Links and Possible Approaches 
+## Links and Possible Approaches
 
 1. Use REST API
     1. [Confluence Cloud REST API](https://developer.atlassian.com/cloud/confluence/rest/)
@@ -52,7 +52,7 @@ confluence2google
 
-## Examples 
+## Examples
 
 Simple - [https://confluence.example.com/display/DOCS/Sample](https://confluence.example.com/display/DOCS/Sample)
 
diff --git a/test/odt_md/example-document.md b/test/odt_md/example-document.md
index d2bd4180..8e7a0d81 100644
--- a/test/odt_md/example-document.md
+++ b/test/odt_md/example-document.md
@@ -7,13 +7,13 @@
     * [Image](#image)
     * [Preformatted Text](#preformatted-text)
 
-# Heading 1 
+# Heading 1
 
-## Heading level 2 
+## Heading level 2
 
 Some normal text with hyperlinks to a [website](https://www.enterprisehealth.com/) and a link to a document on the [shared drive](gdoc:1H6vwfQXIexdg4ldfaoPUjhOZPnSkNn6h29WD6Fi-SBY) with multiple versions of [the link](gdoc:1H6vwfQXIexdg4ldfaoPUjhOZPnSkNn6h29WD6Fi-SBY) because people cut and paste.
 
 [Link to test page](gdoc:1iou0QW09pdUhaNtS1RfjJh12lxKAbbq91-SHGihXu_4). Link to [doc in another folder](gdoc:1G4xwfBdH5mvEQyGN16TD2vFUHP8aNgU7wPst-2QTZug).
 
-### Heading level 3 - with a table 
+### Heading level 3 - with a table
@@ -43,13 +43,13 @@ After subtable
 
-### Heading 3 - a diagram with links 
+### Heading 3 - a diagram with links
 
 [Diagram](gdoc:1Du-DYDST4liLykJl0fHSCvuQYIYhtOfwco-ntn38Dy8)
 
 [Diagram](gdoc:1Du-DYDST4liLykJl0fHSCvuQYIYhtOfwco-ntn38Dy8)
 
-### Heading 3 - with a Table of contents 
+### Heading 3 - with a Table of contents
 
 * [Heading 1](#heading-1)
     * [Heading level 2](#heading-level-2)
@@ -60,15 +60,15 @@ After subtable
     * [Image](#image)
     * [Preformatted Text](#preformatted-text)
 
-# Other examples 
+# Other examples
 
-## Image 
+## Image
 
 ![](1000000000000640000001CF60FB0243CA95EC14.jpg)
 
 ![](10000000000003F0000003F092F85671239C65F9.jpg)
 
-## Preformatted Text 
+## Preformatted Text
 
 ```
 This is monospaced text. This should line up |
@@ -76,11 +76,11 @@ This is monospaced text. This should line up |
 ```
 
-## Code 
+## Code
 
 Code blocks are part of the Markdown spec, but syntax highlighting isn't. However, many renderers -- like Github's and *Markdown Here* -- support syntax highlighting. Which languages are supported and how those language names should be written will vary from renderer to renderer. *Markdown Here* supports highlighting for dozens of languages (and not-really-languages, like diffs and HTTP headers); to see the complete list, and how to write the language names, see the [highlight.js demo page](http://softwaremaniacs.org/media/soft/highlight/test.html).
 
-### Typescript / Javascript 
+### Typescript / Javascript
 
 {{markdown}}
 ```javascript
@@ -102,13 +102,13 @@ myArray.forEach(() => { }); // fat arrow syntax
 ```
 {{/markdown}}
 
-## Video 
+## Video
 
 From Youtube: [Google Drive, Docs, and Project Management with GSuite](https://www.youtube.com/watch?v=v6QAIWLCz8I&t=1743s)
 
-## Horizontal Lines 
+## Horizontal Lines
 
 This is some text separated by a horizontal line
 
@@ -116,7 +116,7 @@ ___
 This is after the horizontal line.
 
-## Lists 
+## Lists
 
 * Bullet 1
 * Bullet 2
@@ -130,13 +130,13 @@ This is after the horizontal line.
 2. Alpha 2
 3. Alpha 3
 
-## Formatting 
+## Formatting
 
 Some **bold** **_boldanditalic_*** italic* text
 
-## Equations 
+## Equations
 
-### Using the actual equation object 
+### Using the actual equation object
 
 ```math
 E = m c^{2}
@@ -146,13 +146,13 @@ E = m c^{2}
 e^{i \pi} - 1 = 0
 ```
 
-### Text equivalent 
+### Text equivalent
 
 *E=mc**2*
 
 Inline $$E = m c^{2}$$ math
 
-## Footnotes 
+## Footnotes
 
 1Footnotes should display as a footnote, and should always display at the very end of the document (page)**?**
 This is some sample text with a footnote.
diff --git a/test/odt_md/issue-432.md b/test/odt_md/issue-432.md
index a112605e..a1117e81 100644
--- a/test/odt_md/issue-432.md
+++ b/test/odt_md/issue-432.md
@@ -1,4 +1,4 @@
-#### Test Methodology 
+#### Test Methodology
 
 MIE will report a count of messages for each supported message type:
 
diff --git a/test/odt_md/issue-435-436.md b/test/odt_md/issue-435-436.md
index 44892fd8..2488b631 100644
--- a/test/odt_md/issue-435-436.md
+++ b/test/odt_md/issue-435-436.md
@@ -1,4 +1,4 @@
-## Editing Existing Questions 
+## Editing Existing Questions
 
 Similar to adding a new question, users must have the proper permission to modify existing questions.
 
diff --git a/test/odt_md/pre-mie.md b/test/odt_md/pre-mie.md
index ef21edd0..5b72a4e0 100644
--- a/test/odt_md/pre-mie.md
+++ b/test/odt_md/pre-mie.md
@@ -1,4 +1,4 @@
-## Request 
+## Request
 
 {{% pre language="html" theme="RDark" %}}
 ```
@@ -8,7 +8,7 @@ https://webchartnow.com/fhirr4sandbox/webchart.cgi/fhir/CarePlan/11
 ```
 {{% /pre %}}
 
-## Response 
+## Response
 
 {{% pre language="json" theme="RDark" %}}
 ```
diff --git a/test/odt_md/project-overview.md b/test/odt_md/project-overview.md
index afa7c95b..f63d7fbe 100644
--- a/test/odt_md/project-overview.md
+++ b/test/odt_md/project-overview.md
@@ -12,7 +12,7 @@
     * [Images](#images)
     * [FAQ](#faq)
 
-# Wiki G Drive Project Overview 
+# Wiki G Drive Project Overview
 
 * [Wiki G Drive Project Overview](#wiki-g-drive-project-overview)
     * [Overview](#overview)
@@ -28,7 +28,7 @@
     * [Images](#images)
     * [FAQ](#faq)
 
-## Overview 
+## Overview
 
 WikiGDrive is a node app that uses the [Google Drive API](https://developers.google.com/drive/api/v3/quickstart/nodejs) to transform Google Docs and Drawings into markdown.
 
@@ -60,7 +60,7 @@ WikiGDrive GitHub
 
-## Requirements 
+## Requirements
 
 The app must:
 
@@ -84,7 +84,7 @@ Later phase:
 * Google sheets to CSV with MIE's datavis
 * Markdown -> Google Docs converter
 
-## Instructions (proposed) 
+## Instructions (proposed)
 
     npm install wikigdrive
 
@@ -106,7 +106,7 @@ Options:
 
 wikigdrive keeps a local JSON config file in the dest directory with state from prior runs. The config contains a map of URL driveIds to the local filenames along with metadata about each file.
 
-## Renames and Redirecting 
+## Renames and Redirecting
 
 When a Document is renamed or moved in the shared drive the driveId says the same, but its place in the filesystem changes. For example a document named "Carbon" would be created as Carbon.md. Sometime later its renamed to "Carbon Fiber" then a new file "Carbon Fiber.md" would be made with the content and the old "Carbon.md" is changed to:
 
@@ -141,7 +141,7 @@ Then sometime later, "Example 1" is renamed to "Sample 1" the folder layout shou
 * Example-1.md -> /Container/Sample-1.md
 * Example-2.md -> /Container/Example-2.md
 
-## Collisions with Filenames 
+## Collisions with Filenames
 
 Google Drive allows filenames with the same name to be created on shared drives. When transforming them into the local filesystem, each file will be renamed to a new file and a disambiguation page will be placed in their place. Eg:
 
@@ -163,11 +163,11 @@ The contents of Carbon.md would show each of the conflicting references:
 
-## Table of Contents and Index 
+## Table of Contents and Index
 
 In the root of the local filesystem two files will be created: the toc.md and index.md
 
-### Table of Contents 
+### Table of Contents
 
 The table of contents is a layout of the documents and their position in the drive as an unordered list. It should not contain redirected files, images, etc.
 
@@ -175,7 +175,7 @@ The table of contents is a layout of the documents and their position in the dri
 The index is a listing of all of the defined terms and their references in the documents. The processing may be passed to another tool to construct the index. Examples: [kramdown](https://meta.stackexchange.com/questions/72395/is-it-possible-to-have-definition-lists-in-markdown), [Asciidoctor](https://asciidoctor.org/docs/user-manual/)
 
-## Markdown Cleanup 
+## Markdown Cleanup
 
 * Bold headings: ([issue](https://github.com/mieweb/wikiGDrive/issues/17)) Remove the ** bold markdown from all headings.
 
 ![](10000201000001A5000000492C856905A808045C.png)
@@ -185,13 +185,13 @@ The index is a listing of all of the defined terms and their references in the d
 ![](1000020100000243000000F28AB7617254FDBB3A.png)
 
-## Images 
+## Images
 
 Two kinds of images exist within Google Docs: 1) Embedded images stored within the document and 2) images that are referenced to another "Drawing" on the google drive.
 
 WikiGDrive processes images by placing them in a folder named with a similar name to the page. (eg: index.md would result in a folder index.images with each embedded image in that folder).
 
 If you make a drawing somewhere in the google drive folder and link it in the google document (WITH A HYPERLINK b/c Google does not expose the internal link via the api) then WikiGDrive will process the drawing as a SVG and place a proper reference to the SVG in the markdown.
 
-## FAQ 
+## FAQ
 
 * What is the purpose of this tool?
     * To enable collaborative editing of documentation and the ability to publish that documentation as well as linking it to revision control system branches (like in git)
diff --git a/test/odt_md/strong-headers.md b/test/odt_md/strong-headers.md
index b01843ec..b21393a9 100644
--- a/test/odt_md/strong-headers.md
+++ b/test/odt_md/strong-headers.md
@@ -1,4 +1,4 @@
-## Data Migration Workflow Considerations 
+## Data Migration Workflow Considerations
 
 As we may recall, the Health Surveillance module allows users to easily track and manage overall health for risk groups and patient populations. Following that is information on how to utilize the import/export tools available with every {{% system-name %}} system.
 
@@ -9,7 +9,7 @@ Health Surveillance (HS) Data is typically broken down into 4 parts during the d
 1. Active HS Memberships and Next Due Dates
 2. Historical HS Memberships
 
-#### Active HS Memberships and Next Due Dates 
+#### Active HS Memberships and Next Due Dates
 
 Nearly every {{% sys-name %}} data migration involves the migration of active HS memberships and Next Due Dates. Membership inclusions and exclusions may be explicit or implicit depending on the configuration of each panel, and those decisions will all weigh into the scoping of the data migration.
 
@@ -20,11 +20,11 @@ Some clients require the migration of historical, or non-active HS memberships.
 1. Discrete data migration of Historical HS Memberships involves migrating memberships with a Begin and End Date for each employee's expired panel. Discrete migrations such as these require mapping each legacy panel membership to {{% sys-name %}} panels, which will then allow for the reporting of historical panel memberships and dates, per employee.
 2. Non-Discrete summary documents of historical HS memberships provide a single document showing the legacy name of the panel for each chart with HS memberships, along with the start- and end-dates where that data is available. These documents are significantly less effort than discrete data migrations of historical HS memberships, and allow clinicians to reference the historical entry and exit data of an employee's memberships.
 
-#### Open Orders 
+#### Open Orders
 
 In some cases, open or pending orders are required for migration for employees with overdue but active panel memberships. This use case is discussed further in the section on [How to determine the Next Due Date on a HS Panel](#how-to-determine-the-next-due-date-on-a-hs-panel).
 
-#### Historical Orders 
+#### Historical Orders
 
 In some cases, migration of completed orders are required to show that tests or tasks were completed on a particular date. Like any other data migration, discrete migration of historical orders involves more mapping and more effort than creating a summary document of historical orders. The following use cases are most common where the migration of historical orders are needed: