mirror of https://github.com/foo-dogsquared/wiki.git (synced 2025-01-31 01:57:54 +00:00)
Export to site with Next.js and GitHub Actions
This commit is contained in: parent 74fc53bb6a, commit fbe52394d5
34 .github/workflows/generate-site.yaml vendored Normal file
@@ -0,0 +1,34 @@
# TODO:
# - Set up the structure correctly for site generation
# - Build the site
# - Export the site to GitHub Pages
name: Generate site to GitHub pages
on: [push]
jobs:
  generate-site:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: cachix/install-nix-action@v13
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: workflow/nix-shell-action@v1
        with:
          packages: nodejs,coreutils
          script: |
            mkdir -p site/public
            mv *.org structured/ site/public
            cd site
            npm install
            npm run build
            ls -la
      - name: Deploy to GitHub Pages
        if: success()
        uses: crazy-max/ghaction-github-pages@v2
        with:
          jekyll: false
          target_branch: gh-pages
          build_dir: site/out
        env:
          GITHUB_TOKEN: ${{ secrets.PAGES_TOKEN }}
11 README.adoc
@@ -227,6 +227,17 @@ As a side effect, this mitigates against overwriting of generated assets from or
== Static site export

While the wiki is used exclusively with Emacs, it is also exported as a website built with Next.js and link:https://github.com/rasendubi/uniorg/[uniorg], deployed with GitHub Actions (see link:./.github/workflows/[`./.github/workflows/`]).
The source code of the site is at link:./site/[`./site/`].

Here's an image summarizing the workflow.

image::assets/workflow.png[]

== Future tasks

This also means you should expect the following changes if you're watching this repo for some reason.
BIN assets/workflow.kra Normal file (binary file not shown)
BIN assets/workflow.png Normal file (binary file not shown; 312 KiB)
34 site/.gitignore vendored Normal file
@@ -0,0 +1,34 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel
5641 site/package-lock.json generated Normal file (file diff suppressed because it is too large)
28 site/package.json Normal file
@@ -0,0 +1,28 @@
{
  "name": "org-braindump",
  "version": "0.3.0",
  "private": true,
  "scripts": {
    "dev": "next dev",
    "build": "next build && next export",
    "start": "next start"
  },
  "dependencies": {
    "next": "10.0.5",
    "orgast-util-visit-ids": "^0.3.0",
    "react": "17.0.1",
    "react-dom": "17.0.1",
    "rehype-react": "^6.1.0",
    "rehype-url-inspector": "^2.0.2",
    "to-vfile": "^6.1.0",
    "trough": "^1.0.5",
    "unified": "^9.2.0",
    "uniorg-extract-keywords": "^0.3.0",
    "uniorg-parse": "^0.3.0",
    "uniorg-rehype": "^0.3.0",
    "uniorg-slug": "^0.3.0",
    "vfile-find-down": "^5.0.1",
    "vfile-rename": "^1.0.3",
    "vfile-reporter": "^6.0.2"
  }
}
12 site/src/components/Link.jsx Normal file
@@ -0,0 +1,12 @@
import React from 'react';
import NextLink from 'next/link';

const MyLink = ({ href, ...props }) => {
  return (
    <NextLink href={href} passHref={true}>
      <a href={href} {...props} />
    </NextLink>
  );
};

export default MyLink;
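As a quick usage illustration (not part of the commit; the Nav component and the /archive target are hypothetical), the component drops in wherever a plain anchor would go:

import React from 'react';
import Link from '../components/Link';

// Renders a next/link-backed anchor. href is forwarded to the inner
// <a> as well, so the markup is a valid link even before hydration.
const Nav = () => <Link href="/archive">Archive</Link>;

export default Nav;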
24 site/src/components/Rehype.jsx Normal file
@@ -0,0 +1,24 @@
import React from 'react';

import unified from 'unified';
import rehype2react from 'rehype-react';

import Link from './Link';

// We use rehype-react to process hast and transform it into React
// components, which allows us to replace some of the components with
// custom implementations. E.g., we can replace all <a> links to use
// `next/link`.
const processor = unified().use(rehype2react, {
  createElement: React.createElement,
  Fragment: React.Fragment,
  components: {
    a: Link,
  },
});

const Rehype = ({ hast }) => {
  return <>{processor.stringify(hast)}</>;
};

export default Rehype;
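For intuition, here is a minimal sketch (not in the commit) of feeding a hand-written hast tree through the component; in the real site the tree comes from the uniorg-rehype pipeline in lib/orgToHtml.js, and the Demo component is made up:

import React from 'react';
import Rehype from './Rehype';

// A tiny hand-written hast tree containing a single link.
const hast = {
  type: 'root',
  children: [
    {
      type: 'element',
      tagName: 'a',
      properties: { href: '/index' },
      children: [{ type: 'text', value: 'home' }],
    },
  ],
};

// The <a> node is rendered through the custom Link component above.
const Demo = () => <Rehype hast={hast} />;

export default Demo;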
113 site/src/lib/api.js Normal file
@@ -0,0 +1,113 @@
import * as path from 'path';
import trough from 'trough';
import toVFile from 'to-vfile';
import findDown from 'vfile-find-down';
import rename from 'vfile-rename';
import report from 'vfile-reporter';

import orgToHtml from './orgToHtml';
import resolveLinks from './resolveLinks';

// We serve posts from the "public" directory, so that we don't have to
// copy assets.
//
// If you change this directory, make sure you copy all assets
// (images, linked files) to the public directory, so that next.js
// serves them.
const pagesDirectory = path.join(process.cwd(), 'public');

const processor = trough()
  .use(collectFiles)
  .use(processPosts)
  .use(resolveLinks)
  .use(populateBacklinks);

function collectFiles(root) {
  return new Promise((resolve, reject) => {
    findDown.all(
      (f, stats) => stats.isFile() && f.basename.endsWith('.org'),
      root,
      (err, files) => {
        if (err) {
          reject(err);
        } else {
          files.forEach((f) => {
            const slug =
              '/' + path.relative(root, f.path).replace(/\.org$/, '');
            f.data.slug = slug;
          });
          resolve(files);
        }
      }
    );
  });
}

async function processPosts(files) {
  return Promise.all(files.map(processPost));

  async function processPost(file) {
    try {
      await toVFile.read(file, 'utf8');
    } catch (e) {
      console.error('Error reading file', file, e);
      throw e;
    }

    rename(file, { path: file.data.slug });

    await orgToHtml(file);

    return file;
  }
}

// Assign all collected backlinks to each file. This function should be
// called after all pages have been processed---otherwise, it might
// miss backlinks.
function populateBacklinks(files) {
  const backlinks = {};
  files.forEach((file) => {
    file.data.links = file.data.links || new Set();
    file.data.backlinks = backlinks[file.data.slug] =
      backlinks[file.data.slug] || new Set();

    file.data.links.forEach((other) => {
      backlinks[other] = backlinks[other] || new Set();
      backlinks[other].add(file.data.slug);
    });
  });
}

const loadPosts = async () => {
  const files = await new Promise((resolve, reject) =>
    processor.run(pagesDirectory, (err, files) => {
      console.error(report(err || files, { quiet: true }));
      if (err) reject(err);
      else resolve(files);
    })
  );
  const posts = Object.fromEntries(files.map((f) => [f.data.slug, f]));
  return posts;
};

const allPosts = async () => {
  const posts = await loadPosts();
  return posts;
};

export async function getAllPaths() {
  const posts = await loadPosts();
  return Object.keys(posts);
}

export async function getPostBySlug(slug) {
  const posts = await allPosts();
  const post = await posts[slug];
  return post;
}

export async function getAllPosts() {
  const posts = await allPosts();
  return await Promise.all(Object.values(posts));
}
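To make the backlink bookkeeping concrete, a small sketch (the data is hypothetical, and populateBacklinks is module-private here, so assume it were exported): if /a links to /b, then after the pass /b carries /a in its backlinks set.

// Fake vfile-like objects, with just enough shape for populateBacklinks.
const a = { data: { slug: '/a', links: new Set(['/b']) } };
const b = { data: { slug: '/b' } };

populateBacklinks([a, b]);
console.log([...b.data.backlinks]); // ['/a']
// Each slug maps to a single shared Set instance in the backlinks
// object, so the order in which files are processed doesn't matter.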
59 site/src/lib/orgToHtml.js Normal file
@@ -0,0 +1,59 @@
import unified from 'unified';

import orgParse from 'uniorg-parse';
import org2rehype from 'uniorg-rehype';
import extractKeywords from 'uniorg-extract-keywords';
import { uniorgSlug } from 'uniorg-slug';
import { visitIds } from 'orgast-util-visit-ids';

const processor = unified()
  .use(orgParse)
  .use(extractKeywords)
  .use(uniorgSlug)
  .use(extractIds)
  .use(org2rehype)
  .use(toJson);

export default async function orgToHtml(file) {
  try {
    return await processor.process(file);
  } catch (e) {
    console.error('failed to process file', file.path, e);
    throw e;
  }
}

function extractIds() {
  return transformer;

  function transformer(tree, file) {
    const data = file.data || (file.data = {});
    // ids is a map: id => #anchor
    const ids = data.ids || (data.ids = {});

    visitIds(tree, (id, node) => {
      if (node.type === 'org-data') {
        ids[id] = '';
      } else if (node.type === 'headline') {
        if (!node.data?.hProperties?.id) {
          // The headline doesn't have an html id assigned. (Did you
          // remove uniorg-slug?)
          //
          // Assign an html id property based on the org id property.
          node.data = node.data || {};
          node.data.hProperties = node.data.hProperties || {};
          node.data.hProperties.id = id;
        }

        ids[id] = '#' + node.data.hProperties.id;
      }
    });
  }
}

/** A primitive compiler to return the node as is, without stringifying. */
function toJson() {
  this.Compiler = (node) => {
    return node;
  };
}
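A sketch of how this module is driven (illustrative only; the real call site is processPost in lib/api.js, and the org snippet and path here are made up):

import toVFile from 'to-vfile';
import orgToHtml from './orgToHtml';

const file = toVFile({
  path: '/example',
  contents: '#+title: Example\n\n* Some headline\n',
});

orgToHtml(file).then((processed) => {
  // uniorg-extract-keywords copied #+title into file.data, and the
  // toJson compiler left the hast tree on `result` instead of a string.
  console.log(processed.data.title); // 'Example'
  console.log(processed.result.type); // 'root'
});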
75 site/src/lib/resolveLinks.js Normal file
@@ -0,0 +1,75 @@
import unified from 'unified';
import inspectUrls from 'rehype-url-inspector';

export default function resolveLinks(files) {
  // map from id -> { path, url }
  const idMap = {};
  files.forEach((file) => {
    Object.entries(file.data.ids).forEach(([id, anchor]) => {
      idMap[id] = { path: file.path, anchor };
    });
  });

  const processor = unified()
    .use(fromJson)
    .use(inspectUrls, { inspectEach: processUrl })
    .use(toJson);

  return Promise.all(files.map((file) => processor.process(file)));

  /**
   * Process each link to:
   * 1. Resolve id links.
   * 2. Convert relative file:// links to the path used by the
   *    blog. file://file.org -> /file.org
   * 3. Collect all links into file.data.links, so they can be used
   *    later to calculate backlinks.
   */
  function processUrl({ url: urlString, propertyName, node, file }) {
    try {
      // next/link does not handle relative urls properly. Use
      // file.path (the slug of the file) to normalize the link against.
      let url = new URL(urlString, 'file://' + file.path);

      // process id links
      if (url.protocol === 'id:') {
        const id = url.pathname;
        const ref = idMap[id];
        if (ref) {
          url = new URL(`file://${ref.path}${ref.anchor}`);
        } else {
          console.warn(`${file.path}: Unresolved id link`, urlString);
        }
        // fallthrough. id links are re-processed as file links
      }

      if (url.protocol === 'file:') {
        let href = url.pathname.replace(/\.org$/, '');
        node.properties[propertyName] = href + url.hash;

        file.data.links = file.data.links || [];
        file.data.links.push(href);
      }
    } catch (e) {
      // This can happen if the org file contains an invalid string
      // that looks like a URL (e.g., "http://example.com:blah/"
      // passes the regexes, but fails to parse as a URL).
      console.warn(`${file.path}: Failed to process URL`, urlString, e);
      // No re-throwing: the issue is not critical enough to stop
      // processing. The document is still valid; it's just the link
      // that isn't.
    }
  }
}

function fromJson() {
  this.Parser = (node, file) => {
    return file.result || JSON.parse(node);
  };
}

function toJson() {
  this.Compiler = (node) => {
    return node;
  };
}
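The rewriting hinges on WHATWG URL parsing; here is a small illustration of the two cases (the paths and id are hypothetical):

// A relative file link resolves against the current file's slug...
const rel = new URL('other.org', 'file://' + '/notes/index');
console.log(rel.protocol, rel.pathname); // 'file:' '/notes/other.org'

// ...while an id link keeps its own scheme, with the raw id as pathname.
const id = new URL('id:abc123', 'file://' + '/notes/index');
console.log(id.protocol, id.pathname); // 'id:' 'abc123'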
60 site/src/pages/[[...slug]].jsx Normal file
@@ -0,0 +1,60 @@
import { join } from 'path';
import Head from 'next/head';

import { getAllPaths, getPostBySlug } from '../lib/api';

import Link from '../components/Link';
import Rehype from '../components/Rehype';

const Note = ({ title, hast, backlinks }) => {
  return (
    <main>
      <Head>
        <title>{title}</title>
      </Head>
      <h1>{title}</h1>
      <Rehype hast={hast} />
      {!!backlinks.length && (
        <section>
          <h2>{'Backlinks'}</h2>
          <ul>
            {backlinks.map((b) => (
              <li key={b.path}>
                <Link href={b.path}>{b.title}</Link>
              </li>
            ))}
          </ul>
        </section>
      )}
    </main>
  );
};
export default Note;

export const getStaticPaths = async () => {
  const paths = await getAllPaths();
  // add '/', which is synonymous with '/index'
  paths.push('/');

  return {
    paths,
    fallback: false,
  };
};

export const getStaticProps = async ({ params }) => {
  const path = '/' + join(...(params.slug || ['index']));
  const post = await getPostBySlug(path);
  const data = post.data;
  const backlinks = await Promise.all([...data.backlinks].map(getPostBySlug));
  return {
    props: {
      title: data.title || post.basename,
      hast: post.result,
      backlinks: backlinks.map((b) => ({
        path: b.path,
        title: b.data.title || b.basename,
      })),
    },
  };
};
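The [[...slug]] optional catch-all receives the URL segments as an array (or nothing at the site root); a sketch of the mapping done inside getStaticProps (the toPath helper and values are illustrative):

import { join } from 'path';

// Mirrors the slug handling above (illustration only).
const toPath = (slug) => '/' + join(...(slug || ['index']));

console.log(toPath(undefined));      // '/index' -- the site root '/'
console.log(toPath(['foo']));        // '/foo'
console.log(toPath(['foo', 'bar'])); // '/foo/bar'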
7 site/src/pages/_app.js Normal file
@@ -0,0 +1,7 @@
import '../styles/globals.css';

function MyApp({ Component, pageProps }) {
  return <Component {...pageProps} />;
}

export default MyApp;
33 site/src/pages/archive.jsx Normal file
@@ -0,0 +1,33 @@
import Head from 'next/head';

import { getAllPosts } from '../lib/api';
import Link from '../components/Link';

const Archive = ({ posts }) => {
  return (
    <main>
      <Head>
        <title>{'Archive'}</title>
      </Head>
      <h1>{'Archive'}</h1>
      <ul>
        {posts.map((p) => (
          <li key={p.path}>
            <Link href={p.path}>{p.title}</Link>
          </li>
        ))}
      </ul>
    </main>
  );
};
export default Archive;

export const getStaticProps = async () => {
  const allPosts = await getAllPosts();
  const posts = allPosts
    .map((p) => ({ title: p.data.title || p.basename, path: p.path }))
    .sort((a, b) => {
      return a.title.toLowerCase() < b.title.toLowerCase() ? -1 : 1;
    });
  return { props: { posts } };
};
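A note on the comparator: it never returns 0, so posts with equal titles end up in arbitrary order. An equivalent, locale-aware alternative (a suggestion, not what the commit uses; byTitle is a hypothetical name) would be:

// Stable for equal titles and case-insensitive via the collator options.
const byTitle = (a, b) =>
  a.title.localeCompare(b.title, undefined, { sensitivity: 'base' });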
34 site/src/styles/globals.css Normal file
@@ -0,0 +1,34 @@
html,
body {
  padding: 0;
  margin: 0;
  font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen,
    Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif;

  line-height: 1.5;
  word-wrap: break-word;
  overflow-wrap: break-word;
}

* {
  box-sizing: border-box;
}

#__next {
  margin: 16px;
}

main {
  max-width: 800px;
  margin: auto;
}

pre {
  background-color: #eee;
  padding: 4px;
  overflow: auto;
}

code {
  background-color: #eee;
}
5 site/vercel.json Normal file
@@ -0,0 +1,5 @@
{
  "github": {
    "silent": true
  }
}