Update according to Rubocop linter
Commit dca17c64e7, parent 3f450e2197
Mirror of https://github.com/foo-dogsquared/asciidoctor-foodogsquared-extensions.git, synced 2025-01-30 22:57:56 +00:00
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'asciidoctor'
 require 'asciidoctor/extensions'
 require_relative 'man-inline-macro/extension'
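Note, not part of the diff: the `# frozen_string_literal: true` magic comment added at the top of each file here is what Rubocop's Style/FrozenStringLiteralComment cop asks for; it makes every string literal in that file frozen. A minimal sketch of the behavioural difference, with a throwaway variable name chosen only for illustration:

# frozen_string_literal: true

greeting = 'hello'
greeting.frozen?     # => true
greeting << ' world' # raises FrozenError on Ruby >= 2.5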
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'uri'
 
 class GitHubLinkInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
@@ -6,7 +8,7 @@ class GitHubLinkInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
   named :github
   name_positional_attributes 'caption'
 
-  def process parent, target, attrs
+  def process(parent, target, attrs)
     doc = parent.document
 
     text = attrs['caption'] || target
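Aside, not something the commit states itself: the parenthesised `def process(parent, target, attrs)` form applied throughout this diff matches Rubocop's Style/MethodDefParentheses cop. A small before/after sketch with a hypothetical method name:

# Before (flagged by Style/MethodDefParentheses):
#   def render_link caption, target
# After (the style this commit applies):
def render_link(caption, target)
  %(<a href="#{target}">#{caption}</a>)
end

puts render_link('Asciidoctor', 'https://asciidoctor.org')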
@@ -1,14 +1,16 @@
+# frozen_string_literal: true
+
 require 'base64'
 require 'json'
 require 'open-uri'
 require 'uri'
 
 class GitHubRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
-  def handles? target
+  def handles?(target)
     target.start_with? 'github:'
   end
 
-  def warn_or_raise doc, warning
+  def warn_or_raise(doc, warning)
     if (doc.safe > Asciidoctor::SafeMode::SERVER) && !(doc.attr? 'allow-uri-read')
       raise warning
     else
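Context for the `warn_or_raise` check visible in this hunk: the processor only raises when the document runs above SERVER safe mode and `allow-uri-read` is not set. A minimal sketch of how a caller would opt in; the extension file path is illustrative and registration is assumed to happen inside it:

require 'asciidoctor'
require_relative 'github-raw-include-processor/extension' # illustrative path

Asciidoctor.convert_file 'document.adoc',
                         safe: :safe,
                         attributes: { 'allow-uri-read' => '' }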
@@ -16,7 +18,7 @@ class GitHubRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
     end
   end
 
-  def process doc, reader, target, attrs
+  def process(doc, reader, target, attrs)
     src = target.delete_prefix('github:').split('/', 3)
     owner = src.at 0
     repo = src.at 1
@@ -28,14 +30,14 @@ class GitHubRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
     uri = URI.parse %(https://api.github.com/repos/#{owner}/#{repo}/contents/#{path})
 
     if attrs['rev']
-      query = { :ref => attrs['rev'] }
+      query = { ref: attrs['rev'] }
       uri.query = URI.encode_www_form query
     end
 
     begin
       headers = {
         'Header' => 'application/vnd.github+json',
-        'X-GitHub-Api-Version' => '2022-11-28',
+        'X-GitHub-Api-Version' => '2022-11-28'
       }
 
       headers['Authorization'] = "Token #{ENV['GITHUB_API_BEARER_TOKEN']}" if ENV['GITHUB_API_BEARER_TOKEN']
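Two cops appear to be at work in this hunk: Style/HashSyntax (`{ :ref => attrs['rev'] }` becomes `{ ref: attrs['rev'] }`) and Style/TrailingCommaInHashLiteral (the comma after the last header entry is dropped). A small sketch with made-up values:

require 'uri'

# Old style flagged by Rubocop:
#   query = { :ref => 'v1.2.3', }
# New style applied by this commit:
query = { ref: 'v1.2.3' }

puts URI.encode_www_form(query) # => "ref=v1.2.3"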
@@ -45,20 +47,18 @@ class GitHubRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
 
         # If the response is an array, it is likely to be a directory. In this
         # usecase, we'll just list them.
-        content = if response.kind_of? Array
+        content = if response.is_a? Array
                     warning = %(given path '#{path}' from GitHub repo '#{repo}' is a directory)
                     warn_or_raise doc, warning
                     warning
-                  elsif response.kind_of? Object
-                    if response['content'] && response['encoding'] == 'base64'
-                      Base64.decode64 response['content']
-                    end
+                  elsif response.is_a? Object
+                    Base64.decode64 response['content'] if response['content'] && response['encoding'] == 'base64'
                   end
 
         reader.push_include content, target, target, 1, attrs
       end
     rescue OpenURI::HTTPError => e
-      warning = %(error while getting '#{path}' in GitHub repo '#{repo}: #{e}')
+      warning = %(error while getting '#{path}' in GitHub repo '#{namespaced_repo}: #{e}')
       warn_or_raise doc, warning
       reader.push_include warning, target, target, 1, attrs
     end
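The `kind_of?` to `is_a?` switch above is Rubocop's Style/ClassCheck preference; the two predicates are aliases, so behaviour is unchanged. The nested `if` around `Base64.decode64` also collapses into a one-line modifier (Style/IfUnlessModifier). A quick sketch with a hard-coded response standing in for the GitHub API payload:

require 'base64'
require 'json'

response = JSON.parse('{"content":"aGVsbG8=","encoding":"base64"}')

# kind_of? and is_a? behave identically; Rubocop merely standardises on is_a?.
response.is_a?(Hash) # => true

content = Base64.decode64 response['content'] if response['content'] && response['encoding'] == 'base64'
puts content # => "hello"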
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'uri'
 
 class GitLabLinkInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
@@ -7,7 +9,7 @@ class GitLabLinkInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
   name_positional_attributes 'caption'
   default_attributes 'domain' => 'gitlab.com'
 
-  def process parent, target, attrs
+  def process(parent, target, attrs)
     doc = parent.document
 
     text = attrs['caption'] || target
@@ -1,13 +1,15 @@
+# frozen_string_literal: true
+
 require 'base64'
 require 'json'
 require 'open-uri'
 require 'uri'
 
 class GitLabRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
-  @@prefix = 'gitlab:'
+  @prefix = 'gitlab:'
 
-  def handles? target
-    target.start_with? @@prefix
+  def handles?(target)
+    target.start_with? @prefix
   end
 
   def warn_or_raise doc, warning
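The `@@prefix` to `@prefix` change silences Rubocop's Style/ClassVars cop. One caveat worth flagging, as a reading of the diff rather than anything the commit states: an instance variable assigned in the class body lives on the class object, so `handles?`, an instance method, would see `@prefix` as nil unless it is exposed some other way. A frozen constant is the usual alternative the cop documentation points to; a sketch with a hypothetical class name:

class RawIncludeProcessorSketch
  PREFIX = 'gitlab:'

  def handles?(target)
    target.start_with? PREFIX
  end
end

puts RawIncludeProcessorSketch.new.handles?('gitlab:foo/bar') # => true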
@@ -18,14 +20,14 @@ class GitLabRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
     end
   end
 
-  def process doc, reader, target, attrs
-    src = target.delete_prefix(@@prefix).split('/', 2)
+  def process(doc, reader, target, attrs)
+    src = target.delete_prefix(@prefix).split('/', 2)
     owner = src.at 0
     repo = src.at 1
     namespaced_repo = "#{owner}/#{repo}"
 
-    raise %(there is no 'path' attribute given for GitLab repo '#{namespaced_repo}') unless (attrs.key? 'path')
-    raise %(no given ref for getting file in '#{namespaced_repo}') unless (attrs.key? 'rev')
+    raise %(there is no 'path' attribute given for GitLab repo '#{namespaced_repo}') unless attrs.key? 'path'
+    raise %(no given ref for getting file in '#{namespaced_repo}') unless attrs.key? 'rev'
 
     path = attrs['path']
     rev = attrs['rev']
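Dropping the parentheses in `unless (attrs.key? 'path')` looks like Rubocop's Style/ParenthesesAroundCondition (or a closely related cop); the condition itself is untouched. A minimal sketch with a stand-in attributes hash:

attrs = { 'path' => 'README.adoc' }

# Before: raise '...' unless (attrs.key? 'path')
# After, as applied in this commit:
raise %(there is no 'path' attribute given) unless attrs.key? 'path'
puts attrs['path']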
@@ -42,7 +44,7 @@ class GitLabRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
     uri += %(/repository/files/#{URI.encode_www_form_component path})
 
     # Then the revision.
-    query = { :ref => rev }
+    query = { ref: rev }
     uri.query = URI.encode_www_form query
 
     content = begin
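Context for the URL building in this hunk: GitLab's repository-files API expects the file path itself to be percent-encoded, which is why the context line uses `URI.encode_www_form_component`, while the revision goes into the query string. A small sketch with made-up path and ref values:

require 'uri'

path = 'lib/man-inline-macro.rb'
puts URI.encode_www_form_component(path) # => "lib%2Fman-inline-macro.rb"

query = { ref: 'master' }
puts URI.encode_www_form(query) # => "ref=master"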
@@ -52,9 +54,7 @@ class GitLabRawIncludeProcessor < Asciidoctor::Extensions::IncludeProcessor
      OpenURI.open_uri(uri, headers) do |f|
        response = JSON.parse(f.read)
 
-        if response['content'] && response['encoding'] == 'base64'
-          Base64.decode64 response['content']
-        end
+        Base64.decode64 response['content'] if response['content'] && response['encoding'] == 'base64'
 
        reader.push_include content, target, target, 1, attrs
      end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class ManInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
   use_dsl
 
@@ -5,7 +7,7 @@ class ManInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
   name_positional_attributes 'volnum'
   default_attributes 'domain' => 'manpages.debian.org'
 
-  def process parent, target, attrs
+  def process(parent, target, attrs)
     doc = parent.document
     text = manname = target
     suffix = (volnum = attrs['volnum']) ? %((#{volnum})) : ''
@@ -1,15 +1,17 @@
+# frozen_string_literal: true
+
 class SWHInlineMacro < Asciidoctor::Extensions::InlineMacroProcessor
   use_dsl
 
   named :swh
   name_positional_attributes 'caption'
 
-  def process parent, target, attrs
+  def process(parent, target, attrs)
     doc = parent.document
 
     # We're only considering `swh:` starting with the scheme version. Also, it
     # looks nice aesthetically.
-    swhid = (target.start_with? 'swh:') ? target : %(swh:#{target})
+    swhid = target.start_with?('swh:') ? target : %(swh:#{target})
     swhid_core_identifier = (swhid.split ';').at 0
 
     text = attrs['caption'] || swhid_core_identifier
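The last hunk's change is again parenthesisation: `(target.start_with? 'swh:') ? ... : ...` becomes `target.start_with?('swh:') ? ... : ...`, which, judging from the result, is how Rubocop prefers a method call used as a ternary condition (Style/TernaryParentheses). A short sketch with an illustrative identifier:

target = 'swh:1:cnt:0000000000000000000000000000000000000000' # illustrative SWHID

swhid = target.start_with?('swh:') ? target : %(swh:#{target})
swhid_core_identifier = (swhid.split ';').at 0

puts swhid_core_identifier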